diff --git a/.patchable b/.patchable new file mode 100644 index 000000000..a7b374335 --- /dev/null +++ b/.patchable @@ -0,0 +1 @@ +# Marker file for Patchable to know that this is the image repository diff --git a/CHANGELOG.md b/CHANGELOG.md index 805d2a4c1..5b68a5b3f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,7 +28,7 @@ All notable changes to this project will be documented in this file. - trino-cli: Add version 470 ([#999]). - trino-storage-connector: Add version 470 ([#999]). - superset: Add version `4.1.1` ([#991]). -- Add Patchable patch management tool ([#1003], [#1007]). +- Add Patchable patch management tool ([#1003], [#1005], [#1007]). - nifi: Add 1.28.1, 2.2.0 ([#1006]). ### Changed @@ -78,6 +78,7 @@ All notable changes to this project will be documented in this file. [#999]: https://github.com/stackabletech/docker-images/pull/999 [#1000]: https://github.com/stackabletech/docker-images/pull/1000 [#1003]: https://github.com/stackabletech/docker-images/pull/1003 +[#1005]: https://github.com/stackabletech/docker-images/pull/1005 [#1006]: https://github.com/stackabletech/docker-images/pull/1006 [#1007]: https://github.com/stackabletech/docker-images/pull/1007 diff --git a/druid/stackable/patches/26.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch b/druid/stackable/patches/26.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch index 6823e2c61..1e8e17aba 100644 --- a/druid/stackable/patches/26.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch +++ b/druid/stackable/patches/26.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch @@ -1,6 +1,6 @@ -From a8bec93ee6d0a4364676333168229aa0ec56657e Mon Sep 17 00:00:00 2001 +From 098e0333cb3977164c62ab0f29aafaf9b1ac6c7c Mon Sep 17 00:00:00 2001 From: Lars Francke -Date: Thu, 12 Dec 2024 17:59:17 +0100 +Date: Wed, 10 Jul 2024 17:07:13 +0200 Subject: Removes all traces of the druid ranger extension --- diff --git 
a/druid/stackable/patches/26.0.0/0002-Include-Prometheus-emitter-in-distribution.patch b/druid/stackable/patches/26.0.0/0002-Include-Prometheus-emitter-in-distribution.patch index 3bc040817..269b870f8 100644 --- a/druid/stackable/patches/26.0.0/0002-Include-Prometheus-emitter-in-distribution.patch +++ b/druid/stackable/patches/26.0.0/0002-Include-Prometheus-emitter-in-distribution.patch @@ -1,6 +1,6 @@ -From c19288cd84492d76f924152f2d4f0d0fc0499ed6 Mon Sep 17 00:00:00 2001 +From cb547abec8bb002984bcb598c75f5031ea9513e1 Mon Sep 17 00:00:00 2001 From: Lars Francke -Date: Thu, 12 Dec 2024 17:59:17 +0100 +Date: Wed, 10 Jul 2024 17:07:13 +0200 Subject: Include Prometheus emitter in distribution --- diff --git a/druid/stackable/patches/26.0.0/0003-Stop-building-unused-extensions.patch b/druid/stackable/patches/26.0.0/0003-Stop-building-unused-extensions.patch index 722e9e42a..a46cccaf7 100644 --- a/druid/stackable/patches/26.0.0/0003-Stop-building-unused-extensions.patch +++ b/druid/stackable/patches/26.0.0/0003-Stop-building-unused-extensions.patch @@ -1,4 +1,4 @@ -From 85cacbcc47c88a56acd60d91fbf0412040523c8d Mon Sep 17 00:00:00 2001 +From 2722e90c01f02f804f1030f6aa91d07638e5e0a3 Mon Sep 17 00:00:00 2001 From: Lars Francke Date: Thu, 12 Dec 2024 17:59:17 +0100 Subject: Stop building unused extensions. 
diff --git a/druid/stackable/patches/26.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch b/druid/stackable/patches/26.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch index 53c20d559..1bb778624 100644 --- a/druid/stackable/patches/26.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch +++ b/druid/stackable/patches/26.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch @@ -1,4 +1,4 @@ -From 4229d1c0d096e10dce72929224a7b4c2284fb417 Mon Sep 17 00:00:00 2001 +From 6a6cd8806bffe6b8f5da14d0d9f9b75fb79ac3cd Mon Sep 17 00:00:00 2001 From: Lars Francke Date: Thu, 12 Dec 2024 17:59:17 +0100 Subject: Updates all dependencies that have a new patch release available. diff --git a/druid/stackable/patches/26.0.0/0005-Include-jackson-dataformat-xml-dependency.patch b/druid/stackable/patches/26.0.0/0005-Include-jackson-dataformat-xml-dependency.patch index 4032142ab..e2876fc7a 100644 --- a/druid/stackable/patches/26.0.0/0005-Include-jackson-dataformat-xml-dependency.patch +++ b/druid/stackable/patches/26.0.0/0005-Include-jackson-dataformat-xml-dependency.patch @@ -1,4 +1,4 @@ -From d55895a2525286a5198a3b327c3ce503bc852ead Mon Sep 17 00:00:00 2001 +From 1e4c0f607abfe1362941af5f53e04cd0b845f41f Mon Sep 17 00:00:00 2001 From: Lars Francke Date: Thu, 12 Dec 2024 17:59:17 +0100 Subject: Include jackson-dataformat-xml dependency. 
diff --git a/druid/stackable/patches/26.0.0/0006-Stop-building-the-tar.gz-distribution.patch b/druid/stackable/patches/26.0.0/0006-Stop-building-the-tar.gz-distribution.patch index 910a7a0a5..00a9e9719 100644 --- a/druid/stackable/patches/26.0.0/0006-Stop-building-the-tar.gz-distribution.patch +++ b/druid/stackable/patches/26.0.0/0006-Stop-building-the-tar.gz-distribution.patch @@ -1,4 +1,4 @@ -From d1ae8732e2eee44abb5c831f5363c69e75e64a9a Mon Sep 17 00:00:00 2001 +From c7d74ad665618125e09f365bae0ecaa2876b6a87 Mon Sep 17 00:00:00 2001 From: Lars Francke Date: Thu, 12 Dec 2024 17:59:17 +0100 Subject: Stop building the tar.gz distribution. diff --git a/druid/stackable/patches/26.0.0/0007-Update-CycloneDX-plugin.patch b/druid/stackable/patches/26.0.0/0007-Update-CycloneDX-plugin.patch index 36756ca94..89e466782 100644 --- a/druid/stackable/patches/26.0.0/0007-Update-CycloneDX-plugin.patch +++ b/druid/stackable/patches/26.0.0/0007-Update-CycloneDX-plugin.patch @@ -1,6 +1,6 @@ -From ff7d6a5ea07ea30653b47f6ef6844103a7ac3349 Mon Sep 17 00:00:00 2001 +From 56541040ce6a5c36d53fdda71316d2a7ab8245c0 Mon Sep 17 00:00:00 2001 From: Lukas Voetmand -Date: Thu, 12 Dec 2024 17:59:17 +0100 +Date: Fri, 6 Sep 2024 17:53:52 +0200 Subject: Update CycloneDX plugin --- diff --git a/druid/stackable/patches/26.0.0/0008-Fix-CVE-2024-36114.patch b/druid/stackable/patches/26.0.0/0008-Fix-CVE-2024-36114.patch index 7368f95e7..766d53a5f 100644 --- a/druid/stackable/patches/26.0.0/0008-Fix-CVE-2024-36114.patch +++ b/druid/stackable/patches/26.0.0/0008-Fix-CVE-2024-36114.patch @@ -1,4 +1,4 @@ -From bdd52ae32874b686d6ddfa3179f6af787444662f Mon Sep 17 00:00:00 2001 +From a8442f203b70216cd7b3ec9cefe8f7627fa0d7e2 Mon Sep 17 00:00:00 2001 From: Malte Sander Date: Thu, 12 Dec 2024 17:59:17 +0100 Subject: Fix CVE-2024-36114 diff --git a/druid/stackable/patches/26.0.0/0009-Update-FMPP-version.patch b/druid/stackable/patches/26.0.0/0009-Update-FMPP-version.patch index 67120aec1..90ea19709 100644 --- 
a/druid/stackable/patches/26.0.0/0009-Update-FMPP-version.patch +++ b/druid/stackable/patches/26.0.0/0009-Update-FMPP-version.patch @@ -1,4 +1,4 @@ -From 736165ab0fe73e0bef765f2cfd21cd800baddbc1 Mon Sep 17 00:00:00 2001 +From 2d634afe93690d295ddf69751b03e824cbd9f934 Mon Sep 17 00:00:00 2001 From: Lars Francke Date: Thu, 12 Dec 2024 06:35:21 +0100 Subject: Update FMPP version diff --git a/druid/stackable/patches/30.0.0/01-remove-ranger-security.patch b/druid/stackable/patches/30.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch similarity index 79% rename from druid/stackable/patches/30.0.0/01-remove-ranger-security.patch rename to druid/stackable/patches/30.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch index 14c4d1c5e..cbbe1578a 100644 --- a/druid/stackable/patches/30.0.0/01-remove-ranger-security.patch +++ b/druid/stackable/patches/30.0.0/0001-Removes-all-traces-of-the-druid-ranger-extension.patch @@ -1,13 +1,15 @@ -Removes all traces of the druid ranger extension - +From b6665733d54d730bafcd4c238b11bedd4e412667 Mon Sep 17 00:00:00 2001 From: Lars Francke - +Date: Wed, 10 Jul 2024 17:07:13 +0200 +Subject: Removes all traces of the druid ranger extension --- - 0 files changed + distribution/pom.xml | 2 -- + pom.xml | 1 - + 2 files changed, 3 deletions(-) diff --git a/distribution/pom.xml b/distribution/pom.xml -index 0f17a8c877..d7cd645767 100644 +index bdbbd8b4c0..e27329e96d 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -252,8 +252,6 @@ @@ -20,10 +22,10 @@ index 0f17a8c877..d7cd645767 100644 -c org.apache.druid.extensions:druid-catalog diff --git a/pom.xml b/pom.xml -index cfca79dc6e..2acb812cbe 100644 +index 9051ed24c5..3ab467e468 100644 --- a/pom.xml +++ b/pom.xml -@@ -199,7 +199,6 @@ +@@ -198,7 +198,6 @@ extensions-core/simple-client-sslcontext extensions-core/druid-basic-security extensions-core/google-extensions diff --git a/druid/stackable/patches/30.0.0/02-prometheus-emitter-from-source.patch 
b/druid/stackable/patches/30.0.0/0002-Include-Prometheus-emitter-in-distribution.patch similarity index 90% rename from druid/stackable/patches/30.0.0/02-prometheus-emitter-from-source.patch rename to druid/stackable/patches/30.0.0/0002-Include-Prometheus-emitter-in-distribution.patch index 8f0ca6795..f0f99bd7e 100644 --- a/druid/stackable/patches/30.0.0/02-prometheus-emitter-from-source.patch +++ b/druid/stackable/patches/30.0.0/0002-Include-Prometheus-emitter-in-distribution.patch @@ -1,13 +1,14 @@ -Include Prometheus emitter in distribution - +From 0cee640900bb8119ba6be12911578927aeb191ed Mon Sep 17 00:00:00 2001 From: Lars Francke - +Date: Mon, 17 Feb 2025 16:42:34 +0100 +Subject: Include Prometheus emitter in distribution --- - 0 files changed + distribution/pom.xml | 46 ++++++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 46 insertions(+) diff --git a/distribution/pom.xml b/distribution/pom.xml -index d7cd645767..eda1ddcfab 100644 +index e27329e96d..d5918710ef 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -464,6 +464,52 @@ diff --git a/druid/stackable/patches/30.0.0/03-stop-building-unused-extensions.patch b/druid/stackable/patches/30.0.0/0003-Stop-building-unused-extensions.patch similarity index 89% rename from druid/stackable/patches/30.0.0/03-stop-building-unused-extensions.patch rename to druid/stackable/patches/30.0.0/0003-Stop-building-unused-extensions.patch index 7d0f91dd1..9ff62ae78 100644 --- a/druid/stackable/patches/30.0.0/03-stop-building-unused-extensions.patch +++ b/druid/stackable/patches/30.0.0/0003-Stop-building-unused-extensions.patch @@ -1,18 +1,20 @@ -Stop building unused extensions. - +From 62f340d66dae20da8d7566f992b56223d29d4174 Mon Sep 17 00:00:00 2001 From: Lars Francke +Date: Mon, 17 Feb 2025 16:42:34 +0100 +Subject: Stop building unused extensions. By default Druid builds all community extensions and then discards them while assembling the final distribution. 
This patch removes unused extensions from the build. --- - 0 files changed + pom.xml | 32 +------------------------------- + 1 file changed, 1 insertion(+), 31 deletions(-) diff --git a/pom.xml b/pom.xml -index 2acb812cbe..38e0ddc61a 100644 +index 3ab467e468..171a98f803 100644 --- a/pom.xml +++ b/pom.xml -@@ -201,39 +201,9 @@ +@@ -200,39 +200,9 @@ extensions-core/google-extensions extensions-core/druid-catalog extensions-core/testing-tools diff --git a/druid/stackable/patches/30.0.0/04-update-patch-dependencies.patch b/druid/stackable/patches/30.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch similarity index 85% rename from druid/stackable/patches/30.0.0/04-update-patch-dependencies.patch rename to druid/stackable/patches/30.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch index 989b42620..7a78bdf3b 100644 --- a/druid/stackable/patches/30.0.0/04-update-patch-dependencies.patch +++ b/druid/stackable/patches/30.0.0/0004-Updates-all-dependencies-that-have-a-new-patch-relea.patch @@ -1,13 +1,19 @@ -Updates all dependencies that have a new patch release available. - +From 59ce2824b13ad95f728a5a5afc1e5b18ff547a38 Mon Sep 17 00:00:00 2001 From: Lars Francke - +Date: Mon, 17 Feb 2025 16:42:49 +0100 +Subject: Updates all dependencies that have a new patch release available. 
--- - 0 files changed + extensions-core/druid-pac4j/pom.xml | 5 +++- + extensions-core/kubernetes-extensions/pom.xml | 2 +- + extensions-core/orc-extensions/pom.xml | 2 +- + extensions-core/parquet-extensions/pom.xml | 2 +- + pom.xml | 29 ++++++++++--------- + processing/pom.xml | 2 +- + 6 files changed, 24 insertions(+), 18 deletions(-) diff --git a/extensions-core/druid-pac4j/pom.xml b/extensions-core/druid-pac4j/pom.xml -index 282e0e5b15..523a2ca305 100644 +index 88a570fd72..d3714ca0e2 100644 --- a/extensions-core/druid-pac4j/pom.xml +++ b/extensions-core/druid-pac4j/pom.xml @@ -38,7 +38,10 @@ @@ -23,10 +29,10 @@ index 282e0e5b15..523a2ca305 100644 diff --git a/extensions-core/kubernetes-extensions/pom.xml b/extensions-core/kubernetes-extensions/pom.xml -index e3e77a99af..1304740ff3 100644 +index 8bf105ea86..d445888da1 100644 --- a/extensions-core/kubernetes-extensions/pom.xml +++ b/extensions-core/kubernetes-extensions/pom.xml -@@ -35,7 +35,7 @@ +@@ -34,7 +34,7 @@ @@ -36,7 +42,7 @@ index e3e77a99af..1304740ff3 100644 diff --git a/extensions-core/orc-extensions/pom.xml b/extensions-core/orc-extensions/pom.xml -index b7eb007979..2c210c42c0 100644 +index ade9bd8f37..db0ad77894 100644 --- a/extensions-core/orc-extensions/pom.xml +++ b/extensions-core/orc-extensions/pom.xml @@ -31,7 +31,7 @@ @@ -49,7 +55,7 @@ index b7eb007979..2c210c42c0 100644 diff --git a/extensions-core/parquet-extensions/pom.xml b/extensions-core/parquet-extensions/pom.xml -index 371d2e7673..ad0b874db0 100644 +index d3404a62d2..9cbb6d51d5 100644 --- a/extensions-core/parquet-extensions/pom.xml +++ b/extensions-core/parquet-extensions/pom.xml @@ -201,7 +201,7 @@ @@ -62,7 +68,7 @@ index 371d2e7673..ad0b874db0 100644 diff --git a/pom.xml b/pom.xml -index 73fb14c1fc..26b24b8c14 100644 +index 171a98f803..38c6b8dbb5 100644 --- a/pom.xml +++ b/pom.xml @@ -74,7 +74,7 @@ @@ -110,9 +116,9 @@ index 73fb14c1fc..26b24b8c14 100644 1.3.1 1.7.36 5.13.0 -@@ -120,17 +120,17 @@ - however it is required in some 
cases when running against mockito 4.x (mockito 4.x is required for Java <11. - We use the following property to pick the proper artifact based on Java version (see pre-java-11 profile) --> +@@ -119,17 +122,17 @@ + core - 1.12.638 - 2.8.0 @@ -134,10 +140,10 @@ index 73fb14c1fc..26b24b8c14 100644 diff --git a/processing/pom.xml b/processing/pom.xml -index affd900fe6..0daad4fa56 100644 +index 3a62790fb0..c3afa9fd27 100644 --- a/processing/pom.xml +++ b/processing/pom.xml -@@ -37,7 +37,7 @@ +@@ -36,7 +36,7 @@ 1.6.5 ${sigar.base.version}.132 5.3.4 diff --git a/druid/stackable/patches/30.0.0/05-xmllayout-dependencies.patch b/druid/stackable/patches/30.0.0/0005-Include-jackson-dataformat-xml-dependency.patch similarity index 77% rename from druid/stackable/patches/30.0.0/05-xmllayout-dependencies.patch rename to druid/stackable/patches/30.0.0/0005-Include-jackson-dataformat-xml-dependency.patch index 29d325dfc..ac37f765a 100644 --- a/druid/stackable/patches/30.0.0/05-xmllayout-dependencies.patch +++ b/druid/stackable/patches/30.0.0/0005-Include-jackson-dataformat-xml-dependency.patch @@ -1,16 +1,18 @@ -Include jackson-dataformat-xml dependency. - +From 062c9f9bf3b27752bb4546e468e3b48befc893fe Mon Sep 17 00:00:00 2001 From: Lars Francke +Date: Mon, 17 Feb 2025 16:42:49 +0100 +Subject: Include jackson-dataformat-xml dependency. This allows us to use XmlLayout for Log4jV2. By including it here as a dependency we can make sure that we always have the matching version and we don't need to include it manually later in the build. 
--- - 0 files changed + server/pom.xml | 5 +++++ + 1 file changed, 5 insertions(+) diff --git a/server/pom.xml b/server/pom.xml -index 410b51480e..b7dcf46111 100644 +index ec2f4dec4f..3861f18ed1 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -205,6 +205,11 @@ diff --git a/druid/stackable/patches/30.0.0/06-dont-build-targz.patch b/druid/stackable/patches/30.0.0/0006-Stop-building-the-tar.gz-distribution.patch similarity index 78% rename from druid/stackable/patches/30.0.0/06-dont-build-targz.patch rename to druid/stackable/patches/30.0.0/0006-Stop-building-the-tar.gz-distribution.patch index 1bed79fd1..d5913d90f 100644 --- a/druid/stackable/patches/30.0.0/06-dont-build-targz.patch +++ b/druid/stackable/patches/30.0.0/0006-Stop-building-the-tar.gz-distribution.patch @@ -1,11 +1,12 @@ -Stop building the tar.gz distribution. - +From c2a67767f6e06dcf7ea2bad4f7616cf69717b3b6 Mon Sep 17 00:00:00 2001 From: Lars Francke +Date: Mon, 17 Feb 2025 16:42:49 +0100 +Subject: Stop building the tar.gz distribution. All we do is build Druid tar and gzip it only to immediately uncompress it again. So, instead we just skip the compression step entirely. 
--- - distribution/src/assembly/assembly.xml | 2 +- + distribution/src/assembly/assembly.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/assembly/assembly.xml b/distribution/src/assembly/assembly.xml diff --git a/druid/stackable/patches/30.0.0/07-cyclonedx-plugin.patch b/druid/stackable/patches/30.0.0/0007-Update-CycloneDX-plugin.patch similarity index 63% rename from druid/stackable/patches/30.0.0/07-cyclonedx-plugin.patch rename to druid/stackable/patches/30.0.0/0007-Update-CycloneDX-plugin.patch index b2ddeebf6..6d94c190d 100644 --- a/druid/stackable/patches/30.0.0/07-cyclonedx-plugin.patch +++ b/druid/stackable/patches/30.0.0/0007-Update-CycloneDX-plugin.patch @@ -1,8 +1,17 @@ +From eed0684b8097203e9d61c52093eb6dfe0960850d Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Update CycloneDX plugin + +--- + pom.xml | 6 +++++- + 1 file changed, 5 insertions(+), 1 deletion(-) + diff --git a/pom.xml b/pom.xml -index 9051ed2..10a2c85 100644 +index 38c6b8dbb5..de1a3bddc4 100644 --- a/pom.xml +++ b/pom.xml -@@ -1728,7 +1728,11 @@ +@@ -1700,7 +1700,11 @@ org.cyclonedx cyclonedx-maven-plugin diff --git a/druid/stackable/patches/30.0.0/08-CVE-2024-36114-bump-aircompressor-0-27.patch b/druid/stackable/patches/30.0.0/0008-Fix-CVE-2024-36114.patch similarity index 84% rename from druid/stackable/patches/30.0.0/08-CVE-2024-36114-bump-aircompressor-0-27.patch rename to druid/stackable/patches/30.0.0/0008-Fix-CVE-2024-36114.patch index 04999a574..59243a398 100644 --- a/druid/stackable/patches/30.0.0/08-CVE-2024-36114-bump-aircompressor-0-27.patch +++ b/druid/stackable/patches/30.0.0/0008-Fix-CVE-2024-36114.patch @@ -1,4 +1,8 @@ -Fix CVE-2024-36114 +From 0d9d6f564c52234c1eba4762c465a95f52cf8f0a Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Thu, 12 Dec 2024 17:59:17 +0100 +Subject: Fix CVE-2024-36114 + see https://github.com/stackabletech/vulnerabilities/issues/834 Aircompressor is a library 
with ports of the Snappy, LZO, LZ4, and @@ -17,12 +21,15 @@ have been fixed. When decompressing data from untrusted users, this can be exploited for a denial-of-service attack by crashing the JVM, or to leak other sensitive information from the Java process. There are no known workarounds for this issue. +--- + pom.xml | 6 ++++++ + 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml -index 9051ed24c5..e839295b61 100644 +index de1a3bddc4..7beda6238f 100644 --- a/pom.xml +++ b/pom.xml -@@ -283,6 +283,12 @@ +@@ -255,6 +255,12 @@ diff --git a/druid/stackable/patches/30.0.0/0009-Update-FMPP-version.patch b/druid/stackable/patches/30.0.0/0009-Update-FMPP-version.patch new file mode 100644 index 000000000..98b0297c4 --- /dev/null +++ b/druid/stackable/patches/30.0.0/0009-Update-FMPP-version.patch @@ -0,0 +1,31 @@ +From f6634ac02aa2fab11811915283e92c7b9ae1af5e Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 12 Dec 2024 06:35:21 +0100 +Subject: Update FMPP version + +This is because FMPP Maven Plugin depends on FMPP in version 0.9.14 +which itself depends on a Freemarker version that has not been pinned. +Instead it specifies a "range" which resolves to a SNAPSHOT version +which we don't want. 
+--- + sql/pom.xml | 7 +++++++ + 1 file changed, 7 insertions(+) + +diff --git a/sql/pom.xml b/sql/pom.xml +index 6669d43e52..1d02e4ef58 100644 +--- a/sql/pom.xml ++++ b/sql/pom.xml +@@ -384,6 +384,13 @@ + + com.googlecode.fmpp-maven-plugin + fmpp-maven-plugin ++ ++ ++ net.sourceforge.fmpp ++ fmpp ++ 0.9.16 ++ ++ + + + generate-fmpp-sources diff --git a/druid/stackable/patches/30.0.0/10-cve-2023-34455-rm-snappy.patch b/druid/stackable/patches/30.0.0/0010-Fix-CVE-2023-34455.patch similarity index 86% rename from druid/stackable/patches/30.0.0/10-cve-2023-34455-rm-snappy.patch rename to druid/stackable/patches/30.0.0/0010-Fix-CVE-2023-34455.patch index e4e440d0d..b681c5649 100644 --- a/druid/stackable/patches/30.0.0/10-cve-2023-34455-rm-snappy.patch +++ b/druid/stackable/patches/30.0.0/0010-Fix-CVE-2023-34455.patch @@ -1,4 +1,8 @@ -Fix CVE-2023-34455 +From 4832e1270c2f541ad3724455034cbec394ba6263 Mon Sep 17 00:00:00 2001 +From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com> +Date: Tue, 28 Jan 2025 17:29:59 +0100 +Subject: Fix CVE-2023-34455 + see https://github.com/stackabletech/vulnerabilities/issues/558 At the end of build process, Druid downloads dependencies directly from a remote @@ -8,6 +12,9 @@ The hadoop client depends on a vulnerable version of the snappy library which is then also downloaded even though a newer version is already on the system. This patch removes the vulnerable jars. 
+--- + distribution/pom.xml | 14 ++++++++++++++ + 1 file changed, 14 insertions(+) diff --git a/distribution/pom.xml b/distribution/pom.xml index d5918710ef..2d5bfc6ab4 100644 diff --git a/druid/stackable/patches/30.0.0/09-update-fmpp.patch b/druid/stackable/patches/30.0.0/09-update-fmpp.patch deleted file mode 100644 index 3abb818da..000000000 --- a/druid/stackable/patches/30.0.0/09-update-fmpp.patch +++ /dev/null @@ -1,21 +0,0 @@ -diff --git a/10-update-fmpp.patch b/10-update-fmpp.patch -new file mode 100644 -index 0000000000..e69de29bb2 -diff --git a/sql/pom.xml b/sql/pom.xml -index bdd29f3f91..e5ba89f655 100644 ---- a/sql/pom.xml -+++ b/sql/pom.xml -@@ -322,6 +322,13 @@ - - com.googlecode.fmpp-maven-plugin - fmpp-maven-plugin -+ -+ -+ net.sourceforge.fmpp -+ fmpp -+ 0.9.16 -+ -+ - - - generate-fmpp-sources diff --git a/druid/stackable/patches/30.0.0/patchable.toml b/druid/stackable/patches/30.0.0/patchable.toml new file mode 100644 index 000000000..892b6fab4 --- /dev/null +++ b/druid/stackable/patches/30.0.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/druid.git" +base = "09d36ee324747f1407705c27618b6d415c3fa8a9" diff --git a/druid/stackable/patches/30.0.0/series b/druid/stackable/patches/30.0.0/series deleted file mode 100644 index 0dc0d4cac..000000000 --- a/druid/stackable/patches/30.0.0/series +++ /dev/null @@ -1,9 +0,0 @@ -# This series applies on Git commit 0f4a8032d4f3c17fc8a7d3dba5fc272c1bd76c2b -01-remove-ranger-security.patch -02-prometheus-emitter-from-source.patch -03-stop-building-unused-extensions.patch -04-update-patch-dependencies.patch -05-xmllayout-dependencies.patch -06-dont-build-targz.patch -07-cyclonedx-plugin.patch -08-CVE-2024-36114-bump-aircompressor-0-27.patch diff --git a/hadoop/stackable/patches/3.2.2/001-HADOOP-15767-3.2.2.patch b/hadoop/stackable/patches/3.2.2/001-HADOOP-15767-3.2.2.patch deleted file mode 100644 index 8b7c9c7b4..000000000 --- a/hadoop/stackable/patches/3.2.2/001-HADOOP-15767-3.2.2.patch +++ 
/dev/null @@ -1,180 +0,0 @@ -diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml -index fa4a838babb2..708228c90daa 100644 ---- a/hadoop-common-project/hadoop-common/pom.xml -+++ b/hadoop-common-project/hadoop-common/pom.xml -@@ -612,48 +612,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- -- -- compile -- -- javah -- -- -- ${env.JAVA_HOME}/bin/javah -- -- org.apache.hadoop.io.compress.zlib.ZlibCompressor -- org.apache.hadoop.io.compress.zlib.ZlibDecompressor -- org.apache.hadoop.io.compress.bzip2.Bzip2Compressor -- org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor -- org.apache.hadoop.security.JniBasedUnixGroupsMapping -- org.apache.hadoop.io.nativeio.NativeIO -- org.apache.hadoop.io.nativeio.SharedFileDescriptorFactory -- org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping -- org.apache.hadoop.io.compress.snappy.SnappyCompressor -- org.apache.hadoop.io.compress.snappy.SnappyDecompressor -- org.apache.hadoop.io.compress.zstd.ZStandardCompressor -- org.apache.hadoop.io.compress.zstd.ZStandardDecompressor -- org.apache.hadoop.io.compress.lz4.Lz4Compressor -- org.apache.hadoop.io.compress.lz4.Lz4Decompressor -- org.apache.hadoop.io.erasurecode.ErasureCodeNative -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawDecoder -- org.apache.hadoop.crypto.OpensslCipher -- org.apache.hadoop.crypto.random.OpensslSecureRandom -- org.apache.hadoop.util.NativeCrc32 -- org.apache.hadoop.net.unix.DomainSocket -- org.apache.hadoop.net.unix.DomainSocketWatcher -- -- ${project.build.directory}/native/javah -- -- -- -- - - org.apache.hadoop - hadoop-maven-plugins -@@ -770,43 +728,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- -- -- compile -- -- javah -- -- -- ${env.JAVA_HOME}/bin/javah -- -- 
org.apache.hadoop.io.compress.zlib.ZlibCompressor -- org.apache.hadoop.io.compress.zlib.ZlibDecompressor -- org.apache.hadoop.security.JniBasedUnixGroupsMapping -- org.apache.hadoop.io.nativeio.NativeIO -- org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping -- org.apache.hadoop.io.compress.snappy.SnappyCompressor -- org.apache.hadoop.io.compress.snappy.SnappyDecompressor -- org.apache.hadoop.io.compress.zstd.ZStandardCompressor -- org.apache.hadoop.io.compress.zstd.ZStandardDecompressor -- org.apache.hadoop.io.compress.lz4.Lz4Compressor -- org.apache.hadoop.io.compress.lz4.Lz4Decompressor -- org.apache.hadoop.io.erasurecode.ErasureCodeNative -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawDecoder -- org.apache.hadoop.crypto.OpensslCipher -- org.apache.hadoop.crypto.random.OpensslSecureRandom -- org.apache.hadoop.util.NativeCrc32 -- -- ${project.build.directory}/native/javah -- -- -- -- - - org.codehaus.mojo - exec-maven-plugin -diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml -index 1c9f5ee67ea7..c16a798ad956 100644 ---- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml -+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml -@@ -139,26 +139,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- -- -- compile -- -- javah -- -- -- ${env.JAVA_HOME}/bin/javah -- -- org.apache.hadoop.mapred.nativetask.NativeBatchProcessor -- org.apache.hadoop.mapred.nativetask.NativeRuntime -- -- ${project.build.directory}/native/javah -- -- -- -- - - org.apache.maven.plugins - maven-antrun-plugin -diff --git a/hadoop-project/pom.xml 
b/hadoop-project/pom.xml -index e76347962b29..e1ee4b117682 100644 ---- a/hadoop-project/pom.xml -+++ b/hadoop-project/pom.xml -@@ -171,7 +171,6 @@ - 1.9 - 1.3.1 - 1.0-beta-1 -- 1.0-alpha-8 - 900 - 1.11.563 - 2.3.4 -@@ -1609,11 +1608,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- ${native-maven-plugin.version} -- - - org.codehaus.mojo - make-maven-plugin -@@ -2079,6 +2073,27 @@ - - - -+ -+ native -+ -+ false -+ -+ -+ -+ -+ org.apache.maven.plugins -+ maven-compiler-plugin -+ -+ -+ -+ -h -+ ${project.build.directory}/native/javah/ -+ -+ -+ -+ -+ -+ - - - diff --git a/hadoop/stackable/patches/3.2.4/001-HADOOP-15767-3.2.4.patch b/hadoop/stackable/patches/3.2.4/001-HADOOP-15767-3.2.4.patch deleted file mode 100644 index 821ada355..000000000 --- a/hadoop/stackable/patches/3.2.4/001-HADOOP-15767-3.2.4.patch +++ /dev/null @@ -1,180 +0,0 @@ -diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml -index 4acc2a4ef4a0..eaf5a545fd3d 100644 ---- a/hadoop-common-project/hadoop-common/pom.xml -+++ b/hadoop-common-project/hadoop-common/pom.xml -@@ -617,48 +617,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- -- -- compile -- -- javah -- -- -- ${env.JAVA_HOME}/bin/javah -- -- org.apache.hadoop.io.compress.zlib.ZlibCompressor -- org.apache.hadoop.io.compress.zlib.ZlibDecompressor -- org.apache.hadoop.io.compress.bzip2.Bzip2Compressor -- org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor -- org.apache.hadoop.security.JniBasedUnixGroupsMapping -- org.apache.hadoop.io.nativeio.NativeIO -- org.apache.hadoop.io.nativeio.SharedFileDescriptorFactory -- org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping -- org.apache.hadoop.io.compress.snappy.SnappyCompressor -- org.apache.hadoop.io.compress.snappy.SnappyDecompressor -- org.apache.hadoop.io.compress.zstd.ZStandardCompressor -- org.apache.hadoop.io.compress.zstd.ZStandardDecompressor -- org.apache.hadoop.io.compress.lz4.Lz4Compressor -- 
org.apache.hadoop.io.compress.lz4.Lz4Decompressor -- org.apache.hadoop.io.erasurecode.ErasureCodeNative -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawDecoder -- org.apache.hadoop.crypto.OpensslCipher -- org.apache.hadoop.crypto.random.OpensslSecureRandom -- org.apache.hadoop.util.NativeCrc32 -- org.apache.hadoop.net.unix.DomainSocket -- org.apache.hadoop.net.unix.DomainSocketWatcher -- -- ${project.build.directory}/native/javah -- -- -- -- - - org.apache.hadoop - hadoop-maven-plugins -@@ -775,43 +733,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- -- -- compile -- -- javah -- -- -- ${env.JAVA_HOME}/bin/javah -- -- org.apache.hadoop.io.compress.zlib.ZlibCompressor -- org.apache.hadoop.io.compress.zlib.ZlibDecompressor -- org.apache.hadoop.security.JniBasedUnixGroupsMapping -- org.apache.hadoop.io.nativeio.NativeIO -- org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping -- org.apache.hadoop.io.compress.snappy.SnappyCompressor -- org.apache.hadoop.io.compress.snappy.SnappyDecompressor -- org.apache.hadoop.io.compress.zstd.ZStandardCompressor -- org.apache.hadoop.io.compress.zstd.ZStandardDecompressor -- org.apache.hadoop.io.compress.lz4.Lz4Compressor -- org.apache.hadoop.io.compress.lz4.Lz4Decompressor -- org.apache.hadoop.io.erasurecode.ErasureCodeNative -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawEncoder -- org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawDecoder -- org.apache.hadoop.crypto.OpensslCipher -- org.apache.hadoop.crypto.random.OpensslSecureRandom -- org.apache.hadoop.util.NativeCrc32 -- -- ${project.build.directory}/native/javah -- -- -- -- - - org.codehaus.mojo - exec-maven-plugin -diff 
--git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml -index 2d2336e7e244..2454a84daa8a 100644 ---- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml -+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml -@@ -139,26 +139,6 @@ - - - -- -- org.codehaus.mojo -- native-maven-plugin -- -- -- compile -- -- javah -- -- -- ${env.JAVA_HOME}/bin/javah -- -- org.apache.hadoop.mapred.nativetask.NativeBatchProcessor -- org.apache.hadoop.mapred.nativetask.NativeRuntime -- -- ${project.build.directory}/native/javah -- -- -- -- - - org.apache.maven.plugins - maven-antrun-plugin -diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml -index 23d1c875b713..1cd4e11ef3f5 100644 ---- a/hadoop-project/pom.xml -+++ b/hadoop-project/pom.xml -@@ -172,7 +172,6 @@ - 1.9 - 1.3.1 - 1.0-beta-1 -- 1.0-alpha-8 - 900 - 1.11.901 - 2.3.4 -@@ -1704,11 +1703,6 @@ - maven-war-plugin - ${maven-war-plugin.version} - -- -- org.codehaus.mojo -- native-maven-plugin -- ${native-maven-plugin.version} -- - - org.codehaus.mojo - make-maven-plugin -@@ -2174,6 +2168,27 @@ - - - -+ -+ native -+ -+ false -+ -+ -+ -+ -+ org.apache.maven.plugins -+ maven-compiler-plugin -+ -+ -+ -+ -h -+ ${project.build.directory}/native/javah/ -+ -+ -+ -+ -+ -+ - - - diff --git a/hadoop/stackable/patches/3.2.4/002-HADOOP-18055-3.2.4.patch b/hadoop/stackable/patches/3.2.4/002-HADOOP-18055-3.2.4.patch deleted file mode 100644 index 1d618ddcc..000000000 --- a/hadoop/stackable/patches/3.2.4/002-HADOOP-18055-3.2.4.patch +++ /dev/null @@ -1,999 +0,0 @@ -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -index 705f9980ffbb..39ca69c85f4f 100644 ---- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -27,6 +27,9 @@ - import java.net.MalformedURLException; - import java.net.URI; - import java.net.URL; -+import java.nio.file.Files; -+import java.nio.file.Path; -+import java.nio.file.Paths; - import java.util.ArrayList; - import java.util.Collections; - import java.util.Enumeration; -@@ -635,6 +638,8 @@ private void initializeWebServer(String name, String hostName, - addFilterPathMapping(path, webAppContext); - } - } -+ -+ addAsyncProfilerServlet(contexts); - } - - private void addListener(ServerConnector connector) { -@@ -781,6 +786,25 @@ protected void addDefaultServlets() { - addServlet("conf", "/conf", ConfServlet.class); - } - -+ private void addAsyncProfilerServlet(ContextHandlerCollection contexts) throws IOException { -+ final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); -+ if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { -+ addServlet("prof", "/prof", ProfileServlet.class); -+ Path tmpDir = Paths.get(ProfileServlet.OUTPUT_DIR); -+ if (Files.notExists(tmpDir)) { -+ Files.createDirectories(tmpDir); -+ } -+ ServletContextHandler genCtx = new ServletContextHandler(contexts, "/prof-output-hadoop"); -+ genCtx.addServlet(ProfileOutputServlet.class, "/*"); -+ genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); -+ genCtx.setDisplayName("prof-output-hadoop"); -+ } else { -+ addServlet("prof", "/prof", ProfilerDisabledServlet.class); -+ LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " -+ + "not specified. 
Disabling /prof endpoint."); -+ } -+ } -+ - public void addContext(ServletContextHandler ctxt, boolean isFiltered) { - handlers.addHandler(ctxt); - addNoCacheFilter(ctxt); -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java -new file mode 100644 -index 000000000000..1ecc21f3753c ---- /dev/null -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java -@@ -0,0 +1,87 @@ -+/* -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * "License"); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an "AS IS" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.hadoop.http; -+ -+import java.io.File; -+import java.io.IOException; -+import java.util.regex.Pattern; -+import javax.servlet.ServletException; -+import javax.servlet.http.HttpServletRequest; -+import javax.servlet.http.HttpServletResponse; -+ -+import org.eclipse.jetty.servlet.DefaultServlet; -+import org.slf4j.Logger; -+import org.slf4j.LoggerFactory; -+ -+import org.apache.hadoop.classification.InterfaceAudience; -+ -+/** -+ * Servlet to serve files generated by {@link ProfileServlet}. 
-+ */ -+@InterfaceAudience.Private -+public class ProfileOutputServlet extends DefaultServlet { -+ -+ private static final long serialVersionUID = 1L; -+ -+ private static final Logger LOG = LoggerFactory.getLogger(ProfileOutputServlet.class); -+ // default refresh period 2 sec -+ private static final int REFRESH_PERIOD = 2; -+ // Alphanumeric characters, plus percent (url-encoding), equals, ampersand, dot and hyphen -+ private static final Pattern ALPHA_NUMERIC = Pattern.compile("[a-zA-Z0-9%=&.\\-]*"); -+ -+ @Override -+ protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) -+ throws ServletException, IOException { -+ if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(), req, resp)) { -+ resp.setStatus(HttpServletResponse.SC_UNAUTHORIZED); -+ ProfileServlet.setResponseHeader(resp); -+ resp.getWriter().write("Unauthorized: Instrumentation access is not allowed!"); -+ return; -+ } -+ -+ String absoluteDiskPath = getServletContext().getRealPath(req.getPathInfo()); -+ File requestedFile = new File(absoluteDiskPath); -+ // async-profiler version 1.4 writes 'Started [cpu] profiling' to output file when profiler is -+ // running which gets replaced by final output. If final output is not ready yet, the file size -+ // will be <100 bytes (in all modes). -+ if (requestedFile.length() < 100) { -+ LOG.info("{} is incomplete. Sending auto-refresh header.", requestedFile); -+ String refreshUrl = req.getRequestURI(); -+ // Rebuild the query string (if we have one) -+ if (req.getQueryString() != null) { -+ refreshUrl += "?" + sanitize(req.getQueryString()); -+ } -+ ProfileServlet.setResponseHeader(resp); -+ resp.setHeader("Refresh", REFRESH_PERIOD + ";" + refreshUrl); -+ resp.getWriter().write("This page will be auto-refreshed every " + REFRESH_PERIOD -+ + " seconds until the output file is ready. 
Redirecting to " + refreshUrl); -+ } else { -+ super.doGet(req, resp); -+ } -+ } -+ -+ static String sanitize(String input) { -+ // Basic test to try to avoid any XSS attacks or HTML content showing up. -+ // Duplicates HtmlQuoting a little, but avoid destroying ampersand. -+ if (ALPHA_NUMERIC.matcher(input).matches()) { -+ return input; -+ } -+ throw new RuntimeException("Non-alphanumeric data found in input, aborting."); -+ } -+} -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -new file mode 100644 -index 000000000000..3e19dcde35d7 ---- /dev/null -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -@@ -0,0 +1,394 @@ -+/* -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * "License"); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an "AS IS" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. 
-+ */ -+ -+package org.apache.hadoop.http; -+ -+import java.io.File; -+import java.io.IOException; -+import java.util.ArrayList; -+import java.util.List; -+import java.util.concurrent.TimeUnit; -+import java.util.concurrent.atomic.AtomicInteger; -+import java.util.concurrent.locks.Lock; -+import java.util.concurrent.locks.ReentrantLock; -+import javax.servlet.http.HttpServlet; -+import javax.servlet.http.HttpServletRequest; -+import javax.servlet.http.HttpServletResponse; -+ -+import com.google.common.base.Joiner; -+import org.slf4j.Logger; -+import org.slf4j.LoggerFactory; -+ -+import org.apache.hadoop.classification.InterfaceAudience; -+import org.apache.hadoop.util.ProcessUtils; -+ -+/** -+ * Servlet that runs async-profiler as web-endpoint. -+ *

-+ * Following options from async-profiler can be specified as query paramater. -+ * // -e event profiling event: cpu|alloc|lock|cache-misses etc. -+ * // -d duration run profiling for 'duration' seconds (integer) -+ * // -i interval sampling interval in nanoseconds (long) -+ * // -j jstackdepth maximum Java stack depth (integer) -+ * // -b bufsize frame buffer size (long) -+ * // -t profile different threads separately -+ * // -s simple class names instead of FQN -+ * // -o fmt[,fmt...] output format: summary|traces|flat|collapsed|svg|tree|jfr|html -+ * // --width px SVG width pixels (integer) -+ * // --height px SVG frame height pixels (integer) -+ * // --minwidth px skip frames smaller than px (double) -+ * // --reverse generate stack-reversed FlameGraph / Call tree -+ *

-+ * Example: -+ * If Namenode http address is localhost:9870, and ResourceManager http address is localhost:8088, -+ * ProfileServlet running with async-profiler setup can be accessed with -+ * http://localhost:9870/prof and http://localhost:8088/prof for Namenode and ResourceManager -+ * processes respectively. -+ * Deep dive into some params: -+ * - To collect 10 second CPU profile of current process i.e. Namenode (returns FlameGraph svg) -+ * curl "http://localhost:9870/prof" -+ * - To collect 10 second CPU profile of pid 12345 (returns FlameGraph svg) -+ * curl "http://localhost:9870/prof?pid=12345" (For instance, provide pid of Datanode) -+ * - To collect 30 second CPU profile of pid 12345 (returns FlameGraph svg) -+ * curl "http://localhost:9870/prof?pid=12345&duration=30" -+ * - To collect 1 minute CPU profile of current process and output in tree format (html) -+ * curl "http://localhost:9870/prof?output=tree&duration=60" -+ * - To collect 10 second heap allocation profile of current process (returns FlameGraph svg) -+ * curl "http://localhost:9870/prof?event=alloc" -+ * - To collect lock contention profile of current process (returns FlameGraph svg) -+ * curl "http://localhost:9870/prof?event=lock" -+ *

-+ * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) -+ * // Perf events: -+ * // cpu -+ * // page-faults -+ * // context-switches -+ * // cycles -+ * // instructions -+ * // cache-references -+ * // cache-misses -+ * // branches -+ * // branch-misses -+ * // bus-cycles -+ * // L1-dcache-load-misses -+ * // LLC-load-misses -+ * // dTLB-load-misses -+ * // mem:breakpoint -+ * // trace:tracepoint -+ * // Java events: -+ * // alloc -+ * // lock -+ */ -+@InterfaceAudience.Private -+public class ProfileServlet extends HttpServlet { -+ -+ private static final long serialVersionUID = 1L; -+ private static final Logger LOG = LoggerFactory.getLogger(ProfileServlet.class); -+ -+ static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods"; -+ static final String ACCESS_CONTROL_ALLOW_ORIGIN = "Access-Control-Allow-Origin"; -+ private static final String ALLOWED_METHODS = "GET"; -+ private static final String CONTENT_TYPE_TEXT = "text/plain; charset=utf-8"; -+ private static final String ASYNC_PROFILER_HOME_ENV = "ASYNC_PROFILER_HOME"; -+ private static final String ASYNC_PROFILER_HOME_SYSTEM_PROPERTY = "async.profiler.home"; -+ private static final String PROFILER_SCRIPT = "/profiler.sh"; -+ private static final int DEFAULT_DURATION_SECONDS = 10; -+ private static final AtomicInteger ID_GEN = new AtomicInteger(0); -+ -+ static final String OUTPUT_DIR = System.getProperty("java.io.tmpdir") + "/prof-output-hadoop"; -+ -+ private enum Event { -+ -+ CPU("cpu"), -+ ALLOC("alloc"), -+ LOCK("lock"), -+ PAGE_FAULTS("page-faults"), -+ CONTEXT_SWITCHES("context-switches"), -+ CYCLES("cycles"), -+ INSTRUCTIONS("instructions"), -+ CACHE_REFERENCES("cache-references"), -+ CACHE_MISSES("cache-misses"), -+ BRANCHES("branches"), -+ BRANCH_MISSES("branch-misses"), -+ BUS_CYCLES("bus-cycles"), -+ L1_DCACHE_LOAD_MISSES("L1-dcache-load-misses"), -+ LLC_LOAD_MISSES("LLC-load-misses"), -+ DTLB_LOAD_MISSES("dTLB-load-misses"), -+ 
MEM_BREAKPOINT("mem:breakpoint"), -+ TRACE_TRACEPOINT("trace:tracepoint"); -+ -+ private final String internalName; -+ -+ Event(final String internalName) { -+ this.internalName = internalName; -+ } -+ -+ public String getInternalName() { -+ return internalName; -+ } -+ -+ public static Event fromInternalName(final String name) { -+ for (Event event : values()) { -+ if (event.getInternalName().equalsIgnoreCase(name)) { -+ return event; -+ } -+ } -+ -+ return null; -+ } -+ } -+ -+ private enum Output { -+ SUMMARY, -+ TRACES, -+ FLAT, -+ COLLAPSED, -+ // No SVG in 2.x asyncprofiler. -+ SVG, -+ TREE, -+ JFR, -+ // In 2.x asyncprofiler, this is how you get flamegraphs. -+ HTML -+ } -+ -+ private final Lock profilerLock = new ReentrantLock(); -+ private transient volatile Process process; -+ private final String asyncProfilerHome; -+ private Integer pid; -+ -+ public ProfileServlet() { -+ this.asyncProfilerHome = getAsyncProfilerHome(); -+ this.pid = ProcessUtils.getPid(); -+ LOG.info("Servlet process PID: {} asyncProfilerHome: {}", pid, asyncProfilerHome); -+ } -+ -+ @Override -+ protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) -+ throws IOException { -+ if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(), req, resp)) { -+ resp.setStatus(HttpServletResponse.SC_UNAUTHORIZED); -+ setResponseHeader(resp); -+ resp.getWriter().write("Unauthorized: Instrumentation access is not allowed!"); -+ return; -+ } -+ -+ // make sure async profiler home is set -+ if (asyncProfilerHome == null || asyncProfilerHome.trim().isEmpty()) { -+ resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -+ setResponseHeader(resp); -+ resp.getWriter().write("ASYNC_PROFILER_HOME env is not set.\n\n" -+ + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" -+ + "environment is properly configured."); -+ return; -+ } -+ -+ // if pid is explicitly specified, use it else default to current process -+ pid = 
getInteger(req, "pid", pid); -+ -+ // if pid is not specified in query param and if current process pid cannot be determined -+ if (pid == null) { -+ resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -+ setResponseHeader(resp); -+ resp.getWriter().write( -+ "'pid' query parameter unspecified or unable to determine PID of current process."); -+ return; -+ } -+ -+ final int duration = getInteger(req, "duration", DEFAULT_DURATION_SECONDS); -+ final Output output = getOutput(req); -+ final Event event = getEvent(req); -+ final Long interval = getLong(req, "interval"); -+ final Integer jstackDepth = getInteger(req, "jstackdepth", null); -+ final Long bufsize = getLong(req, "bufsize"); -+ final boolean thread = req.getParameterMap().containsKey("thread"); -+ final boolean simple = req.getParameterMap().containsKey("simple"); -+ final Integer width = getInteger(req, "width", null); -+ final Integer height = getInteger(req, "height", null); -+ final Double minwidth = getMinWidth(req); -+ final boolean reverse = req.getParameterMap().containsKey("reverse"); -+ -+ if (process == null || !process.isAlive()) { -+ try { -+ int lockTimeoutSecs = 3; -+ if (profilerLock.tryLock(lockTimeoutSecs, TimeUnit.SECONDS)) { -+ try { -+ File outputFile = new File(OUTPUT_DIR, -+ "async-prof-pid-" + pid + "-" + event.name().toLowerCase() + "-" + ID_GEN -+ .incrementAndGet() + "." 
+ output.name().toLowerCase()); -+ List cmd = new ArrayList<>(); -+ cmd.add(asyncProfilerHome + PROFILER_SCRIPT); -+ cmd.add("-e"); -+ cmd.add(event.getInternalName()); -+ cmd.add("-d"); -+ cmd.add("" + duration); -+ cmd.add("-o"); -+ cmd.add(output.name().toLowerCase()); -+ cmd.add("-f"); -+ cmd.add(outputFile.getAbsolutePath()); -+ if (interval != null) { -+ cmd.add("-i"); -+ cmd.add(interval.toString()); -+ } -+ if (jstackDepth != null) { -+ cmd.add("-j"); -+ cmd.add(jstackDepth.toString()); -+ } -+ if (bufsize != null) { -+ cmd.add("-b"); -+ cmd.add(bufsize.toString()); -+ } -+ if (thread) { -+ cmd.add("-t"); -+ } -+ if (simple) { -+ cmd.add("-s"); -+ } -+ if (width != null) { -+ cmd.add("--width"); -+ cmd.add(width.toString()); -+ } -+ if (height != null) { -+ cmd.add("--height"); -+ cmd.add(height.toString()); -+ } -+ if (minwidth != null) { -+ cmd.add("--minwidth"); -+ cmd.add(minwidth.toString()); -+ } -+ if (reverse) { -+ cmd.add("--reverse"); -+ } -+ cmd.add(pid.toString()); -+ process = ProcessUtils.runCmdAsync(cmd); -+ -+ // set response and set refresh header to output location -+ setResponseHeader(resp); -+ resp.setStatus(HttpServletResponse.SC_ACCEPTED); -+ String relativeUrl = "/prof-output-hadoop/" + outputFile.getName(); -+ resp.getWriter().write("Started [" + event.getInternalName() -+ + "] profiling. This page will automatically redirect to " + relativeUrl + " after " -+ + duration + " seconds. " -+ + "If empty diagram and Linux 4.6+, see 'Basic Usage' section on the Async " -+ + "Profiler Home Page, https://github.com/jvm-profiling-tools/async-profiler." 
-+ + "\n\nCommand:\n" + Joiner.on(" ").join(cmd)); -+ -+ // to avoid auto-refresh by ProfileOutputServlet, refreshDelay can be specified -+ // via url param -+ int refreshDelay = getInteger(req, "refreshDelay", 0); -+ -+ // instead of sending redirect, set auto-refresh so that browsers will refresh -+ // with redirected url -+ resp.setHeader("Refresh", (duration + refreshDelay) + ";" + relativeUrl); -+ resp.getWriter().flush(); -+ } finally { -+ profilerLock.unlock(); -+ } -+ } else { -+ setResponseHeader(resp); -+ resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -+ resp.getWriter() -+ .write("Unable to acquire lock. Another instance of profiler might be running."); -+ LOG.warn("Unable to acquire lock in {} seconds. Another instance of profiler might be" -+ + " running.", lockTimeoutSecs); -+ } -+ } catch (InterruptedException e) { -+ LOG.warn("Interrupted while acquiring profile lock.", e); -+ resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -+ } -+ } else { -+ setResponseHeader(resp); -+ resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -+ resp.getWriter().write("Another instance of profiler is already running."); -+ } -+ } -+ -+ private Integer getInteger(final HttpServletRequest req, final String param, -+ final Integer defaultValue) { -+ final String value = req.getParameter(param); -+ if (value != null) { -+ try { -+ return Integer.valueOf(value); -+ } catch (NumberFormatException e) { -+ return defaultValue; -+ } -+ } -+ return defaultValue; -+ } -+ -+ private Long getLong(final HttpServletRequest req, final String param) { -+ final String value = req.getParameter(param); -+ if (value != null) { -+ try { -+ return Long.valueOf(value); -+ } catch (NumberFormatException e) { -+ return null; -+ } -+ } -+ return null; -+ } -+ -+ private Double getMinWidth(final HttpServletRequest req) { -+ final String value = req.getParameter("minwidth"); -+ if (value != null) { -+ try { -+ return Double.valueOf(value); -+ } catch 
(NumberFormatException e) { -+ return null; -+ } -+ } -+ return null; -+ } -+ -+ private Event getEvent(final HttpServletRequest req) { -+ final String eventArg = req.getParameter("event"); -+ if (eventArg != null) { -+ Event event = Event.fromInternalName(eventArg); -+ return event == null ? Event.CPU : event; -+ } -+ return Event.CPU; -+ } -+ -+ private Output getOutput(final HttpServletRequest req) { -+ final String outputArg = req.getParameter("output"); -+ if (req.getParameter("output") != null) { -+ try { -+ return Output.valueOf(outputArg.trim().toUpperCase()); -+ } catch (IllegalArgumentException e) { -+ return Output.HTML; -+ } -+ } -+ return Output.HTML; -+ } -+ -+ static void setResponseHeader(final HttpServletResponse response) { -+ response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, ALLOWED_METHODS); -+ response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*"); -+ response.setContentType(CONTENT_TYPE_TEXT); -+ } -+ -+ static String getAsyncProfilerHome() { -+ String asyncProfilerHome = System.getenv(ASYNC_PROFILER_HOME_ENV); -+ // if ENV is not set, see if -Dasync.profiler.home=/path/to/async/profiler/home is set -+ if (asyncProfilerHome == null || asyncProfilerHome.trim().isEmpty()) { -+ asyncProfilerHome = System.getProperty(ASYNC_PROFILER_HOME_SYSTEM_PROPERTY); -+ } -+ -+ return asyncProfilerHome; -+ } -+ -+} -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -new file mode 100644 -index 000000000000..459485ffa5b5 ---- /dev/null -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -@@ -0,0 +1,44 @@ -+/* -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. 
The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * "License"); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an "AS IS" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.hadoop.http; -+ -+import java.io.IOException; -+import javax.servlet.http.HttpServlet; -+import javax.servlet.http.HttpServletRequest; -+import javax.servlet.http.HttpServletResponse; -+ -+import org.apache.hadoop.classification.InterfaceAudience; -+ -+/** -+ * Servlet for disabled async-profiler. -+ */ -+@InterfaceAudience.Private -+public class ProfilerDisabledServlet extends HttpServlet { -+ -+ @Override -+ protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) -+ throws IOException { -+ resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -+ ProfileServlet.setResponseHeader(resp); -+ resp.getWriter().write("The profiler servlet was disabled at startup.\n\n" -+ + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" -+ + "environment is properly configured."); -+ } -+ -+} -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java -new file mode 100644 -index 000000000000..cf653b9c912c ---- /dev/null -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java -@@ -0,0 +1,74 @@ -+/* -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. 
See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * "License"); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an "AS IS" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.hadoop.util; -+ -+import java.io.IOException; -+import java.lang.management.ManagementFactory; -+import java.util.List; -+ -+import org.slf4j.Logger; -+import org.slf4j.LoggerFactory; -+ -+import org.apache.hadoop.classification.InterfaceAudience; -+ -+/** -+ * Process related utilities. -+ */ -+@InterfaceAudience.Private -+public final class ProcessUtils { -+ -+ private static final Logger LOG = LoggerFactory.getLogger(ProcessUtils.class); -+ -+ private ProcessUtils() { -+ // no-op -+ } -+ -+ public static Integer getPid() { -+ // JVM_PID can be exported in service start script -+ String pidStr = System.getenv("JVM_PID"); -+ -+ // In case if it is not set correctly, fallback to mxbean which is implementation specific. 
-+ if (pidStr == null || pidStr.trim().isEmpty()) { -+ String name = ManagementFactory.getRuntimeMXBean().getName(); -+ if (name != null) { -+ int idx = name.indexOf("@"); -+ if (idx != -1) { -+ pidStr = name.substring(0, name.indexOf("@")); -+ } -+ } -+ } -+ try { -+ if (pidStr != null) { -+ return Integer.valueOf(pidStr); -+ } -+ } catch (NumberFormatException ignored) { -+ // ignore -+ } -+ return null; -+ } -+ -+ public static Process runCmdAsync(List cmd) { -+ try { -+ LOG.info("Running command async: {}", cmd); -+ return new ProcessBuilder(cmd).inheritIO().start(); -+ } catch (IOException e) { -+ throw new IllegalStateException(e); -+ } -+ } -+} -diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml -index 5296e882df55..b1ab3e390793 100644 ---- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml -+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml -@@ -69,7 +69,7 @@ - false - - Indicates if administrator ACLs are required to access -- instrumentation servlets (JMX, METRICS, CONF, STACKS). -+ instrumentation servlets (JMX, METRICS, CONF, STACKS, PROF). - - - -diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md b/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md -new file mode 100644 -index 000000000000..4b93cc219a5e ---- /dev/null -+++ b/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md -@@ -0,0 +1,145 @@ -+ -+ -+Async Profiler Servlet for Hadoop -+======================================== -+ -+ -+ -+Purpose -+------- -+ -+This document describes how to configure and use async profiler -+with Hadoop applications. -+Async profiler is a low overhead sampling profiler for Java that -+does not suffer from Safepoint bias problem. It features -+HotSpot-specific APIs to collect stack traces and to track memory -+allocations. 
The profiler works with OpenJDK, Oracle JDK and other -+Java runtimes based on the HotSpot JVM. -+ -+Hadoop profiler servlet supports Async Profiler major versions -+1.x and 2.x. -+ -+Prerequisites -+------------- -+ -+Make sure Hadoop is installed, configured and setup correctly. -+For more information see: -+ -+* [Single Node Setup](./SingleCluster.html) for first-time users. -+* [Cluster Setup](./ClusterSetup.html) for large, distributed clusters. -+ -+Go to https://github.com/jvm-profiling-tools/async-profiler, -+download a release appropriate for your platform, and install -+on every cluster host. -+ -+Set `ASYNC_PROFILER_HOME` in the environment (put it in hadoop-env.sh) -+to the root directory of the async-profiler install location, or pass -+it on the Hadoop daemon's command line as a system property as -+`-Dasync.profiler.home=/path/to/async-profiler`. -+ -+ -+Usage -+-------- -+ -+Once the prerequisites have been satisfied, access to the async-profiler -+is available by using Namenode or ResourceManager UI. -+ -+Following options from async-profiler can be specified as query paramater. -+* `-e event` profiling event: cpu|alloc|lock|cache-misses etc. 
-+* `-d duration` run profiling for 'duration' seconds (integer) -+* `-i interval` sampling interval in nanoseconds (long) -+* `-j jstackdepth` maximum Java stack depth (integer) -+* `-b bufsize` frame buffer size (long) -+* `-t` profile different threads separately -+* `-s` simple class names instead of FQN -+* `-o fmt[,fmt...]` output format: summary|traces|flat|collapsed|svg|tree|jfr|html -+* `--width px` SVG width pixels (integer) -+* `--height px` SVG frame height pixels (integer) -+* `--minwidth px` skip frames smaller than px (double) -+* `--reverse` generate stack-reversed FlameGraph / Call tree -+ -+ -+Example: -+If Namenode http address is localhost:9870, and ResourceManager http -+address is localhost:8088, ProfileServlet running with async-profiler -+setup can be accessed with http://localhost:9870/prof and -+http://localhost:8088/prof for Namenode and ResourceManager processes -+respectively. -+ -+Diving deep into some params: -+ -+* To collect 10 second CPU profile of current process -+ (returns FlameGraph svg) -+ * `curl http://localhost:9870/prof` (FlameGraph svg for Namenode) -+ * `curl http://localhost:8088/prof` (FlameGraph svg for ResourceManager) -+* To collect 10 second CPU profile of pid 12345 (returns FlameGraph svg) -+ * `curl http://localhost:9870/prof?pid=12345` (For instance, provide -+ pid of Datanode here) -+* To collect 30 second CPU profile of pid 12345 (returns FlameGraph svg) -+ * `curl http://localhost:9870/prof?pid=12345&duration=30` -+* To collect 1 minute CPU profile of current process and output in tree -+ format (html) -+ * `curl http://localhost:9870/prof?output=tree&duration=60` -+* To collect 10 second heap allocation profile of current process -+ (returns FlameGraph svg) -+ * `curl http://localhost:9870/prof?event=alloc` -+* To collect lock contention profile of current process -+ (returns FlameGraph svg) -+ * `curl http://localhost:9870/prof?event=lock` -+ -+ -+The following event types are supported by async-profiler. 
-+Use the 'event' parameter to specify. Default is 'cpu'. -+Not all operating systems will support all types. -+ -+Perf events: -+ -+* cpu -+* page-faults -+* context-switches -+* cycles -+* instructions -+* cache-references -+* cache-misses -+* branches -+* branch-misses -+* bus-cycles -+* L1-dcache-load-misses -+* LLC-load-misses -+* dTLB-load-misses -+ -+Java events: -+ -+* alloc -+* lock -+ -+The following output formats are supported. -+Use the 'output' parameter to specify. Default is 'flamegraph'. -+ -+Output formats: -+ -+* summary: A dump of basic profiling statistics. -+* traces: Call traces. -+* flat: Flat profile (top N hot methods). -+* collapsed: Collapsed call traces in the format used by FlameGraph -+ script. This is a collection of call stacks, where each line is a -+ semicolon separated list of frames followed by a counter. -+* svg: FlameGraph in SVG format. -+* tree: Call tree in HTML format. -+* jfr: Call traces in Java Flight Recorder format. -+ -+The 'duration' parameter specifies how long to collect trace data -+before generating output, specified in seconds. The default is 10 seconds. -+ -diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java -new file mode 100644 -index 000000000000..ce068bb6f1cf ---- /dev/null -+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java -@@ -0,0 +1,95 @@ -+/* -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * "License"); you may not use this file except in compliance -+ * with the License. 
You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an "AS IS" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.hadoop.http; -+ -+import java.io.IOException; -+import java.net.HttpURLConnection; -+import java.net.URL; -+import javax.servlet.http.HttpServletResponse; -+ -+import org.junit.AfterClass; -+import org.junit.BeforeClass; -+import org.junit.Test; -+ -+/** -+ * Small test to cover default disabled prof endpoint. -+ */ -+public class TestDisabledProfileServlet extends HttpServerFunctionalTest { -+ -+ private static HttpServer2 server; -+ private static URL baseUrl; -+ -+ @BeforeClass -+ public static void setup() throws Exception { -+ server = createTestServer(); -+ server.start(); -+ baseUrl = getServerURL(server); -+ } -+ -+ @AfterClass -+ public static void cleanup() throws Exception { -+ server.stop(); -+ } -+ -+ @Test -+ public void testQuery() throws Exception { -+ try { -+ readOutput(new URL(baseUrl, "/prof")); -+ throw new IllegalStateException("Should not reach here"); -+ } catch (IOException e) { -+ assertTrue(e.getMessage() -+ .contains(HttpServletResponse.SC_INTERNAL_SERVER_ERROR + " for URL: " + baseUrl)); -+ } -+ -+ // CORS headers -+ HttpURLConnection conn = -+ (HttpURLConnection) new URL(baseUrl, "/prof").openConnection(); -+ assertEquals("GET", conn.getHeaderField(ProfileServlet.ACCESS_CONTROL_ALLOW_METHODS)); -+ assertNotNull(conn.getHeaderField(ProfileServlet.ACCESS_CONTROL_ALLOW_ORIGIN)); -+ conn.disconnect(); -+ } -+ -+ @Test -+ public void testRequestMethods() throws IOException { -+ HttpURLConnection connection = getConnection("PUT"); -+ assertEquals("Unexpected response code", 
HttpServletResponse.SC_METHOD_NOT_ALLOWED, -+ connection.getResponseCode()); -+ connection.disconnect(); -+ connection = getConnection("POST"); -+ assertEquals("Unexpected response code", HttpServletResponse.SC_METHOD_NOT_ALLOWED, -+ connection.getResponseCode()); -+ connection.disconnect(); -+ connection = getConnection("DELETE"); -+ assertEquals("Unexpected response code", HttpServletResponse.SC_METHOD_NOT_ALLOWED, -+ connection.getResponseCode()); -+ connection.disconnect(); -+ connection = getConnection("GET"); -+ assertEquals("Unexpected response code", HttpServletResponse.SC_INTERNAL_SERVER_ERROR, -+ connection.getResponseCode()); -+ connection.disconnect(); -+ } -+ -+ private HttpURLConnection getConnection(final String method) throws IOException { -+ URL url = new URL(baseUrl, "/prof"); -+ HttpURLConnection conn = (HttpURLConnection) url.openConnection(); -+ conn.setRequestMethod(method); -+ return conn; -+ } -+ -+} -diff --git a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm -index 0686e788fe0d..a06ccdd25eb2 100644 ---- a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm -+++ b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm -@@ -1205,9 +1205,10 @@ Name | Description - /logs | Display log files - /stacks | Display JVM stacks - /static/index.html | The static home page -+/prof | Async Profiler endpoint - - To control the access to servlet `/conf`, `/jmx`, `/logLevel`, `/logs`, --and `/stacks`, configure the following properties in `kms-site.xml`: -+`/stacks` and `/prof`, configure the following properties in `kms-site.xml`: - - ```xml - -@@ -1221,7 +1222,7 @@ and `/stacks`, configure the following properties in `kms-site.xml`: - true - - Indicates if administrator ACLs are required to access -- instrumentation servlets (JMX, METRICS, CONF, STACKS). -+ instrumentation servlets (JMX, METRICS, CONF, STACKS, PROF). 
- - - -diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm -index 2d0a5b8cd2e7..66f74d13d25a 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm -+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm -@@ -162,9 +162,10 @@ Name | Description - /logs | Display log files - /stacks | Display JVM stacks - /static/index.html | The static home page -+/prof | Async Profiler endpoint - - To control the access to servlet `/conf`, `/jmx`, `/logLevel`, `/logs`, --and `/stacks`, configure the following properties in `httpfs-site.xml`: -+`/stacks` and `/prof`, configure the following properties in `httpfs-site.xml`: - - ```xml - -@@ -178,7 +179,7 @@ and `/stacks`, configure the following properties in `httpfs-site.xml`: - true - - Indicates if administrator ACLs are required to access -- instrumentation servlets (JMX, METRICS, CONF, STACKS). -+ instrumentation servlets (JMX, METRICS, CONF, STACKS, PROF). - - - -diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml -index 5e0cf66b4f92..f74a5dd4f7b5 100644 ---- a/hadoop-project/src/site/site.xml -+++ b/hadoop-project/src/site/site.xml -@@ -72,6 +72,7 @@ - - - -+ - - -

diff --git a/hadoop/stackable/patches/3.2.4/003-HADOOP-18077-3.2.4.patch b/hadoop/stackable/patches/3.2.4/003-HADOOP-18077-3.2.4.patch deleted file mode 100644 index a26392f4c..000000000 --- a/hadoop/stackable/patches/3.2.4/003-HADOOP-18077-3.2.4.patch +++ /dev/null @@ -1,52 +0,0 @@ -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -index 39ca69c85f4f..ff07076ef614 100644 ---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -639,7 +639,7 @@ private void initializeWebServer(String name, String hostName, - } - } - -- addAsyncProfilerServlet(contexts); -+ addAsyncProfilerServlet(contexts, conf); - } - - private void addListener(ServerConnector connector) { -@@ -786,7 +786,8 @@ protected void addDefaultServlets() { - addServlet("conf", "/conf", ConfServlet.class); - } - -- private void addAsyncProfilerServlet(ContextHandlerCollection contexts) throws IOException { -+ private void addAsyncProfilerServlet(ContextHandlerCollection contexts, Configuration conf) -+ throws IOException { - final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); - if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { - addServlet("prof", "/prof", ProfileServlet.class); -@@ -798,6 +799,7 @@ private void addAsyncProfilerServlet(ContextHandlerCollection contexts) throws I - genCtx.addServlet(ProfileOutputServlet.class, "/*"); - genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); - genCtx.setDisplayName("prof-output-hadoop"); -+ setContextAttributes(genCtx, conf); - } else { - addServlet("prof", "/prof", ProfilerDisabledServlet.class); - LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " -diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -index 459485ffa5b5..c488b574990c 100644 ---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -@@ -36,9 +36,15 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res - throws IOException { - resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - ProfileServlet.setResponseHeader(resp); -+ // TODO : Replace github.com link with -+ // https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/ -+ // AsyncProfilerServlet.html once Async profiler changes are released -+ // in 3.x (3.4.0 as of today). - resp.getWriter().write("The profiler servlet was disabled at startup.\n\n" - + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" -- + "environment is properly configured."); -+ + "environment is properly configured. 
\n\n" -+ + "For more details, please refer to: https://github.com/apache/hadoop/blob/trunk/" -+ + "hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md"); - } - - } diff --git a/hadoop/stackable/patches/3.2.4/004-add-perf-event-itimer-3.2.4.patch b/hadoop/stackable/patches/3.2.4/004-add-perf-event-itimer-3.2.4.patch deleted file mode 100644 index ff22ed151..000000000 --- a/hadoop/stackable/patches/3.2.4/004-add-perf-event-itimer-3.2.4.patch +++ /dev/null @@ -1,20 +0,0 @@ -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -index fc0ec7736ed8..e324ad6d49fd 100644 ---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -@@ -76,6 +76,7 @@ - * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) - * // Perf events: - * // cpu -+ * // itimer - * // page-faults - * // context-switches - * // cycles -@@ -115,6 +116,7 @@ public class ProfileServlet extends HttpServlet { - private enum Event { - - CPU("cpu"), -+ ITIMER("itimer"), - ALLOC("alloc"), - LOCK("lock"), - PAGE_FAULTS("page-faults"), diff --git a/hadoop/stackable/patches/3.3.4/001-YARN-11527-3.3.4.patch b/hadoop/stackable/patches/3.3.4/0001-YARN-11527-Update-node.js.patch similarity index 62% rename from hadoop/stackable/patches/3.3.4/001-YARN-11527-3.3.4.patch rename to hadoop/stackable/patches/3.3.4/0001-YARN-11527-Update-node.js.patch index 986a64b9a..d50669cc9 100644 --- a/hadoop/stackable/patches/3.3.4/001-YARN-11527-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.4/0001-YARN-11527-Update-node.js.patch @@ -1,5 +1,14 @@ +From ebfaedd7b03927237db87a263d16c17b6aea00ad Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Thu, 21 Dec 2023 13:51:13 +0100 
+Subject: YARN-11527: Update node.js + +--- + hadoop-project/pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml -index 0b2f6f17157d..9dc8b653eb93 100644 +index 0b2f6f1715..082834e261 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -212,7 +212,7 @@ diff --git a/hadoop/stackable/patches/3.3.4/002-datanode-registration-override-3.3.4.patch b/hadoop/stackable/patches/3.3.4/0002-Allow-overriding-datanode-registration-addresses.patch similarity index 92% rename from hadoop/stackable/patches/3.3.4/002-datanode-registration-override-3.3.4.patch rename to hadoop/stackable/patches/3.3.4/0002-Allow-overriding-datanode-registration-addresses.patch index 2acdc9610..f7d355d84 100644 --- a/hadoop/stackable/patches/3.3.4/002-datanode-registration-override-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.4/0002-Allow-overriding-datanode-registration-addresses.patch @@ -1,5 +1,17 @@ +From 570804ae570faed84b98ab67e9ff7534f458caec Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Natalie=20Klestrup=20R=C3=B6ijezon?= +Date: Thu, 11 Jan 2024 14:01:02 +0100 +Subject: Allow overriding datanode registration addresses + +--- + .../org/apache/hadoop/hdfs/DFSConfigKeys.java | 9 +++ + .../blockmanagement/DatanodeManager.java | 43 +++++++----- + .../hadoop/hdfs/server/datanode/DNConf.java | 70 +++++++++++++++++++ + .../hadoop/hdfs/server/datanode/DataNode.java | 35 ++++++++-- + 4 files changed, 135 insertions(+), 22 deletions(-) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -index 7196def4221..2c00fb4fb1a 100755 +index 7196def422..2c00fb4fb1 100755 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java @@ -139,6 +139,13 @@ public class 
DFSConfigKeys extends CommonConfigurationKeys { @@ -26,7 +38,7 @@ index 7196def4221..2c00fb4fb1a 100755 public static final boolean DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_DEFAULT = true; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java -index 44dffcbed11..54f6d63fa78 100644 +index 44dffcbed1..54f6d63fa7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java @@ -179,6 +179,8 @@ public class DatanodeManager { @@ -50,7 +62,7 @@ index 44dffcbed11..54f6d63fa78 100644 this.checkIpHostnameInRegistration = conf.getBoolean( DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_KEY, DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_DEFAULT); -@@ -1133,27 +1140,29 @@ void startAdminOperationIfNecessary(DatanodeDescriptor nodeReg) { +@@ -1133,27 +1140,29 @@ public class DatanodeManager { */ public void registerDatanode(DatanodeRegistration nodeReg) throws DisallowedDatanodeException, UnresolvedTopologyException { @@ -98,7 +110,7 @@ index 44dffcbed11..54f6d63fa78 100644 // it will be disallowed from registering. 
if (!hostConfigManager.isIncluded(nodeReg)) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java -index d61a17e83fe..eaf4a6d7c1d 100644 +index d61a17e83f..eaf4a6d7c1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java @@ -99,6 +99,11 @@ public class DNConf { @@ -113,7 +125,7 @@ index d61a17e83fe..eaf4a6d7c1d 100644 final boolean overwriteDownstreamDerivedQOP; private final boolean pmemCacheRecoveryEnabled; -@@ -187,6 +192,11 @@ public DNConf(final Configurable dn) { +@@ -187,6 +192,11 @@ public class DNConf { connectToDnViaHostname = getConf().getBoolean( DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); @@ -125,7 +137,7 @@ index d61a17e83fe..eaf4a6d7c1d 100644 this.blockReportInterval = getConf().getLong( DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT); -@@ -360,6 +370,66 @@ public boolean getConnectToDnViaHostname() { +@@ -360,6 +370,66 @@ public class DNConf { return connectToDnViaHostname; } @@ -193,10 +205,10 @@ index d61a17e83fe..eaf4a6d7c1d 100644 * Returns socket timeout * diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -index c1507a45120..d253779e70d 100644 +index c1507a4512..2ff7c272cf 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -@@ -82,6 +82,7 @@ +@@ -82,6 +82,7 @@ import java.util.HashSet; import java.util.Iterator; import 
java.util.List; import java.util.Map; @@ -204,7 +216,7 @@ index c1507a45120..d253779e70d 100644 import java.util.Map.Entry; import java.util.Set; import java.util.UUID; -@@ -1556,11 +1557,35 @@ DatanodeRegistration createBPRegistration(NamespaceInfo nsInfo) { +@@ -1556,11 +1557,35 @@ public class DataNode extends ReconfigurableBase NodeType.DATA_NODE); } diff --git a/hadoop/stackable/patches/3.3.6/003-HADOOP-18055-3.3.6.patch b/hadoop/stackable/patches/3.3.4/0003-HADOOP-18055-Add-async-profiler.patch similarity index 94% rename from hadoop/stackable/patches/3.3.6/003-HADOOP-18055-3.3.6.patch rename to hadoop/stackable/patches/3.3.4/0003-HADOOP-18055-Add-async-profiler.patch index b55f8cc90..f79e2a02a 100644 --- a/hadoop/stackable/patches/3.3.6/003-HADOOP-18055-3.3.6.patch +++ b/hadoop/stackable/patches/3.3.4/0003-HADOOP-18055-Add-async-profiler.patch @@ -1,8 +1,33 @@ +From f4a68edacf8afbf51c9ac996fa50623dd71c12b9 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: HADOOP-18055: Add async-profiler + +--- + .../org/apache/hadoop/http/HttpServer2.java | 21 + + .../hadoop/http/ProfileOutputServlet.java | 87 ++++ + .../apache/hadoop/http/ProfileServlet.java | 394 ++++++++++++++++++ + .../hadoop/http/ProfilerDisabledServlet.java | 44 ++ + .../org/apache/hadoop/util/ProcessUtils.java | 74 ++++ + .../src/main/resources/core-default.xml | 2 +- + .../src/site/markdown/AsyncProfilerServlet.md | 145 +++++++ + .../http/TestDisabledProfileServlet.java | 95 +++++ + .../hadoop-kms/src/site/markdown/index.md.vm | 5 +- + .../src/site/markdown/ServerSetup.md.vm | 5 +- + hadoop-project/src/site/site.xml | 1 + + 11 files changed, 868 insertions(+), 5 deletions(-) + create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java + create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java + create mode 100644 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java + create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java + create mode 100644 hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md + create mode 100644 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -index 8dadbe390a56..1f66a7e809c8 100644 +index b40d60cf50..a7777f85cc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -27,6 +27,7 @@ +@@ -27,6 +27,7 @@ import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; @@ -10,7 +35,7 @@ index 8dadbe390a56..1f66a7e809c8 100644 import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; -@@ -744,6 +745,26 @@ private void initializeWebServer(String name, String hostName, +@@ -734,6 +735,26 @@ public final class HttpServer2 implements FilterContainer { addDefaultServlets(); addPrometheusServlet(conf); @@ -39,7 +64,7 @@ index 8dadbe390a56..1f66a7e809c8 100644 private void addPrometheusServlet(Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java new file mode 100644 -index 000000000000..1ecc21f3753c +index 0000000000..1ecc21f375 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java @@ -0,0 +1,87 @@ @@ -132,7 +157,7 @@ index 
000000000000..1ecc21f3753c +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java new file mode 100644 -index 000000000000..fc0ec7736ed8 +index 0000000000..fc0ec7736e --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java @@ -0,0 +1,394 @@ @@ -532,7 +557,7 @@ index 000000000000..fc0ec7736ed8 +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java new file mode 100644 -index 000000000000..459485ffa5b5 +index 0000000000..459485ffa5 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java @@ -0,0 +1,44 @@ @@ -582,7 +607,7 @@ index 000000000000..459485ffa5b5 +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java new file mode 100644 -index 000000000000..cf653b9c912c +index 0000000000..cf653b9c91 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java @@ -0,0 +1,74 @@ @@ -661,7 +686,7 @@ index 000000000000..cf653b9c912c + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml -index b1a25ce1f008..8068bae96918 100644 +index f94fdebd03..56bec769c1 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml +++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml @@ -69,7 +69,7 @@ @@ -675,7 +700,7 @@ index b1a25ce1f008..8068bae96918 100644 diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md b/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md new file mode 100644 -index 000000000000..4b93cc219a5e +index 0000000000..4b93cc219a --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md @@ -0,0 +1,145 @@ @@ -826,7 +851,7 @@ index 000000000000..4b93cc219a5e + diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java new file mode 100644 -index 000000000000..ce068bb6f1cf +index 0000000000..ce068bb6f1 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java @@ -0,0 +1,95 @@ @@ -926,7 +951,7 @@ index 000000000000..ce068bb6f1cf + +} diff --git a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm -index 6ea21d5cf407..09375d5aab52 100644 +index 6ea21d5cf4..09375d5aab 100644 --- a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm +++ b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm @@ -1208,9 +1208,10 @@ Name | Description @@ -951,7 +976,7 @@ index 6ea21d5cf407..09375d5aab52 100644 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm -index 2d0a5b8cd2e7..e97de0275ca2 100644 +index 2d0a5b8cd2..e97de0275c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm @@ -162,9 +162,10 @@ Name | Description @@ -976,7 +1001,7 @@ index 2d0a5b8cd2e7..e97de0275ca2 100644 diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml -index 
b53cbd2a0565..0793e9777128 100644 +index e2d149da2e..c5413d9089 100644 --- a/hadoop-project/src/site/site.xml +++ b/hadoop-project/src/site/site.xml @@ -74,6 +74,7 @@ diff --git a/hadoop/stackable/patches/3.3.6/004-HADOOP-18077-3.3.6.patch b/hadoop/stackable/patches/3.3.4/0004-Backport-HADOOP-18077.patch similarity index 79% rename from hadoop/stackable/patches/3.3.6/004-HADOOP-18077-3.3.6.patch rename to hadoop/stackable/patches/3.3.4/0004-Backport-HADOOP-18077.patch index 7c476925e..4f4712b40 100644 --- a/hadoop/stackable/patches/3.3.6/004-HADOOP-18077-3.3.6.patch +++ b/hadoop/stackable/patches/3.3.4/0004-Backport-HADOOP-18077.patch @@ -1,8 +1,18 @@ +From 0ea496a764360c0d4143f9ee764b9c483ddf6d34 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: Backport HADOOP-18077 + +--- + .../src/main/java/org/apache/hadoop/http/HttpServer2.java | 6 ++++-- + .../org/apache/hadoop/http/ProfilerDisabledServlet.java | 8 +++++++- + 2 files changed, 11 insertions(+), 3 deletions(-) + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -index 1f66a7e809c8..96794086cb87 100644 +index a7777f85cc..bbe0d9993d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -745,10 +745,11 @@ private void initializeWebServer(String name, String hostName, +@@ -735,10 +735,11 @@ public final class HttpServer2 implements FilterContainer { addDefaultServlets(); addPrometheusServlet(conf); @@ -16,7 +26,7 @@ index 1f66a7e809c8..96794086cb87 100644 final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { addServlet("prof", "/prof", ProfileServlet.class); -@@ -760,6 +761,7 @@ private void 
addAsyncProfilerServlet(ContextHandlerCollection contexts) throws I +@@ -750,6 +751,7 @@ public final class HttpServer2 implements FilterContainer { genCtx.addServlet(ProfileOutputServlet.class, "/*"); genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); genCtx.setDisplayName("prof-output-hadoop"); @@ -25,10 +35,10 @@ index 1f66a7e809c8..96794086cb87 100644 addServlet("prof", "/prof", ProfilerDisabledServlet.class); LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -index 459485ffa5b5..c488b574990c 100644 +index 459485ffa5..c488b57499 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -@@ -36,9 +36,15 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res +@@ -36,9 +36,15 @@ public class ProfilerDisabledServlet extends HttpServlet { throws IOException { resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); ProfileServlet.setResponseHeader(resp); diff --git a/hadoop/stackable/patches/3.3.4/005-add-perf-event-itimer-3.3.4.patch b/hadoop/stackable/patches/3.3.4/0005-Async-profiler-also-grab-itimer-events.patch similarity index 66% rename from hadoop/stackable/patches/3.3.4/005-add-perf-event-itimer-3.3.4.patch rename to hadoop/stackable/patches/3.3.4/0005-Async-profiler-also-grab-itimer-events.patch index ff22ed151..1f2694209 100644 --- a/hadoop/stackable/patches/3.3.4/005-add-perf-event-itimer-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.4/0005-Async-profiler-also-grab-itimer-events.patch @@ -1,8 +1,17 @@ +From 395c0da87fd16ca8e00febecbabdee1fb3f48895 Mon Sep 17 00:00:00 2001 +From: Siegfried 
Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: Async-profiler: also grab itimer events + +--- + .../src/main/java/org/apache/hadoop/http/ProfileServlet.java | 2 ++ + 1 file changed, 2 insertions(+) + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -index fc0ec7736ed8..e324ad6d49fd 100644 +index fc0ec7736e..e324ad6d49 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -@@ -76,6 +76,7 @@ +@@ -76,6 +76,7 @@ import org.apache.hadoop.util.ProcessUtils; * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) * // Perf events: * // cpu diff --git a/hadoop/stackable/patches/3.3.4/006-HDFS-17378-3.3.4.patch b/hadoop/stackable/patches/3.3.4/0006-HDFS-17378-Fix-missing-operationType-for-some-operat.patch similarity index 77% rename from hadoop/stackable/patches/3.3.4/006-HDFS-17378-3.3.4.patch rename to hadoop/stackable/patches/3.3.4/0006-HDFS-17378-Fix-missing-operationType-for-some-operat.patch index 2c8c7e533..057158e41 100644 --- a/hadoop/stackable/patches/3.3.4/006-HDFS-17378-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.4/0006-HDFS-17378-Fix-missing-operationType-for-some-operat.patch @@ -1,8 +1,18 @@ +From b37250b77291531fea062ae1dc85429e95d854d1 Mon Sep 17 00:00:00 2001 +From: Sebastian Bernauer +Date: Thu, 15 Feb 2024 15:33:43 +0100 +Subject: HDFS-17378: Fix missing operationType for some operations in + authorizer + +--- + .../hdfs/server/namenode/FSNamesystem.java | 41 +++++++++++-------- + 1 file changed, 24 insertions(+), 17 deletions(-) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -index 243f62295ca4..ba3caa6b6c04 100644 +index 243f62295c..ba3caa6b6c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -@@ -2501,15 +2501,16 @@ void unsetStoragePolicy(String src) throws IOException { +@@ -2501,15 +2501,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, * @throws IOException */ BlockStoragePolicy getStoragePolicy(String src) throws IOException { @@ -21,7 +31,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 } } -@@ -2529,15 +2530,16 @@ BlockStoragePolicy[] getStoragePolicies() throws IOException { +@@ -2529,15 +2530,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, } long getPreferredBlockSize(String src) throws IOException { @@ -40,7 +50,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 } } -@@ -2590,7 +2592,6 @@ HdfsFileStatus startFile(String src, PermissionStatus permissions, +@@ -2590,7 +2592,6 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, boolean createParent, short replication, long blockSize, CryptoProtocolVersion[] supportedVersions, String ecPolicyName, String storagePolicy, boolean logRetryCache) throws IOException { @@ -48,7 +58,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 HdfsFileStatus status; try { status = startFileInt(src, permissions, holder, clientMachine, flag, -@@ -2610,6 +2611,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2610,6 +2611,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, long blockSize, CryptoProtocolVersion[] supportedVersions, String ecPolicyName, String storagePolicy, boolean logRetryCache) throws IOException { @@ -56,7 +66,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 if (NameNode.stateChangeLog.isDebugEnabled()) { StringBuilder builder = 
new StringBuilder(); builder.append("DIR* NameSystem.startFile: src=").append(src) -@@ -2647,7 +2649,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2647,7 +2649,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, checkOperation(OperationCategory.WRITE); final FSPermissionChecker pc = getPermissionChecker(); @@ -65,7 +75,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -2711,7 +2713,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2711,7 +2713,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, dir.writeUnlock(); } } finally { @@ -74,7 +84,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 // There might be transactions logged while trying to recover the lease. // They need to be sync'ed even when an exception was thrown. if (!skipSync) { -@@ -2740,10 +2742,11 @@ private HdfsFileStatus startFileInt(String src, +@@ -2740,10 +2742,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ boolean recoverLease(String src, String holder, String clientMachine) throws IOException { @@ -87,7 +97,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -2764,7 +2767,7 @@ boolean recoverLease(String src, String holder, String clientMachine) +@@ -2764,7 +2767,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, skipSync = true; throw se; } finally { @@ -96,7 +106,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 // There might be transactions logged while trying to recover the lease. // They need to be sync'ed even when an exception was thrown. 
if (!skipSync) { -@@ -2981,6 +2984,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -2981,6 +2984,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, final Set excludes, final int numAdditionalNodes, final String clientName ) throws IOException { @@ -104,7 +114,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 //check if the feature is enabled dtpReplaceDatanodeOnFailure.checkEnabled(); -@@ -2992,7 +2996,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -2992,7 +2996,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, final BlockType blockType; checkOperation(OperationCategory.READ); final FSPermissionChecker pc = getPermissionChecker(); @@ -113,7 +123,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 readLock(); try { checkOperation(OperationCategory.READ); -@@ -3015,7 +3019,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3015,7 +3019,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, "src=%s, fileId=%d, blk=%s, clientName=%s, clientMachine=%s", src, fileId, blk, clientName, clientMachine)); } finally { @@ -122,7 +132,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 } if (clientnode == null) { -@@ -3037,11 +3041,12 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3037,11 +3041,12 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ void abandonBlock(ExtendedBlock b, long fileId, String src, String holder) throws IOException { @@ -136,7 +146,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3050,7 +3055,7 @@ void abandonBlock(ExtendedBlock b, long fileId, String src, String holder) +@@ -3050,7 +3055,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, NameNode.stateChangeLog.debug("BLOCK* NameSystem.abandonBlock: {} is " + "removed from pendingCreates", b); } finally { @@ -145,7 +155,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 } 
getEditLog().logSync(); } -@@ -3104,10 +3109,11 @@ INodeFile checkLease(INodesInPath iip, String holder, long fileId) +@@ -3104,10 +3109,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, boolean completeFile(final String src, String holder, ExtendedBlock last, long fileId) throws IOException { @@ -158,7 +168,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3115,7 +3121,7 @@ boolean completeFile(final String src, String holder, +@@ -3115,7 +3121,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, success = FSDirWriteFileOp.completeFile(this, pc, src, holder, last, fileId); } finally { @@ -167,7 +177,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 } getEditLog().logSync(); if (success) { -@@ -3536,10 +3542,11 @@ void setQuota(String src, long nsQuota, long ssQuota, StorageType type) +@@ -3536,10 +3542,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ void fsync(String src, long fileId, String clientName, long lastBlockLength) throws IOException { @@ -180,7 +190,7 @@ index 243f62295ca4..ba3caa6b6c04 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3553,7 +3560,7 @@ void fsync(String src, long fileId, String clientName, long lastBlockLength) +@@ -3553,7 +3560,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, } FSDirWriteFileOp.persistBlocks(dir, src, pendingFile, false); } finally { diff --git a/hadoop/stackable/patches/3.3.4/007-snappy-cves-3.3.4.patch b/hadoop/stackable/patches/3.3.4/0007-Bump-Snappy-version-to-fix-CVEs.patch similarity index 59% rename from hadoop/stackable/patches/3.3.4/007-snappy-cves-3.3.4.patch rename to hadoop/stackable/patches/3.3.4/0007-Bump-Snappy-version-to-fix-CVEs.patch index 3cfb73599..f6d30bc74 100644 --- a/hadoop/stackable/patches/3.3.4/007-snappy-cves-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.4/0007-Bump-Snappy-version-to-fix-CVEs.patch @@ -1,5 +1,14 @@ 
+From ad83ab139ffffabd99549ee5207a116f7acc7cf6 Mon Sep 17 00:00:00 2001 +From: Andrew Kenworthy +Date: Thu, 16 May 2024 16:44:14 +0200 +Subject: Bump Snappy version to fix CVEs + +--- + hadoop-project/pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml -index 0b2f6f17157..9e7d5cc3973 100644 +index 082834e261..70fb9a3b3f 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -142,7 +142,7 @@ @@ -9,5 +18,5 @@ index 0b2f6f17157..9e7d5cc3973 100644 - 1.1.8.2 + 1.1.10.4 1.7.1 - + diff --git a/hadoop/stackable/patches/3.3.4/008-patch-cyclonedx-plugin.patch b/hadoop/stackable/patches/3.3.4/0008-Add-CycloneDX-plugin.patch similarity index 81% rename from hadoop/stackable/patches/3.3.4/008-patch-cyclonedx-plugin.patch rename to hadoop/stackable/patches/3.3.4/0008-Add-CycloneDX-plugin.patch index c431a1ee8..864b35902 100644 --- a/hadoop/stackable/patches/3.3.4/008-patch-cyclonedx-plugin.patch +++ b/hadoop/stackable/patches/3.3.4/0008-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From d7fa1e47e724f2bc6f22456a0b7fbc509629d285 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 19 +++++++++++++++++++ + 1 file changed, 19 insertions(+) + diff --git a/pom.xml b/pom.xml -index f4e435c..f050218 100644 +index f4e435c749..f0502184a2 100644 --- a/pom.xml +++ b/pom.xml @@ -116,6 +116,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x diff --git a/hadoop/stackable/patches/3.3.4/009-HADOOP-18516-3.3.4.patch b/hadoop/stackable/patches/3.3.4/0009-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch similarity index 92% rename from hadoop/stackable/patches/3.3.4/009-HADOOP-18516-3.3.4.patch rename to hadoop/stackable/patches/3.3.4/0009-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch index 34777f68c..633a5339e 100644 --- 
a/hadoop/stackable/patches/3.3.4/009-HADOOP-18516-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.4/0009-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch @@ -1,16 +1,35 @@ -commit 8c7c70b4b63990908687f6e49e9b0306ac12ef96 -Author: Anuj Modi <128447756+anujmodi2021@users.noreply.github.com> -Date: Fri Jun 7 19:03:23 2024 +0530 +From cfc358b6e36d9565076b325a2153be24bbf348ba Mon Sep 17 00:00:00 2001 +From: Anuj Modi <128447756+anujmodi2021@users.noreply.github.com> +Date: Fri, 7 Jun 2024 19:03:23 +0530 +Subject: HADOOP-18516: [ABFS][Authentication] Support Fixed SAS Token for ABFS + Authentication (#6552) - HADOOP-18516: [ABFS][Authentication] Support Fixed SAS Token for ABFS Authentication (#6552) - - Contributed by Anuj Modi +Contributed by Anuj Modi +--- + .../hadoop/fs/azurebfs/AbfsConfiguration.java | 69 +++++-- + .../fs/azurebfs/AzureBlobFileSystem.java | 3 +- + .../fs/azurebfs/AzureBlobFileSystemStore.java | 2 +- + .../azurebfs/constants/ConfigurationKeys.java | 5 +- + .../fs/azurebfs/services/AbfsClient.java | 8 +- + .../services/FixedSASTokenProvider.java | 65 +++++++ + .../hadoop-azure/src/site/markdown/abfs.md | 149 +++++++++++--- + .../azurebfs/AbstractAbfsIntegrationTest.java | 23 ++- + .../ITestAzureBlobFileSystemChooseSAS.java | 182 ++++++++++++++++++ + .../MockDelegationSASTokenProvider.java | 2 +- + .../extensions/MockSASTokenProvider.java | 16 +- + .../azurebfs/utils/AccountSASGenerator.java | 103 ++++++++++ + .../fs/azurebfs/utils/SASGenerator.java | 34 +++- + .../azurebfs/utils/ServiceSASGenerator.java | 15 +- + 14 files changed, 607 insertions(+), 69 deletions(-) + create mode 100644 hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java + create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java + create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java diff 
--git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java -index 50cc57447f92..7e38da987ea4 100644 +index 50cc57447f..7e38da987e 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java -@@ -59,6 +59,7 @@ +@@ -59,6 +59,7 @@ import org.apache.hadoop.fs.azurebfs.oauth2.UserPasswordTokenProvider; import org.apache.hadoop.fs.azurebfs.security.AbfsDelegationTokenManager; import org.apache.hadoop.fs.azurebfs.services.AuthType; import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy; @@ -18,7 +37,7 @@ index 50cc57447f92..7e38da987ea4 100644 import org.apache.hadoop.fs.azurebfs.services.KeyProvider; import org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider; import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderFormat; -@@ -876,33 +877,63 @@ public AccessTokenProvider getTokenProvider() throws TokenAccessProviderExceptio +@@ -876,33 +877,63 @@ public class AbfsConfiguration{ } } @@ -102,10 +121,10 @@ index 50cc57447f92..7e38da987ea4 100644 } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java -index 750306c4a983..955efb7b700a 100644 +index 750306c4a9..955efb7b70 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java -@@ -1166,10 +1166,9 @@ public void access(final Path path, final FsAction mode) throws IOException { +@@ -1166,10 +1166,9 @@ public class AzureBlobFileSystem extends FileSystem /** * Incrementing exists() calls from superclass for statistic collection. 
@@ -118,10 +137,10 @@ index 750306c4a983..955efb7b700a 100644 @Override public boolean exists(Path f) throws IOException { diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java -index d86a3d968461..db50161885b6 100644 +index d86a3d9684..db50161885 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java -@@ -1559,7 +1559,7 @@ private void initializeClient(URI uri, String fileSystemName, +@@ -1559,7 +1559,7 @@ public class AzureBlobFileSystemStore implements Closeable, ListingSupport { creds = new SharedKeyCredentials(accountName.substring(0, dotIndex), abfsConfiguration.getStorageAccountKey()); } else if (authType == AuthType.SAS) { @@ -131,10 +150,10 @@ index d86a3d968461..db50161885b6 100644 } else { LOG.trace("Fetching token provider"); diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java -index 12beb5a9bbab..73ddfc303d2c 100644 +index 12beb5a9bb..73ddfc303d 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java -@@ -227,7 +227,10 @@ public static String accountProperty(String property, String account) { +@@ -227,7 +227,10 @@ public final class ConfigurationKeys { public static final String FS_AZURE_ENABLE_DELEGATION_TOKEN = "fs.azure.enable.delegation.token"; public static final String FS_AZURE_DELEGATION_TOKEN_PROVIDER_TYPE = "fs.azure.delegation.token.provider.type"; @@ -147,10 +166,10 @@ index 
12beb5a9bbab..73ddfc303d2c 100644 /** For performance, AbfsInputStream/AbfsOutputStream re-use SAS tokens until the expiry is within this number of seconds. **/ diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java -index 69ef0d01c782..982013fef3ea 100644 +index 69ef0d01c7..982013fef3 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java -@@ -620,6 +620,7 @@ public AbfsRestOperation flush(final String path, final long position, +@@ -620,6 +620,7 @@ public class AbfsClient implements Closeable { abfsUriQueryBuilder.addQuery(QUERY_PARAM_POSITION, Long.toString(position)); abfsUriQueryBuilder.addQuery(QUERY_PARAM_RETAIN_UNCOMMITTED_DATA, String.valueOf(retainUncommittedData)); abfsUriQueryBuilder.addQuery(QUERY_PARAM_CLOSE, String.valueOf(isClose)); @@ -158,7 +177,7 @@ index 69ef0d01c782..982013fef3ea 100644 // AbfsInputStream/AbfsOutputStream reuse SAS tokens for better performance String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.WRITE_OPERATION, abfsUriQueryBuilder, cachedSasToken); -@@ -701,6 +702,7 @@ public AbfsRestOperation read(final String path, final long position, final byte +@@ -701,6 +702,7 @@ public class AbfsClient implements Closeable { requestHeaders.add(new AbfsHttpHeader(IF_MATCH, eTag)); final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder(); @@ -166,7 +185,7 @@ index 69ef0d01c782..982013fef3ea 100644 // AbfsInputStream/AbfsOutputStream reuse SAS tokens for better performance String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.READ_OPERATION, abfsUriQueryBuilder, cachedSasToken); -@@ -995,12 +997,12 @@ private String appendSASTokenToQuery(String path, +@@ -995,12 +997,12 @@ public 
class AbfsClient implements Closeable { sasToken = cachedSasToken; LOG.trace("Using cached SAS token."); } @@ -184,7 +203,7 @@ index 69ef0d01c782..982013fef3ea 100644 } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java new file mode 100644 -index 000000000000..1a2614dcc1d2 +index 0000000000..1a2614dcc1 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java @@ -0,0 +1,65 @@ @@ -254,7 +273,7 @@ index 000000000000..1a2614dcc1d2 + } +} diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md -index dfb7f3f42a5c..9ea8903583e3 100644 +index dfb7f3f42a..9ea8903583 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md @@ -12,7 +12,7 @@ @@ -440,10 +459,10 @@ index dfb7f3f42a5c..9ea8903583e3 100644 ## Technical notes diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java -index fd2f2690daea..3fe1c641cb96 100644 +index fd2f2690da..3fe1c641cb 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java -@@ -265,12 +265,29 @@ public void loadConfiguredFileSystem() throws Exception { +@@ -265,12 +265,29 @@ public abstract class AbstractAbfsIntegrationTest extends useConfiguredFileSystem = true; } @@ -478,7 +497,7 @@ index fd2f2690daea..3fe1c641cb96 100644 } diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java 
b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java new file mode 100644 -index 000000000000..d8db901151fe +index 0000000000..d8db901151 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java @@ -0,0 +1,182 @@ @@ -665,10 +684,10 @@ index 000000000000..d8db901151fe + } +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java -index cf7d51da4c44..d1e5dd4519d0 100644 +index cf7d51da4c..d1e5dd4519 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java -@@ -40,7 +40,7 @@ +@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.azurebfs.utils.SASGenerator; import org.apache.hadoop.security.AccessControlException; /** @@ -678,10 +697,10 @@ index cf7d51da4c44..d1e5dd4519d0 100644 public class MockDelegationSASTokenProvider implements SASTokenProvider { diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java -index 50ac20970f45..3fda128a9c01 100644 +index 50ac20970f..3fda128a9c 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java -@@ -20,7 +20,11 @@ +@@ -20,7 +20,11 @@ package org.apache.hadoop.fs.azurebfs.extensions; import java.io.IOException; @@ -693,7 +712,7 @@ index 50ac20970f45..3fda128a9c01 100644 import 
org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.fs.azurebfs.AbfsConfiguration; -@@ -28,17 +32,25 @@ +@@ -28,17 +32,25 @@ import org.apache.hadoop.fs.azurebfs.utils.Base64; import org.apache.hadoop.fs.azurebfs.utils.ServiceSASGenerator; /** @@ -723,7 +742,7 @@ index 50ac20970f45..3fda128a9c01 100644 @Override diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java new file mode 100644 -index 000000000000..2af741b7a4c1 +index 0000000000..2af741b7a4 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java @@ -0,0 +1,103 @@ @@ -831,10 +850,10 @@ index 000000000000..2af741b7a4c1 + } +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java -index 2e9289d8d44c..a80ddac5ed36 100644 +index 2e9289d8d4..a80ddac5ed 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java -@@ -29,6 +29,10 @@ +@@ -29,6 +29,10 @@ import javax.crypto.spec.SecretKeySpec; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -845,7 +864,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 /** * Test SAS generator. 
*/ -@@ -54,10 +58,8 @@ public String toString() { +@@ -54,10 +58,8 @@ public abstract class SASGenerator { protected static final Logger LOG = LoggerFactory.getLogger(SASGenerator.class); public static final Duration FIVE_MINUTES = Duration.ofMinutes(5); public static final Duration ONE_DAY = Duration.ofDays(1); @@ -858,7 +877,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 private Mac hmacSha256; private byte[] key; -@@ -68,7 +70,7 @@ private SASGenerator() { +@@ -68,7 +70,7 @@ public abstract class SASGenerator { /** * Called by subclasses to initialize the cryptographic SHA-256 HMAC provider. @@ -867,7 +886,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 */ protected SASGenerator(byte[] key) { this.key = key; -@@ -85,6 +87,26 @@ private void initializeMac() { +@@ -85,6 +87,26 @@ public abstract class SASGenerator { } } @@ -894,7 +913,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 protected String computeHmac256(final String stringToSign) { byte[] utf8Bytes; try { -@@ -98,4 +120,4 @@ protected String computeHmac256(final String stringToSign) { +@@ -98,4 +120,4 @@ public abstract class SASGenerator { } return Base64.encode(hmac); } @@ -902,10 +921,10 @@ index 2e9289d8d44c..a80ddac5ed36 100644 \ No newline at end of file +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java -index 24a1cea255b4..0ae5239e8f2a 100644 +index 24a1cea255..0ae5239e8f 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java -@@ -20,23 +20,26 @@ +@@ -20,23 +20,26 @@ package org.apache.hadoop.fs.azurebfs.utils; import java.time.Instant; @@ -936,7 +955,7 @@ index 24a1cea255b4..0ae5239e8f2a 100644 String sp = "rcwdl"; String sv = AuthenticationVersion.Feb20.toString(); String 
sr = "c"; -@@ -66,7 +69,7 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv +@@ -66,7 +69,7 @@ public class ServiceSASGenerator extends SASGenerator { sb.append("\n"); sb.append(se); sb.append("\n"); @@ -945,7 +964,7 @@ index 24a1cea255b4..0ae5239e8f2a 100644 sb.append("/blob/"); sb.append(accountName); sb.append("/"); -@@ -93,4 +96,4 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv +@@ -93,4 +96,4 @@ public class ServiceSASGenerator extends SASGenerator { LOG.debug("Service SAS stringToSign: " + stringToSign.replace("\n", ".")); return computeHmac256(stringToSign); } diff --git a/hadoop/stackable/patches/3.3.4/patchable.toml b/hadoop/stackable/patches/3.3.4/patchable.toml new file mode 100644 index 000000000..b35894cbc --- /dev/null +++ b/hadoop/stackable/patches/3.3.4/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hadoop.git" +base = "a585a73c3e02ac62350c136643a5e7f6095a3dbb" diff --git a/hadoop/stackable/patches/3.3.6/001-YARN-11527-3.3.6.patch b/hadoop/stackable/patches/3.3.6/0001-YARN-11527-Update-node.js.patch similarity index 63% rename from hadoop/stackable/patches/3.3.6/001-YARN-11527-3.3.6.patch rename to hadoop/stackable/patches/3.3.6/0001-YARN-11527-Update-node.js.patch index a823b7cd0..6b511dcfe 100644 --- a/hadoop/stackable/patches/3.3.6/001-YARN-11527-3.3.6.patch +++ b/hadoop/stackable/patches/3.3.6/0001-YARN-11527-Update-node.js.patch @@ -1,5 +1,14 @@ +From bd2fa3a3a5ef57c5f6ca4f0e5535a1cd875e50d1 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Thu, 21 Dec 2023 13:51:13 +0100 +Subject: YARN-11527: Update node.js + +--- + hadoop-project/pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml -index f1ac43ed5b38..73d0c7580338 100644 +index f1ac43ed5b..9b01858e0e 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -213,7 +213,7 @@ diff --git 
a/hadoop/stackable/patches/3.4.0/002-datanode-registration-override-3.4.0.patch b/hadoop/stackable/patches/3.3.6/0002-Allow-overriding-datanode-registration-addresses.patch similarity index 92% rename from hadoop/stackable/patches/3.4.0/002-datanode-registration-override-3.4.0.patch rename to hadoop/stackable/patches/3.3.6/0002-Allow-overriding-datanode-registration-addresses.patch index 9e08b9227..56def7424 100644 --- a/hadoop/stackable/patches/3.4.0/002-datanode-registration-override-3.4.0.patch +++ b/hadoop/stackable/patches/3.3.6/0002-Allow-overriding-datanode-registration-addresses.patch @@ -1,5 +1,17 @@ +From 77312867feaf8931ce8650208ebcbdea5fcfdb0e Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Natalie=20Klestrup=20R=C3=B6ijezon?= +Date: Thu, 11 Jan 2024 14:01:02 +0100 +Subject: Allow overriding datanode registration addresses + +--- + .../org/apache/hadoop/hdfs/DFSConfigKeys.java | 9 +++ + .../blockmanagement/DatanodeManager.java | 43 +++++++----- + .../hadoop/hdfs/server/datanode/DNConf.java | 70 +++++++++++++++++++ + .../hadoop/hdfs/server/datanode/DataNode.java | 35 ++++++++-- + 4 files changed, 135 insertions(+), 22 deletions(-) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -index e3f4bfcde84..3d65bcad229 100755 +index e3f4bfcde8..3d65bcad22 100755 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java @@ -147,6 +147,13 @@ public class DFSConfigKeys extends CommonConfigurationKeys { @@ -26,7 +38,7 @@ index e3f4bfcde84..3d65bcad229 100755 public static final boolean DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_DEFAULT = true; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java -index 07381fc696f..8aeb92cff11 100644 +index 07381fc696..8aeb92cff1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java @@ -180,6 +180,8 @@ public class DatanodeManager { @@ -50,7 +62,7 @@ index 07381fc696f..8aeb92cff11 100644 this.checkIpHostnameInRegistration = conf.getBoolean( DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_KEY, DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_DEFAULT); -@@ -1138,27 +1145,29 @@ void startAdminOperationIfNecessary(DatanodeDescriptor nodeReg) { +@@ -1138,27 +1145,29 @@ public class DatanodeManager { */ public void registerDatanode(DatanodeRegistration nodeReg) throws DisallowedDatanodeException, UnresolvedTopologyException { @@ -98,7 +110,7 @@ index 07381fc696f..8aeb92cff11 100644 // it will be disallowed from registering. 
if (!hostConfigManager.isIncluded(nodeReg)) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java -index 9b5343321d3..790d508e5ea 100644 +index 9b5343321d..790d508e5e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java @@ -100,6 +100,11 @@ public class DNConf { @@ -113,7 +125,7 @@ index 9b5343321d3..790d508e5ea 100644 final boolean overwriteDownstreamDerivedQOP; private final boolean pmemCacheRecoveryEnabled; -@@ -188,6 +193,11 @@ public DNConf(final Configurable dn) { +@@ -188,6 +193,11 @@ public class DNConf { connectToDnViaHostname = getConf().getBoolean( DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); @@ -125,7 +137,7 @@ index 9b5343321d3..790d508e5ea 100644 this.blockReportInterval = getConf().getLong( DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT); -@@ -362,6 +372,66 @@ public boolean getConnectToDnViaHostname() { +@@ -362,6 +372,66 @@ public class DNConf { return connectToDnViaHostname; } @@ -193,10 +205,10 @@ index 9b5343321d3..790d508e5ea 100644 * Returns socket timeout * diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -index 96c4ad9ae28..fdb8e631dc8 100644 +index 96c4ad9ae2..66b75cff3f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -@@ -117,6 +117,7 @@ +@@ -117,6 +117,7 @@ import java.util.HashSet; import java.util.Iterator; import 
java.util.List; import java.util.Map; @@ -204,7 +216,7 @@ index 96c4ad9ae28..fdb8e631dc8 100644 import java.util.Map.Entry; import java.util.Set; import java.util.UUID; -@@ -1876,11 +1877,35 @@ DatanodeRegistration createBPRegistration(NamespaceInfo nsInfo) { +@@ -1876,11 +1877,35 @@ public class DataNode extends ReconfigurableBase NodeType.DATA_NODE); } diff --git a/hadoop/stackable/patches/3.3.4/003-HADOOP-18055-3.3.4.patch b/hadoop/stackable/patches/3.3.6/0003-HADOOP-18055-Add-async-profiler.patch similarity index 94% rename from hadoop/stackable/patches/3.3.4/003-HADOOP-18055-3.3.4.patch rename to hadoop/stackable/patches/3.3.6/0003-HADOOP-18055-Add-async-profiler.patch index 127675e6d..9516f5c81 100644 --- a/hadoop/stackable/patches/3.3.4/003-HADOOP-18055-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.6/0003-HADOOP-18055-Add-async-profiler.patch @@ -1,8 +1,33 @@ +From 773ce66db817720e4f44ff307195014240b141a7 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: HADOOP-18055: Add async-profiler + +--- + .../org/apache/hadoop/http/HttpServer2.java | 21 + + .../hadoop/http/ProfileOutputServlet.java | 87 ++++ + .../apache/hadoop/http/ProfileServlet.java | 394 ++++++++++++++++++ + .../hadoop/http/ProfilerDisabledServlet.java | 44 ++ + .../org/apache/hadoop/util/ProcessUtils.java | 74 ++++ + .../src/main/resources/core-default.xml | 2 +- + .../src/site/markdown/AsyncProfilerServlet.md | 145 +++++++ + .../http/TestDisabledProfileServlet.java | 95 +++++ + .../hadoop-kms/src/site/markdown/index.md.vm | 5 +- + .../src/site/markdown/ServerSetup.md.vm | 5 +- + hadoop-project/src/site/site.xml | 1 + + 11 files changed, 868 insertions(+), 5 deletions(-) + create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java + create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java + create mode 100644 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java + create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java + create mode 100644 hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md + create mode 100644 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -index 76e77560a58bf..fb090fe4385a1 100644 +index 8dadbe390a..1f66a7e809 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -27,6 +27,7 @@ +@@ -27,6 +27,7 @@ import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; @@ -10,7 +35,7 @@ index 76e77560a58bf..fb090fe4385a1 100644 import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; -@@ -771,6 +772,26 @@ private void initializeWebServer(String name, String hostName, +@@ -744,6 +745,26 @@ public final class HttpServer2 implements FilterContainer { addDefaultServlets(); addPrometheusServlet(conf); @@ -39,7 +64,7 @@ index 76e77560a58bf..fb090fe4385a1 100644 private void addPrometheusServlet(Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java new file mode 100644 -index 0000000000000..1ecc21f3753ce +index 0000000000..1ecc21f375 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileOutputServlet.java @@ -0,0 +1,87 @@ @@ -132,7 +157,7 @@ index 
0000000000000..1ecc21f3753ce +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java new file mode 100644 -index 0000000000000..fc0ec7736ed8e +index 0000000000..fc0ec7736e --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java @@ -0,0 +1,394 @@ @@ -532,7 +557,7 @@ index 0000000000000..fc0ec7736ed8e +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java new file mode 100644 -index 0000000000000..459485ffa5b56 +index 0000000000..459485ffa5 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java @@ -0,0 +1,44 @@ @@ -582,7 +607,7 @@ index 0000000000000..459485ffa5b56 +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java new file mode 100644 -index 0000000000000..cf653b9c912c4 +index 0000000000..cf653b9c91 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProcessUtils.java @@ -0,0 +1,74 @@ @@ -661,10 +686,10 @@ index 0000000000000..cf653b9c912c4 + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml -index 27c86bbc9ac8b..dc2a6ffd8375a 100644 +index b1a25ce1f0..8068bae969 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml +++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml -@@ -78,7 +78,7 @@ +@@ -69,7 +69,7 @@ false Indicates if administrator ACLs are required to access @@ -675,7 +700,7 
@@ index 27c86bbc9ac8b..dc2a6ffd8375a 100644 diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md b/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md new file mode 100644 -index 0000000000000..4b93cc219a5ee +index 0000000000..4b93cc219a --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/site/markdown/AsyncProfilerServlet.md @@ -0,0 +1,145 @@ @@ -826,7 +851,7 @@ index 0000000000000..4b93cc219a5ee + diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java new file mode 100644 -index 0000000000000..ce068bb6f1cf6 +index 0000000000..ce068bb6f1 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java @@ -0,0 +1,95 @@ @@ -926,7 +951,7 @@ index 0000000000000..ce068bb6f1cf6 + +} diff --git a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm -index 6ea21d5cf407d..09375d5aab528 100644 +index 6ea21d5cf4..09375d5aab 100644 --- a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm +++ b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm @@ -1208,9 +1208,10 @@ Name | Description @@ -951,7 +976,7 @@ index 6ea21d5cf407d..09375d5aab528 100644 diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm -index 2d0a5b8cd2e7d..e97de0275ca22 100644 +index 2d0a5b8cd2..e97de0275c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm @@ -162,9 +162,10 @@ Name | Description @@ -976,10 +1001,10 @@ index 2d0a5b8cd2e7d..e97de0275ca22 100644 diff --git 
a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml -index a150385048f2f..6c0233877b053 100644 +index b53cbd2a05..0793e97771 100644 --- a/hadoop-project/src/site/site.xml +++ b/hadoop-project/src/site/site.xml -@@ -69,6 +69,7 @@ +@@ -74,6 +74,7 @@ diff --git a/hadoop/stackable/patches/3.3.4/004-HADOOP-18077-3.3.4.patch b/hadoop/stackable/patches/3.3.6/0004-Backport-HADOOP-18077.patch similarity index 79% rename from hadoop/stackable/patches/3.3.4/004-HADOOP-18077-3.3.4.patch rename to hadoop/stackable/patches/3.3.6/0004-Backport-HADOOP-18077.patch index 37d684adc..f94d482cd 100644 --- a/hadoop/stackable/patches/3.3.4/004-HADOOP-18077-3.3.4.patch +++ b/hadoop/stackable/patches/3.3.6/0004-Backport-HADOOP-18077.patch @@ -1,8 +1,18 @@ +From 14ea43b731dcbc77f3bedc26529198787b399dea Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: Backport HADOOP-18077 + +--- + .../src/main/java/org/apache/hadoop/http/HttpServer2.java | 6 ++++-- + .../org/apache/hadoop/http/ProfilerDisabledServlet.java | 8 +++++++- + 2 files changed, 11 insertions(+), 3 deletions(-) + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -index fb090fe4385a1..49807ac4b4597 100644 +index 1f66a7e809..96794086cb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -772,10 +772,11 @@ private void initializeWebServer(String name, String hostName, +@@ -745,10 +745,11 @@ public final class HttpServer2 implements FilterContainer { addDefaultServlets(); addPrometheusServlet(conf); @@ -16,7 +26,7 @@ index fb090fe4385a1..49807ac4b4597 100644 final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); if (asyncProfilerHome != null && 
!asyncProfilerHome.trim().isEmpty()) { addServlet("prof", "/prof", ProfileServlet.class); -@@ -787,6 +788,7 @@ private void addAsyncProfilerServlet(ContextHandlerCollection contexts) throws I +@@ -760,6 +761,7 @@ public final class HttpServer2 implements FilterContainer { genCtx.addServlet(ProfileOutputServlet.class, "/*"); genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); genCtx.setDisplayName("prof-output-hadoop"); @@ -25,10 +35,10 @@ index fb090fe4385a1..49807ac4b4597 100644 addServlet("prof", "/prof", ProfilerDisabledServlet.class); LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -index 459485ffa5b56..c488b574990cc 100644 +index 459485ffa5..c488b57499 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfilerDisabledServlet.java -@@ -36,9 +36,15 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res +@@ -36,9 +36,15 @@ public class ProfilerDisabledServlet extends HttpServlet { throws IOException { resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); ProfileServlet.setResponseHeader(resp); diff --git a/hadoop/stackable/patches/3.3.6/005-add-perf-event-itimer-3.3.6.patch b/hadoop/stackable/patches/3.3.6/0005-Async-profiler-also-grab-itimer-events.patch similarity index 66% rename from hadoop/stackable/patches/3.3.6/005-add-perf-event-itimer-3.3.6.patch rename to hadoop/stackable/patches/3.3.6/0005-Async-profiler-also-grab-itimer-events.patch index ff22ed151..a4945dd9f 100644 --- a/hadoop/stackable/patches/3.3.6/005-add-perf-event-itimer-3.3.6.patch +++ 
b/hadoop/stackable/patches/3.3.6/0005-Async-profiler-also-grab-itimer-events.patch @@ -1,8 +1,17 @@ +From 6b4eb2c72a71effdc2112567b750e24d5745c186 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: Async-profiler: also grab itimer events + +--- + .../src/main/java/org/apache/hadoop/http/ProfileServlet.java | 2 ++ + 1 file changed, 2 insertions(+) + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -index fc0ec7736ed8..e324ad6d49fd 100644 +index fc0ec7736e..e324ad6d49 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -@@ -76,6 +76,7 @@ +@@ -76,6 +76,7 @@ import org.apache.hadoop.util.ProcessUtils; * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) * // Perf events: * // cpu diff --git a/hadoop/stackable/patches/3.3.6/006-HDFS-17378-3.3.6.patch b/hadoop/stackable/patches/3.3.6/0006-HDFS-17378-Fix-missing-operationType-for-some-operat.patch similarity index 78% rename from hadoop/stackable/patches/3.3.6/006-HDFS-17378-3.3.6.patch rename to hadoop/stackable/patches/3.3.6/0006-HDFS-17378-Fix-missing-operationType-for-some-operat.patch index 5d43cfb93..2e3e33690 100644 --- a/hadoop/stackable/patches/3.3.6/006-HDFS-17378-3.3.6.patch +++ b/hadoop/stackable/patches/3.3.6/0006-HDFS-17378-Fix-missing-operationType-for-some-operat.patch @@ -1,8 +1,18 @@ +From 36ed6731ce3afa4ccacb40c1c82dfc81e0e80483 Mon Sep 17 00:00:00 2001 +From: Sebastian Bernauer +Date: Thu, 15 Feb 2024 15:33:43 +0100 +Subject: HDFS-17378: Fix missing operationType for some operations in + authorizer + +--- + .../hdfs/server/namenode/FSNamesystem.java | 41 +++++++++++-------- + 1 file changed, 24 insertions(+), 17 
deletions(-) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -index 9855b434e9c4..b3781ee1dd26 100644 +index 9855b434e9..b3781ee1dd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -@@ -2530,15 +2530,16 @@ void unsetStoragePolicy(String src) throws IOException { +@@ -2530,15 +2530,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, * @throws IOException */ BlockStoragePolicy getStoragePolicy(String src) throws IOException { @@ -21,7 +31,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 } } -@@ -2558,15 +2559,16 @@ BlockStoragePolicy[] getStoragePolicies() throws IOException { +@@ -2558,15 +2559,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, } long getPreferredBlockSize(String src) throws IOException { @@ -40,7 +50,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 } } -@@ -2619,7 +2621,6 @@ HdfsFileStatus startFile(String src, PermissionStatus permissions, +@@ -2619,7 +2621,6 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, boolean createParent, short replication, long blockSize, CryptoProtocolVersion[] supportedVersions, String ecPolicyName, String storagePolicy, boolean logRetryCache) throws IOException { @@ -48,7 +58,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 HdfsFileStatus status; try { status = startFileInt(src, permissions, holder, clientMachine, flag, -@@ -2639,6 +2640,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2639,6 +2640,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, long blockSize, CryptoProtocolVersion[] supportedVersions, String ecPolicyName, String storagePolicy, boolean logRetryCache) throws IOException { 
@@ -56,7 +66,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 if (NameNode.stateChangeLog.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("DIR* NameSystem.startFile: src=").append(src) -@@ -2676,7 +2678,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2676,7 +2678,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, checkOperation(OperationCategory.WRITE); final FSPermissionChecker pc = getPermissionChecker(); @@ -65,7 +75,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -2740,7 +2742,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2740,7 +2742,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, dir.writeUnlock(); } } finally { @@ -74,7 +84,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 // There might be transactions logged while trying to recover the lease. // They need to be sync'ed even when an exception was thrown. if (!skipSync) { -@@ -2769,10 +2771,11 @@ private HdfsFileStatus startFileInt(String src, +@@ -2769,10 +2771,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ boolean recoverLease(String src, String holder, String clientMachine) throws IOException { @@ -87,7 +97,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -2793,7 +2796,7 @@ boolean recoverLease(String src, String holder, String clientMachine) +@@ -2793,7 +2796,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, skipSync = true; throw se; } finally { @@ -96,7 +106,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 // There might be transactions logged while trying to recover the lease. // They need to be sync'ed even when an exception was thrown. 
if (!skipSync) { -@@ -3010,6 +3013,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3010,6 +3013,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, final Set excludes, final int numAdditionalNodes, final String clientName ) throws IOException { @@ -104,7 +114,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 //check if the feature is enabled dtpReplaceDatanodeOnFailure.checkEnabled(); -@@ -3021,7 +3025,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3021,7 +3025,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, final BlockType blockType; checkOperation(OperationCategory.WRITE); final FSPermissionChecker pc = getPermissionChecker(); @@ -113,7 +123,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 readLock(); try { // Changing this operation category to WRITE instead of making getAdditionalDatanode as a -@@ -3047,7 +3051,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3047,7 +3051,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, "src=%s, fileId=%d, blk=%s, clientName=%s, clientMachine=%s", src, fileId, blk, clientName, clientMachine)); } finally { @@ -122,7 +132,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 } if (clientnode == null) { -@@ -3069,11 +3073,12 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3069,11 +3073,12 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ void abandonBlock(ExtendedBlock b, long fileId, String src, String holder) throws IOException { @@ -136,7 +146,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3082,7 +3087,7 @@ void abandonBlock(ExtendedBlock b, long fileId, String src, String holder) +@@ -3082,7 +3087,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, NameNode.stateChangeLog.debug("BLOCK* NameSystem.abandonBlock: {} is " + "removed from pendingCreates", b); } finally { @@ -145,7 +155,7 @@ 
index 9855b434e9c4..b3781ee1dd26 100644 } getEditLog().logSync(); } -@@ -3136,10 +3141,11 @@ INodeFile checkLease(INodesInPath iip, String holder, long fileId) +@@ -3136,10 +3141,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, boolean completeFile(final String src, String holder, ExtendedBlock last, long fileId) throws IOException { @@ -158,7 +168,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3147,7 +3153,7 @@ boolean completeFile(final String src, String holder, +@@ -3147,7 +3153,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, success = FSDirWriteFileOp.completeFile(this, pc, src, holder, last, fileId); } finally { @@ -167,7 +177,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 } getEditLog().logSync(); if (success) { -@@ -3572,10 +3578,11 @@ void setQuota(String src, long nsQuota, long ssQuota, StorageType type) +@@ -3572,10 +3578,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ void fsync(String src, long fileId, String clientName, long lastBlockLength) throws IOException { @@ -180,7 +190,7 @@ index 9855b434e9c4..b3781ee1dd26 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3589,7 +3596,7 @@ void fsync(String src, long fileId, String clientName, long lastBlockLength) +@@ -3589,7 +3596,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, } FSDirWriteFileOp.persistBlocks(dir, src, pendingFile, false); } finally { diff --git a/hadoop/stackable/patches/3.3.6/007-snappy-cves-3.3.6.patch b/hadoop/stackable/patches/3.3.6/0007-Bump-Snappy-version-to-fix-CVEs.patch similarity index 59% rename from hadoop/stackable/patches/3.3.6/007-snappy-cves-3.3.6.patch rename to hadoop/stackable/patches/3.3.6/0007-Bump-Snappy-version-to-fix-CVEs.patch index a070eee76..a6711920a 100644 --- a/hadoop/stackable/patches/3.3.6/007-snappy-cves-3.3.6.patch +++ 
b/hadoop/stackable/patches/3.3.6/0007-Bump-Snappy-version-to-fix-CVEs.patch @@ -1,5 +1,14 @@ +From 8cd8cdc424ff7cf410fb84941fd6d7777ec91913 Mon Sep 17 00:00:00 2001 +From: Andrew Kenworthy +Date: Thu, 16 May 2024 16:44:14 +0200 +Subject: Bump Snappy version to fix CVEs + +--- + hadoop-project/pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml -index f1ac43ed5b3..0d1e42acc75 100644 +index 9b01858e0e..da39c1e0ad 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -144,7 +144,7 @@ @@ -9,5 +18,5 @@ index f1ac43ed5b3..0d1e42acc75 100644 - 1.1.8.2 + 1.1.10.4 1.7.1 - + diff --git a/hadoop/stackable/patches/3.3.6/008-cyclonedx-plugin.patch b/hadoop/stackable/patches/3.3.6/0008-Update-CycloneDX-plugin.patch similarity index 82% rename from hadoop/stackable/patches/3.3.6/008-cyclonedx-plugin.patch rename to hadoop/stackable/patches/3.3.6/0008-Update-CycloneDX-plugin.patch index f9bd3d33a..ef27fb2a4 100644 --- a/hadoop/stackable/patches/3.3.6/008-cyclonedx-plugin.patch +++ b/hadoop/stackable/patches/3.3.6/0008-Update-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From bb767718387bcc1c49e5780e5d1a7a79fde99f15 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Update CycloneDX plugin + +--- + pom.xml | 10 ++++++---- + 1 file changed, 6 insertions(+), 4 deletions(-) + diff --git a/pom.xml b/pom.xml -index aaa4203..de00177 100644 +index aaa4203012..de001775ab 100644 --- a/pom.xml +++ b/pom.xml @@ -116,7 +116,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x diff --git a/hadoop/stackable/patches/3.3.6/009-HADOOP-18516-3.3.6.patch b/hadoop/stackable/patches/3.3.6/0009-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch similarity index 92% rename from hadoop/stackable/patches/3.3.6/009-HADOOP-18516-3.3.6.patch rename to hadoop/stackable/patches/3.3.6/0009-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch 
index 6cbd961cd..41d6c9447 100644 --- a/hadoop/stackable/patches/3.3.6/009-HADOOP-18516-3.3.6.patch +++ b/hadoop/stackable/patches/3.3.6/0009-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch @@ -1,16 +1,35 @@ -commit 88cc213c9610f65e2b43142ae0c5bce20a660cdd -Author: Anuj Modi <128447756+anujmodi2021@users.noreply.github.com> -Date: Fri Jun 7 19:03:23 2024 +0530 +From 3864664a22a8c75d79774c77a7c88f5d54085f5d Mon Sep 17 00:00:00 2001 +From: Anuj Modi <128447756+anujmodi2021@users.noreply.github.com> +Date: Fri, 7 Jun 2024 19:03:23 +0530 +Subject: HADOOP-18516: [ABFS][Authentication] Support Fixed SAS Token for ABFS + Authentication (#6552) - HADOOP-18516: [ABFS][Authentication] Support Fixed SAS Token for ABFS Authentication (#6552) - - Contributed by Anuj Modi +Contributed by Anuj Modi +--- + .../hadoop/fs/azurebfs/AbfsConfiguration.java | 69 +++++-- + .../fs/azurebfs/AzureBlobFileSystem.java | 3 +- + .../fs/azurebfs/AzureBlobFileSystemStore.java | 2 +- + .../azurebfs/constants/ConfigurationKeys.java | 5 +- + .../fs/azurebfs/services/AbfsClient.java | 9 +- + .../services/FixedSASTokenProvider.java | 65 +++++++ + .../hadoop-azure/src/site/markdown/abfs.md | 149 +++++++++++--- + .../azurebfs/AbstractAbfsIntegrationTest.java | 23 ++- + .../ITestAzureBlobFileSystemChooseSAS.java | 182 ++++++++++++++++++ + .../MockDelegationSASTokenProvider.java | 2 +- + .../extensions/MockSASTokenProvider.java | 16 +- + .../azurebfs/utils/AccountSASGenerator.java | 103 ++++++++++ + .../fs/azurebfs/utils/SASGenerator.java | 34 +++- + .../azurebfs/utils/ServiceSASGenerator.java | 15 +- + 14 files changed, 608 insertions(+), 69 deletions(-) + create mode 100644 hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java + create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java + create mode 100644 
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java -index 1bf7c569da13..98534f75e00a 100644 +index 1bf7c569da..98534f75e0 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java -@@ -59,6 +59,7 @@ +@@ -59,6 +59,7 @@ import org.apache.hadoop.fs.azurebfs.oauth2.UserPasswordTokenProvider; import org.apache.hadoop.fs.azurebfs.security.AbfsDelegationTokenManager; import org.apache.hadoop.fs.azurebfs.services.AuthType; import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy; @@ -18,7 +37,7 @@ index 1bf7c569da13..98534f75e00a 100644 import org.apache.hadoop.fs.azurebfs.services.KeyProvider; import org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider; import org.apache.hadoop.fs.azurebfs.utils.TracingHeaderFormat; -@@ -927,33 +928,63 @@ public AccessTokenProvider getTokenProvider() throws TokenAccessProviderExceptio +@@ -927,33 +928,63 @@ public class AbfsConfiguration{ } } @@ -102,10 +121,10 @@ index 1bf7c569da13..98534f75e00a 100644 } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java -index 5fb2c6e1700a..8bfaf2fa5e2a 100644 +index 5fb2c6e170..8bfaf2fa5e 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java -@@ -1273,10 +1273,9 @@ public void access(final Path path, final FsAction mode) throws IOException { +@@ -1273,10 +1273,9 @@ public class 
AzureBlobFileSystem extends FileSystem /** * Incrementing exists() calls from superclass for statistic collection. @@ -118,10 +137,10 @@ index 5fb2c6e1700a..8bfaf2fa5e2a 100644 @Override public boolean exists(Path f) throws IOException { diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java -index cd33da401c9d..dc6d100173e8 100644 +index cd33da401c..dc6d100173 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java -@@ -1611,7 +1611,7 @@ private void initializeClient(URI uri, String fileSystemName, +@@ -1611,7 +1611,7 @@ public class AzureBlobFileSystemStore implements Closeable, ListingSupport { creds = new SharedKeyCredentials(accountName.substring(0, dotIndex), abfsConfiguration.getStorageAccountKey()); } else if (authType == AuthType.SAS) { @@ -131,10 +150,10 @@ index cd33da401c9d..dc6d100173e8 100644 } else { LOG.trace("Fetching token provider"); diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java -index 872364a8e616..5458bf4d8bc1 100644 +index 872364a8e6..5458bf4d8b 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java -@@ -248,7 +248,10 @@ public static String accountProperty(String property, String account) { +@@ -248,7 +248,10 @@ public final class ConfigurationKeys { public static final String FS_AZURE_ENABLE_DELEGATION_TOKEN = "fs.azure.enable.delegation.token"; public static final String 
FS_AZURE_DELEGATION_TOKEN_PROVIDER_TYPE = "fs.azure.delegation.token.provider.type"; @@ -147,10 +166,10 @@ index 872364a8e616..5458bf4d8bc1 100644 /** For performance, AbfsInputStream/AbfsOutputStream re-use SAS tokens until the expiry is within this number of seconds. **/ diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java -index 1767274f3606..8255bbb76ca5 100644 +index 1767274f36..8255bbb76c 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java -@@ -891,6 +891,7 @@ public AbfsRestOperation flush(final String path, final long position, +@@ -891,6 +891,7 @@ public class AbfsClient implements Closeable { abfsUriQueryBuilder.addQuery(QUERY_PARAM_POSITION, Long.toString(position)); abfsUriQueryBuilder.addQuery(QUERY_PARAM_RETAIN_UNCOMMITTED_DATA, String.valueOf(retainUncommittedData)); abfsUriQueryBuilder.addQuery(QUERY_PARAM_CLOSE, String.valueOf(isClose)); @@ -158,7 +177,7 @@ index 1767274f3606..8255bbb76ca5 100644 // AbfsInputStream/AbfsOutputStream reuse SAS tokens for better performance String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.WRITE_OPERATION, abfsUriQueryBuilder, cachedSasToken); -@@ -972,6 +973,7 @@ public AbfsRestOperation read(final String path, final long position, final byte +@@ -972,6 +973,7 @@ public class AbfsClient implements Closeable { requestHeaders.add(new AbfsHttpHeader(IF_MATCH, eTag)); final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder(); @@ -166,7 +185,7 @@ index 1767274f3606..8255bbb76ca5 100644 // AbfsInputStream/AbfsOutputStream reuse SAS tokens for better performance String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.READ_OPERATION, abfsUriQueryBuilder, 
cachedSasToken); -@@ -1266,16 +1268,17 @@ private String appendSASTokenToQuery(String path, +@@ -1266,16 +1268,17 @@ public class AbfsClient implements Closeable { sasToken = cachedSasToken; LOG.trace("Using cached SAS token."); } @@ -189,7 +208,7 @@ index 1767274f3606..8255bbb76ca5 100644 } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java new file mode 100644 -index 000000000000..1a2614dcc1d2 +index 0000000000..1a2614dcc1 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java @@ -0,0 +1,65 @@ @@ -259,7 +278,7 @@ index 000000000000..1a2614dcc1d2 + } +} diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md -index aff1e32b83f2..a994b3892c4c 100644 +index aff1e32b83..a994b3892c 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md @@ -12,7 +12,7 @@ @@ -445,10 +464,10 @@ index aff1e32b83f2..a994b3892c4c 100644 ## Technical notes diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java -index 74655fd57362..be4c4541eb05 100644 +index 74655fd573..be4c4541eb 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java -@@ -282,13 +282,30 @@ public void loadConfiguredFileSystem() throws Exception { +@@ -282,13 +282,30 @@ public abstract class AbstractAbfsIntegrationTest extends useConfiguredFileSystem = true; } @@ -484,7 +503,7 @@ index 74655fd57362..be4c4541eb05 100644 } diff 
--git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java new file mode 100644 -index 000000000000..d8db901151fe +index 0000000000..d8db901151 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java @@ -0,0 +1,182 @@ @@ -671,10 +690,10 @@ index 000000000000..d8db901151fe + } +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java -index cf7d51da4c44..d1e5dd4519d0 100644 +index cf7d51da4c..d1e5dd4519 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java -@@ -40,7 +40,7 @@ +@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.azurebfs.utils.SASGenerator; import org.apache.hadoop.security.AccessControlException; /** @@ -684,10 +703,10 @@ index cf7d51da4c44..d1e5dd4519d0 100644 public class MockDelegationSASTokenProvider implements SASTokenProvider { diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java -index 50ac20970f45..3fda128a9c01 100644 +index 50ac20970f..3fda128a9c 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java -@@ -20,7 +20,11 @@ +@@ -20,7 +20,11 @@ package 
org.apache.hadoop.fs.azurebfs.extensions; import java.io.IOException; @@ -699,7 +718,7 @@ index 50ac20970f45..3fda128a9c01 100644 import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.fs.azurebfs.AbfsConfiguration; -@@ -28,17 +32,25 @@ +@@ -28,17 +32,25 @@ import org.apache.hadoop.fs.azurebfs.utils.Base64; import org.apache.hadoop.fs.azurebfs.utils.ServiceSASGenerator; /** @@ -729,7 +748,7 @@ index 50ac20970f45..3fda128a9c01 100644 @Override diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java new file mode 100644 -index 000000000000..2af741b7a4c1 +index 0000000000..2af741b7a4 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java @@ -0,0 +1,103 @@ @@ -837,10 +856,10 @@ index 000000000000..2af741b7a4c1 + } +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java -index 2e9289d8d44c..a80ddac5ed36 100644 +index 2e9289d8d4..a80ddac5ed 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java -@@ -29,6 +29,10 @@ +@@ -29,6 +29,10 @@ import javax.crypto.spec.SecretKeySpec; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -851,7 +870,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 /** * Test SAS generator. 
*/ -@@ -54,10 +58,8 @@ public String toString() { +@@ -54,10 +58,8 @@ public abstract class SASGenerator { protected static final Logger LOG = LoggerFactory.getLogger(SASGenerator.class); public static final Duration FIVE_MINUTES = Duration.ofMinutes(5); public static final Duration ONE_DAY = Duration.ofDays(1); @@ -864,7 +883,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 private Mac hmacSha256; private byte[] key; -@@ -68,7 +70,7 @@ private SASGenerator() { +@@ -68,7 +70,7 @@ public abstract class SASGenerator { /** * Called by subclasses to initialize the cryptographic SHA-256 HMAC provider. @@ -873,7 +892,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 */ protected SASGenerator(byte[] key) { this.key = key; -@@ -85,6 +87,26 @@ private void initializeMac() { +@@ -85,6 +87,26 @@ public abstract class SASGenerator { } } @@ -900,7 +919,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 protected String computeHmac256(final String stringToSign) { byte[] utf8Bytes; try { -@@ -98,4 +120,4 @@ protected String computeHmac256(final String stringToSign) { +@@ -98,4 +120,4 @@ public abstract class SASGenerator { } return Base64.encode(hmac); } @@ -908,10 +927,10 @@ index 2e9289d8d44c..a80ddac5ed36 100644 \ No newline at end of file +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java -index 24a1cea255b4..0ae5239e8f2a 100644 +index 24a1cea255..0ae5239e8f 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java -@@ -20,23 +20,26 @@ +@@ -20,23 +20,26 @@ package org.apache.hadoop.fs.azurebfs.utils; import java.time.Instant; @@ -942,7 +961,7 @@ index 24a1cea255b4..0ae5239e8f2a 100644 String sp = "rcwdl"; String sv = AuthenticationVersion.Feb20.toString(); String 
sr = "c"; -@@ -66,7 +69,7 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv +@@ -66,7 +69,7 @@ public class ServiceSASGenerator extends SASGenerator { sb.append("\n"); sb.append(se); sb.append("\n"); @@ -951,7 +970,7 @@ index 24a1cea255b4..0ae5239e8f2a 100644 sb.append("/blob/"); sb.append(accountName); sb.append("/"); -@@ -93,4 +96,4 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv +@@ -93,4 +96,4 @@ public class ServiceSASGenerator extends SASGenerator { LOG.debug("Service SAS stringToSign: " + stringToSign.replace("\n", ".")); return computeHmac256(stringToSign); } diff --git a/hadoop/stackable/patches/3.3.6/patchable.toml b/hadoop/stackable/patches/3.3.6/patchable.toml new file mode 100644 index 000000000..26e9adf44 --- /dev/null +++ b/hadoop/stackable/patches/3.3.6/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hadoop.git" +base = "1be78238728da9266a4f88195058f08fd012bf9c" diff --git a/hadoop/stackable/patches/3.4.0/001-YARN-11527-3.4.0.patch b/hadoop/stackable/patches/3.4.0/0001-YARN-11527-Update-node.js.patch similarity index 63% rename from hadoop/stackable/patches/3.4.0/001-YARN-11527-3.4.0.patch rename to hadoop/stackable/patches/3.4.0/0001-YARN-11527-Update-node.js.patch index b050e80dd..4d98247c7 100644 --- a/hadoop/stackable/patches/3.4.0/001-YARN-11527-3.4.0.patch +++ b/hadoop/stackable/patches/3.4.0/0001-YARN-11527-Update-node.js.patch @@ -1,5 +1,14 @@ +From a3096eeaece059cebe553d188f81f6864a056bdc Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Thu, 21 Dec 2023 13:51:13 +0100 +Subject: YARN-11527: Update node.js + +--- + hadoop-project/pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml -index 0ed96d087bc..9ebb6af4567 100644 +index 0ed96d087b..6f3b9371cd 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -217,7 +217,7 @@ diff --git 
a/hadoop/stackable/patches/3.3.6/002-datanode-registration-override-3.3.6.patch b/hadoop/stackable/patches/3.4.0/0002-Allow-overriding-datanode-registration-addresses.patch similarity index 90% rename from hadoop/stackable/patches/3.3.6/002-datanode-registration-override-3.3.6.patch rename to hadoop/stackable/patches/3.4.0/0002-Allow-overriding-datanode-registration-addresses.patch index 9e08b9227..55eaf6723 100644 --- a/hadoop/stackable/patches/3.3.6/002-datanode-registration-override-3.3.6.patch +++ b/hadoop/stackable/patches/3.4.0/0002-Allow-overriding-datanode-registration-addresses.patch @@ -1,8 +1,20 @@ +From 448b27ab25e4bf7f5aff97c256e9ebbe2d1ad181 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Natalie=20Klestrup=20R=C3=B6ijezon?= +Date: Thu, 11 Jan 2024 14:01:02 +0100 +Subject: Allow overriding datanode registration addresses + +--- + .../org/apache/hadoop/hdfs/DFSConfigKeys.java | 9 +++ + .../blockmanagement/DatanodeManager.java | 43 +++++++----- + .../hadoop/hdfs/server/datanode/DNConf.java | 70 +++++++++++++++++++ + .../hadoop/hdfs/server/datanode/DataNode.java | 35 ++++++++-- + 4 files changed, 135 insertions(+), 22 deletions(-) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -index e3f4bfcde84..3d65bcad229 100755 +index f92a2ad565..25bcd438c7 100755 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -@@ -147,6 +147,13 @@ public class DFSConfigKeys extends CommonConfigurationKeys { +@@ -152,6 +152,13 @@ public class DFSConfigKeys extends CommonConfigurationKeys { public static final boolean DFS_DATANODE_DROP_CACHE_BEHIND_READS_DEFAULT = false; public static final String DFS_DATANODE_USE_DN_HOSTNAME = "dfs.datanode.use.datanode.hostname"; public static final boolean 
DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT = false; @@ -16,7 +28,7 @@ index e3f4bfcde84..3d65bcad229 100755 public static final String DFS_DATANODE_MAX_LOCKED_MEMORY_KEY = "dfs.datanode.max.locked.memory"; public static final long DFS_DATANODE_MAX_LOCKED_MEMORY_DEFAULT = 0; public static final String DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_KEY = "dfs.datanode.fsdatasetcache.max.threads.per.volume"; -@@ -454,6 +461,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys { +@@ -491,6 +498,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys { public static final long DFS_DATANODE_PROCESS_COMMANDS_THRESHOLD_DEFAULT = TimeUnit.SECONDS.toMillis(2); @@ -26,10 +38,10 @@ index e3f4bfcde84..3d65bcad229 100755 public static final boolean DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_DEFAULT = true; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java -index 07381fc696f..8aeb92cff11 100644 +index ebd2fa992e..c56f254478 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java -@@ -180,6 +180,8 @@ public class DatanodeManager { +@@ -181,6 +181,8 @@ public class DatanodeManager { private boolean hasClusterEverBeenMultiRack = false; private final boolean checkIpHostnameInRegistration; @@ -38,7 +50,7 @@ index 07381fc696f..8aeb92cff11 100644 /** * Whether we should tell datanodes what to cache in replies to * heartbeat messages. -@@ -316,6 +318,11 @@ public class DatanodeManager { +@@ -314,6 +316,11 @@ public class DatanodeManager { // Block invalidate limit also has some dependency on heartbeat interval. // Check setBlockInvalidateLimit(). 
setBlockInvalidateLimit(configuredBlockInvalidateLimit); @@ -50,7 +62,7 @@ index 07381fc696f..8aeb92cff11 100644 this.checkIpHostnameInRegistration = conf.getBoolean( DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_KEY, DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_DEFAULT); -@@ -1138,27 +1145,29 @@ void startAdminOperationIfNecessary(DatanodeDescriptor nodeReg) { +@@ -1158,27 +1165,29 @@ public class DatanodeManager { */ public void registerDatanode(DatanodeRegistration nodeReg) throws DisallowedDatanodeException, UnresolvedTopologyException { @@ -98,10 +110,10 @@ index 07381fc696f..8aeb92cff11 100644 // it will be disallowed from registering. if (!hostConfigManager.isIncluded(nodeReg)) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java -index 9b5343321d3..790d508e5ea 100644 +index 21b92db307..5d3437239c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java -@@ -100,6 +100,11 @@ public class DNConf { +@@ -101,6 +101,11 @@ public class DNConf { final boolean syncOnClose; final boolean encryptDataTransfer; final boolean connectToDnViaHostname; @@ -113,7 +125,7 @@ index 9b5343321d3..790d508e5ea 100644 final boolean overwriteDownstreamDerivedQOP; private final boolean pmemCacheRecoveryEnabled; -@@ -188,6 +193,11 @@ public DNConf(final Configurable dn) { +@@ -189,6 +194,11 @@ public class DNConf { connectToDnViaHostname = getConf().getBoolean( DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); @@ -125,7 +137,7 @@ index 9b5343321d3..790d508e5ea 100644 this.blockReportInterval = getConf().getLong( DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT); 
-@@ -362,6 +372,66 @@ public boolean getConnectToDnViaHostname() { +@@ -363,6 +373,66 @@ public class DNConf { return connectToDnViaHostname; } @@ -193,10 +205,10 @@ index 9b5343321d3..790d508e5ea 100644 * Returns socket timeout * diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -index 96c4ad9ae28..fdb8e631dc8 100644 +index 956f5bbe51..22ae127d98 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -@@ -117,6 +117,7 @@ +@@ -135,6 +135,7 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -204,7 +216,7 @@ index 96c4ad9ae28..fdb8e631dc8 100644 import java.util.Map.Entry; import java.util.Set; import java.util.UUID; -@@ -1876,11 +1877,35 @@ DatanodeRegistration createBPRegistration(NamespaceInfo nsInfo) { +@@ -2076,11 +2077,35 @@ public class DataNode extends ReconfigurableBase NodeType.DATA_NODE); } diff --git a/hadoop/stackable/patches/3.4.0/003-add-perf-event-itimer-3.4.0.patch b/hadoop/stackable/patches/3.4.0/0003-Async-profiler-also-grab-itimer-events.patch similarity index 61% rename from hadoop/stackable/patches/3.4.0/003-add-perf-event-itimer-3.4.0.patch rename to hadoop/stackable/patches/3.4.0/0003-Async-profiler-also-grab-itimer-events.patch index ff22ed151..2bd82be74 100644 --- a/hadoop/stackable/patches/3.4.0/003-add-perf-event-itimer-3.4.0.patch +++ b/hadoop/stackable/patches/3.4.0/0003-Async-profiler-also-grab-itimer-events.patch @@ -1,8 +1,17 @@ +From e0e0e15d1cb8e686c72bbc6699e0b4789f6e334d Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: Async-profiler: also grab itimer events + +--- + 
.../src/main/java/org/apache/hadoop/http/ProfileServlet.java | 2 ++ + 1 file changed, 2 insertions(+) + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -index fc0ec7736ed8..e324ad6d49fd 100644 +index ce53274151..909892ff90 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java -@@ -76,6 +76,7 @@ +@@ -76,6 +76,7 @@ import org.apache.hadoop.util.ProcessUtils; * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) * // Perf events: * // cpu @@ -10,7 +19,7 @@ index fc0ec7736ed8..e324ad6d49fd 100644 * // page-faults * // context-switches * // cycles -@@ -115,6 +116,7 @@ public class ProfileServlet extends HttpServlet { +@@ -118,6 +119,7 @@ public class ProfileServlet extends HttpServlet { private enum Event { CPU("cpu"), diff --git a/hadoop/stackable/patches/3.4.0/004-HDFS-17378-3.4.0.patch b/hadoop/stackable/patches/3.4.0/0004-HDFS-17378-Fix-missing-operationType-for-some-operat.patch similarity index 78% rename from hadoop/stackable/patches/3.4.0/004-HDFS-17378-3.4.0.patch rename to hadoop/stackable/patches/3.4.0/0004-HDFS-17378-Fix-missing-operationType-for-some-operat.patch index 8f32595c8..d8382bc21 100644 --- a/hadoop/stackable/patches/3.4.0/004-HDFS-17378-3.4.0.patch +++ b/hadoop/stackable/patches/3.4.0/0004-HDFS-17378-Fix-missing-operationType-for-some-operat.patch @@ -1,8 +1,18 @@ +From f6f99436ff36b8b56f3af105501a4b15ee4e8d44 Mon Sep 17 00:00:00 2001 +From: Sebastian Bernauer +Date: Thu, 15 Feb 2024 15:33:43 +0100 +Subject: HDFS-17378: Fix missing operationType for some operations in + authorizer + +--- + .../hdfs/server/namenode/FSNamesystem.java | 41 +++++++++++-------- + 1 file changed, 24 insertions(+), 17 
deletions(-) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -index 277cc42222f..75766bcd696 100644 +index 277cc42222..75766bcd69 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -@@ -2618,15 +2618,16 @@ void unsetStoragePolicy(String src) throws IOException { +@@ -2618,15 +2618,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, * @throws IOException */ BlockStoragePolicy getStoragePolicy(String src) throws IOException { @@ -21,7 +31,7 @@ index 277cc42222f..75766bcd696 100644 } } -@@ -2646,15 +2647,16 @@ BlockStoragePolicy[] getStoragePolicies() throws IOException { +@@ -2646,15 +2647,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, } long getPreferredBlockSize(String src) throws IOException { @@ -40,7 +50,7 @@ index 277cc42222f..75766bcd696 100644 } } -@@ -2707,7 +2709,6 @@ HdfsFileStatus startFile(String src, PermissionStatus permissions, +@@ -2707,7 +2709,6 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, boolean createParent, short replication, long blockSize, CryptoProtocolVersion[] supportedVersions, String ecPolicyName, String storagePolicy, boolean logRetryCache) throws IOException { @@ -48,7 +58,7 @@ index 277cc42222f..75766bcd696 100644 HdfsFileStatus status; try { status = startFileInt(src, permissions, holder, clientMachine, flag, -@@ -2727,6 +2728,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2727,6 +2728,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, long blockSize, CryptoProtocolVersion[] supportedVersions, String ecPolicyName, String storagePolicy, boolean logRetryCache) throws IOException { @@ 
-56,7 +66,7 @@ index 277cc42222f..75766bcd696 100644 if (NameNode.stateChangeLog.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("DIR* NameSystem.startFile: src=").append(src) -@@ -2764,7 +2766,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2764,7 +2766,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, checkOperation(OperationCategory.WRITE); final FSPermissionChecker pc = getPermissionChecker(); @@ -65,7 +75,7 @@ index 277cc42222f..75766bcd696 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -2827,7 +2829,7 @@ private HdfsFileStatus startFileInt(String src, +@@ -2827,7 +2829,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, dir.writeUnlock(); } } finally { @@ -74,7 +84,7 @@ index 277cc42222f..75766bcd696 100644 // There might be transactions logged while trying to recover the lease. // They need to be sync'ed even when an exception was thrown. if (!skipSync) { -@@ -2856,10 +2858,11 @@ private HdfsFileStatus startFileInt(String src, +@@ -2856,10 +2858,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ boolean recoverLease(String src, String holder, String clientMachine) throws IOException { @@ -87,7 +97,7 @@ index 277cc42222f..75766bcd696 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -2880,7 +2883,7 @@ boolean recoverLease(String src, String holder, String clientMachine) +@@ -2880,7 +2883,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, skipSync = true; throw se; } finally { @@ -96,7 +106,7 @@ index 277cc42222f..75766bcd696 100644 // There might be transactions logged while trying to recover the lease. // They need to be sync'ed even when an exception was thrown. 
if (!skipSync) { -@@ -3096,6 +3099,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3096,6 +3099,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, final Set excludes, final int numAdditionalNodes, final String clientName ) throws IOException { @@ -104,7 +114,7 @@ index 277cc42222f..75766bcd696 100644 //check if the feature is enabled dtpReplaceDatanodeOnFailure.checkEnabled(); -@@ -3107,7 +3111,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3107,7 +3111,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, final BlockType blockType; checkOperation(OperationCategory.WRITE); final FSPermissionChecker pc = getPermissionChecker(); @@ -113,7 +123,7 @@ index 277cc42222f..75766bcd696 100644 readLock(); try { // Changing this operation category to WRITE instead of making getAdditionalDatanode as a -@@ -3133,7 +3137,7 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3133,7 +3137,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, "src=%s, fileId=%d, blk=%s, clientName=%s, clientMachine=%s", src, fileId, blk, clientName, clientMachine)); } finally { @@ -122,7 +132,7 @@ index 277cc42222f..75766bcd696 100644 } if (clientnode == null) { -@@ -3155,10 +3159,11 @@ LocatedBlock getAdditionalDatanode(String src, long fileId, +@@ -3155,10 +3159,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ void abandonBlock(ExtendedBlock b, long fileId, String src, String holder) throws IOException { @@ -135,7 +145,7 @@ index 277cc42222f..75766bcd696 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3167,7 +3172,7 @@ void abandonBlock(ExtendedBlock b, long fileId, String src, String holder) +@@ -3167,7 +3172,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, NameNode.stateChangeLog.debug( "BLOCK* NameSystem.abandonBlock: {} is removed from pendingCreates", b); } finally { @@ -144,7 +154,7 @@ index 
277cc42222f..75766bcd696 100644 } getEditLog().logSync(); } -@@ -3221,10 +3226,11 @@ INodeFile checkLease(INodesInPath iip, String holder, long fileId) +@@ -3221,10 +3226,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, boolean completeFile(final String src, String holder, ExtendedBlock last, long fileId) throws IOException { @@ -157,7 +167,7 @@ index 277cc42222f..75766bcd696 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3232,7 +3238,7 @@ boolean completeFile(final String src, String holder, +@@ -3232,7 +3238,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, success = FSDirWriteFileOp.completeFile(this, pc, src, holder, last, fileId); } finally { @@ -166,7 +176,7 @@ index 277cc42222f..75766bcd696 100644 } getEditLog().logSync(); if (success) { -@@ -3666,10 +3672,11 @@ void setQuota(String src, long nsQuota, long ssQuota, StorageType type) +@@ -3666,10 +3672,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, */ void fsync(String src, long fileId, String clientName, long lastBlockLength) throws IOException { @@ -179,7 +189,7 @@ index 277cc42222f..75766bcd696 100644 writeLock(); try { checkOperation(OperationCategory.WRITE); -@@ -3683,7 +3690,7 @@ void fsync(String src, long fileId, String clientName, long lastBlockLength) +@@ -3683,7 +3690,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean, } FSDirWriteFileOp.persistBlocks(dir, src, pendingFile, false); } finally { diff --git a/hadoop/stackable/patches/3.4.0/005-cyclonedx-plugin.patch b/hadoop/stackable/patches/3.4.0/0005-Update-CycloneDX-plugin.patch similarity index 82% rename from hadoop/stackable/patches/3.4.0/005-cyclonedx-plugin.patch rename to hadoop/stackable/patches/3.4.0/0005-Update-CycloneDX-plugin.patch index 8eafa502f..57280c994 100644 --- a/hadoop/stackable/patches/3.4.0/005-cyclonedx-plugin.patch +++ b/hadoop/stackable/patches/3.4.0/0005-Update-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ 
+From c0f56826d39506f5edb1c4b7211f118de2324fca Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Update CycloneDX plugin + +--- + pom.xml | 10 ++++++---- + 1 file changed, 6 insertions(+), 4 deletions(-) + diff --git a/pom.xml b/pom.xml -index 0a7a478..3ceaf7a 100644 +index 0a7a478ad9..3ceaf7a4ad 100644 --- a/pom.xml +++ b/pom.xml @@ -118,7 +118,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x diff --git a/hadoop/stackable/patches/3.4.0/006-HADOOP-18516-3.4.0.patch b/hadoop/stackable/patches/3.4.0/0006-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch similarity index 92% rename from hadoop/stackable/patches/3.4.0/006-HADOOP-18516-3.4.0.patch rename to hadoop/stackable/patches/3.4.0/0006-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch index 6380b97f5..29d550ffb 100644 --- a/hadoop/stackable/patches/3.4.0/006-HADOOP-18516-3.4.0.patch +++ b/hadoop/stackable/patches/3.4.0/0006-HADOOP-18516-ABFS-Authentication-Support-Fixed-SAS-T.patch @@ -1,16 +1,35 @@ -commit 07f2c8e4029c9043a8c0dc62659f50aaf70a04d3 -Author: Anuj Modi <128447756+anujmodi2021@users.noreply.github.com> -Date: Fri Jun 7 19:03:23 2024 +0530 +From eaf09b92eeea2d52656529b38d778e2225d27e0e Mon Sep 17 00:00:00 2001 +From: Anuj Modi <128447756+anujmodi2021@users.noreply.github.com> +Date: Fri, 7 Jun 2024 19:03:23 +0530 +Subject: HADOOP-18516: [ABFS][Authentication] Support Fixed SAS Token for ABFS + Authentication (#6552) - HADOOP-18516: [ABFS][Authentication] Support Fixed SAS Token for ABFS Authentication (#6552) - - Contributed by Anuj Modi +Contributed by Anuj Modi +--- + .../hadoop/fs/azurebfs/AbfsConfiguration.java | 75 +++++--- + .../fs/azurebfs/AzureBlobFileSystem.java | 3 +- + .../fs/azurebfs/AzureBlobFileSystemStore.java | 2 +- + .../azurebfs/constants/ConfigurationKeys.java | 5 +- + .../fs/azurebfs/services/AbfsClient.java | 9 +- + .../services/FixedSASTokenProvider.java | 65 +++++++ + 
.../hadoop-azure/src/site/markdown/abfs.md | 149 +++++++++++--- + .../azurebfs/AbstractAbfsIntegrationTest.java | 23 ++- + .../ITestAzureBlobFileSystemChooseSAS.java | 182 ++++++++++++++++++ + .../MockDelegationSASTokenProvider.java | 2 +- + .../extensions/MockSASTokenProvider.java | 16 +- + .../azurebfs/utils/AccountSASGenerator.java | 103 ++++++++++ + .../fs/azurebfs/utils/SASGenerator.java | 34 +++- + .../azurebfs/utils/ServiceSASGenerator.java | 15 +- + 14 files changed, 611 insertions(+), 72 deletions(-) + create mode 100644 hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java + create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java + create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java -index eff8c0860544..5c14a4af5c71 100644 +index eff8c08605..5c14a4af5c 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java -@@ -22,6 +22,7 @@ +@@ -22,6 +22,7 @@ import java.io.IOException; import java.lang.reflect.Field; import org.apache.hadoop.classification.VisibleForTesting; @@ -18,7 +37,7 @@ index eff8c0860544..5c14a4af5c71 100644 import org.apache.hadoop.util.Preconditions; import org.apache.commons.lang3.StringUtils; -@@ -941,33 +942,63 @@ public AccessTokenProvider getTokenProvider() throws TokenAccessProviderExceptio +@@ -941,33 +942,63 @@ public class AbfsConfiguration{ } } @@ -101,7 +120,7 @@ index eff8c0860544..5c14a4af5c71 100644 } } -@@ -980,14 +1011,14 @@ public EncryptionContextProvider createEncryptionContextProvider() { +@@ 
-980,14 +1011,14 @@ public class AbfsConfiguration{ Class encryptionContextClass = getAccountSpecificClass(configKey, null, EncryptionContextProvider.class); @@ -120,10 +139,10 @@ index eff8c0860544..5c14a4af5c71 100644 LOG.trace("{} init complete", encryptionContextClass.getName()); return encryptionContextProvider; diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java -index b234f76d5d9d..0b6ed90658f4 100644 +index b234f76d5d..0b6ed90658 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java -@@ -1308,10 +1308,9 @@ public void access(final Path path, final FsAction mode) throws IOException { +@@ -1308,10 +1308,9 @@ public class AzureBlobFileSystem extends FileSystem /** * Incrementing exists() calls from superclass for statistic collection. 
@@ -136,10 +155,10 @@ index b234f76d5d9d..0b6ed90658f4 100644 @Override public boolean exists(Path f) throws IOException { diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java -index d9693dd7e1cd..dc4e585a7b5a 100644 +index d9693dd7e1..dc4e585a7b 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java -@@ -1728,7 +1728,7 @@ private void initializeClient(URI uri, String fileSystemName, +@@ -1728,7 +1728,7 @@ public class AzureBlobFileSystemStore implements Closeable, ListingSupport { creds = new SharedKeyCredentials(accountName.substring(0, dotIndex), abfsConfiguration.getStorageAccountKey()); } else if (authType == AuthType.SAS) { @@ -149,10 +168,10 @@ index d9693dd7e1cd..dc4e585a7b5a 100644 } else { LOG.trace("Fetching token provider"); diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java -index a27c7570265d..b85e8c31d56f 100644 +index a27c757026..b85e8c31d5 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java -@@ -269,7 +269,10 @@ public static String accountProperty(String property, String account) { +@@ -269,7 +269,10 @@ public final class ConfigurationKeys { public static final String FS_AZURE_ENABLE_DELEGATION_TOKEN = "fs.azure.enable.delegation.token"; public static final String FS_AZURE_DELEGATION_TOKEN_PROVIDER_TYPE = "fs.azure.delegation.token.provider.type"; @@ -165,10 +184,10 @@ index 
a27c7570265d..b85e8c31d56f 100644 /** For performance, AbfsInputStream/AbfsOutputStream re-use SAS tokens until the expiry is within this number of seconds. **/ diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java -index 8eeb548f500b..a013af0a35d4 100644 +index 8eeb548f50..a013af0a35 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java -@@ -945,6 +945,7 @@ public AbfsRestOperation flush(final String path, final long position, +@@ -945,6 +945,7 @@ public class AbfsClient implements Closeable { abfsUriQueryBuilder.addQuery(QUERY_PARAM_POSITION, Long.toString(position)); abfsUriQueryBuilder.addQuery(QUERY_PARAM_RETAIN_UNCOMMITTED_DATA, String.valueOf(retainUncommittedData)); abfsUriQueryBuilder.addQuery(QUERY_PARAM_CLOSE, String.valueOf(isClose)); @@ -176,7 +195,7 @@ index 8eeb548f500b..a013af0a35d4 100644 // AbfsInputStream/AbfsOutputStream reuse SAS tokens for better performance String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.WRITE_OPERATION, abfsUriQueryBuilder, cachedSasToken); -@@ -1035,6 +1036,7 @@ public AbfsRestOperation read(final String path, +@@ -1035,6 +1036,7 @@ public class AbfsClient implements Closeable { requestHeaders.add(new AbfsHttpHeader(IF_MATCH, eTag)); final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder(); @@ -184,7 +203,7 @@ index 8eeb548f500b..a013af0a35d4 100644 // AbfsInputStream/AbfsOutputStream reuse SAS tokens for better performance String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.READ_OPERATION, abfsUriQueryBuilder, cachedSasToken); -@@ -1325,16 +1327,17 @@ private String appendSASTokenToQuery(String path, +@@ -1325,16 +1327,17 @@ public class AbfsClient implements 
Closeable { sasToken = cachedSasToken; LOG.trace("Using cached SAS token."); } @@ -207,7 +226,7 @@ index 8eeb548f500b..a013af0a35d4 100644 } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java new file mode 100644 -index 000000000000..1a2614dcc1d2 +index 0000000000..1a2614dcc1 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/FixedSASTokenProvider.java @@ -0,0 +1,65 @@ @@ -277,7 +296,7 @@ index 000000000000..1a2614dcc1d2 + } +} diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md -index 9021f3e3b1f9..78094b381391 100644 +index 9021f3e3b1..78094b3813 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md @@ -12,7 +12,7 @@ @@ -463,10 +482,10 @@ index 9021f3e3b1f9..78094b381391 100644 ## Technical notes diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java -index 66a1b22da96b..c1750c848c17 100644 +index 66a1b22da9..c1750c848c 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java -@@ -284,13 +284,30 @@ public void loadConfiguredFileSystem() throws Exception { +@@ -284,13 +284,30 @@ public abstract class AbstractAbfsIntegrationTest extends useConfiguredFileSystem = true; } @@ -502,7 +521,7 @@ index 66a1b22da96b..c1750c848c17 100644 } diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java 
b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java new file mode 100644 -index 000000000000..d8db901151fe +index 0000000000..d8db901151 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemChooseSAS.java @@ -0,0 +1,182 @@ @@ -689,10 +708,10 @@ index 000000000000..d8db901151fe + } +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java -index cf7d51da4c44..d1e5dd4519d0 100644 +index cf7d51da4c..d1e5dd4519 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java -@@ -40,7 +40,7 @@ +@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.azurebfs.utils.SASGenerator; import org.apache.hadoop.security.AccessControlException; /** @@ -702,10 +721,10 @@ index cf7d51da4c44..d1e5dd4519d0 100644 public class MockDelegationSASTokenProvider implements SASTokenProvider { diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java -index 50ac20970f45..3fda128a9c01 100644 +index 50ac20970f..3fda128a9c 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java -@@ -20,7 +20,11 @@ +@@ -20,7 +20,11 @@ package org.apache.hadoop.fs.azurebfs.extensions; import java.io.IOException; @@ -717,7 +736,7 @@ index 50ac20970f45..3fda128a9c01 100644 import 
org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.fs.azurebfs.AbfsConfiguration; -@@ -28,17 +32,25 @@ +@@ -28,17 +32,25 @@ import org.apache.hadoop.fs.azurebfs.utils.Base64; import org.apache.hadoop.fs.azurebfs.utils.ServiceSASGenerator; /** @@ -747,7 +766,7 @@ index 50ac20970f45..3fda128a9c01 100644 @Override diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java new file mode 100644 -index 000000000000..2af741b7a4c1 +index 0000000000..2af741b7a4 --- /dev/null +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/AccountSASGenerator.java @@ -0,0 +1,103 @@ @@ -855,10 +874,10 @@ index 000000000000..2af741b7a4c1 + } +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java -index 2e9289d8d44c..a80ddac5ed36 100644 +index 2e9289d8d4..a80ddac5ed 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/SASGenerator.java -@@ -29,6 +29,10 @@ +@@ -29,6 +29,10 @@ import javax.crypto.spec.SecretKeySpec; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -869,7 +888,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 /** * Test SAS generator. 
*/ -@@ -54,10 +58,8 @@ public String toString() { +@@ -54,10 +58,8 @@ public abstract class SASGenerator { protected static final Logger LOG = LoggerFactory.getLogger(SASGenerator.class); public static final Duration FIVE_MINUTES = Duration.ofMinutes(5); public static final Duration ONE_DAY = Duration.ofDays(1); @@ -882,7 +901,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 private Mac hmacSha256; private byte[] key; -@@ -68,7 +70,7 @@ private SASGenerator() { +@@ -68,7 +70,7 @@ public abstract class SASGenerator { /** * Called by subclasses to initialize the cryptographic SHA-256 HMAC provider. @@ -891,7 +910,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 */ protected SASGenerator(byte[] key) { this.key = key; -@@ -85,6 +87,26 @@ private void initializeMac() { +@@ -85,6 +87,26 @@ public abstract class SASGenerator { } } @@ -918,7 +937,7 @@ index 2e9289d8d44c..a80ddac5ed36 100644 protected String computeHmac256(final String stringToSign) { byte[] utf8Bytes; try { -@@ -98,4 +120,4 @@ protected String computeHmac256(final String stringToSign) { +@@ -98,4 +120,4 @@ public abstract class SASGenerator { } return Base64.encode(hmac); } @@ -926,10 +945,10 @@ index 2e9289d8d44c..a80ddac5ed36 100644 \ No newline at end of file +} diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java -index 24a1cea255b4..0ae5239e8f2a 100644 +index 24a1cea255..0ae5239e8f 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/ServiceSASGenerator.java -@@ -20,23 +20,26 @@ +@@ -20,23 +20,26 @@ package org.apache.hadoop.fs.azurebfs.utils; import java.time.Instant; @@ -960,7 +979,7 @@ index 24a1cea255b4..0ae5239e8f2a 100644 String sp = "rcwdl"; String sv = AuthenticationVersion.Feb20.toString(); String 
sr = "c"; -@@ -66,7 +69,7 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv +@@ -66,7 +69,7 @@ public class ServiceSASGenerator extends SASGenerator { sb.append("\n"); sb.append(se); sb.append("\n"); @@ -969,7 +988,7 @@ index 24a1cea255b4..0ae5239e8f2a 100644 sb.append("/blob/"); sb.append(accountName); sb.append("/"); -@@ -93,4 +96,4 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv +@@ -93,4 +96,4 @@ public class ServiceSASGenerator extends SASGenerator { LOG.debug("Service SAS stringToSign: " + stringToSign.replace("\n", ".")); return computeHmac256(stringToSign); } diff --git a/hadoop/stackable/patches/3.4.0/patchable.toml b/hadoop/stackable/patches/3.4.0/patchable.toml new file mode 100644 index 000000000..ef364542d --- /dev/null +++ b/hadoop/stackable/patches/3.4.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hadoop.git" +base = "bd8b77f398f626bb7791783192ee7a5dfaeec760" diff --git a/hbase/stackable/patches/2.4.18/01-HBASE-27103.patch b/hbase/stackable/patches/2.4.18/0001-HBASE-27103-Fixes-HBase-build-on-arm64.patch similarity index 77% rename from hbase/stackable/patches/2.4.18/01-HBASE-27103.patch rename to hbase/stackable/patches/2.4.18/0001-HBASE-27103-Fixes-HBase-build-on-arm64.patch index a50137ee7..06a625ee3 100644 --- a/hbase/stackable/patches/2.4.18/01-HBASE-27103.patch +++ b/hbase/stackable/patches/2.4.18/0001-HBASE-27103-Fixes-HBase-build-on-arm64.patch @@ -1,10 +1,10 @@ -HBASE-27103 - fixes HBase build - -From: Lars Francke - +From 393df760d7beb6735e6b788501ddeb5f60615a4a Mon Sep 17 00:00:00 2001 +From: Maximilian Wittich +Date: Fri, 15 Dec 2023 13:14:28 +0100 +Subject: HBASE-27103: Fixes HBase build on arm64 --- - .../src/main/resources/supplemental-models.xml | 14 ++++++++++++++ + .../src/main/resources/supplemental-models.xml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git 
a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml diff --git a/hbase/stackable/patches/2.4.18/02-HBASE-28242.patch b/hbase/stackable/patches/2.4.18/0002-HBASE-28242-Updates-async-profiler-support.patch similarity index 98% rename from hbase/stackable/patches/2.4.18/02-HBASE-28242.patch rename to hbase/stackable/patches/2.4.18/0002-HBASE-28242-Updates-async-profiler-support.patch index 6ccde08fa..e1a0b89ed 100644 --- a/hbase/stackable/patches/2.4.18/02-HBASE-28242.patch +++ b/hbase/stackable/patches/2.4.18/0002-HBASE-28242-Updates-async-profiler-support.patch @@ -1,10 +1,10 @@ -HBASE-28242 - Updates async-profiler support - -From: Lars Francke - +From 5e26329f7b75e2ee2aa797f7bdcf3524bd45e491 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: HBASE-28242: Updates async-profiler support --- - .../apache/hadoop/hbase/http/ProfileServlet.java | 205 ++++++++++++-------- + .../hadoop/hbase/http/ProfileServlet.java | 205 +++++++++++------- 1 file changed, 121 insertions(+), 84 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java diff --git a/hbase/stackable/patches/2.4.18/03-HBASE-28379.patch b/hbase/stackable/patches/2.4.18/0003-HBASE-28379-Upgrade-thirdparty-deps.patch similarity index 89% rename from hbase/stackable/patches/2.4.18/03-HBASE-28379.patch rename to hbase/stackable/patches/2.4.18/0003-HBASE-28379-Upgrade-thirdparty-deps.patch index 3a99c3be4..c96659868 100644 --- a/hbase/stackable/patches/2.4.18/03-HBASE-28379.patch +++ b/hbase/stackable/patches/2.4.18/0003-HBASE-28379-Upgrade-thirdparty-deps.patch @@ -1,12 +1,12 @@ -HBASE-28379 Upgrade thirdparty dep to 4.1.6 - -From: Lars Francke - +From 8720a56fe1727d3de52e937e9df1631ed7b3c476 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 11 Jul 2024 10:12:51 
+0200 +Subject: HBASE-28379: Upgrade thirdparty deps --- - hbase-protocol-shaded/pom.xml | 2 +- - hbase-shaded/pom.xml | 2 +- - pom.xml | 16 +++++++++++++--- + hbase-protocol-shaded/pom.xml | 2 +- + hbase-shaded/pom.xml | 2 +- + pom.xml | 16 +++++++++++++--- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml diff --git a/hbase/stackable/patches/2.4.18/04-HBASE-28511.patch b/hbase/stackable/patches/2.4.18/0004-HBASE-28511-Update-hbase-thirdparty-to-4.1.7.patch similarity index 91% rename from hbase/stackable/patches/2.4.18/04-HBASE-28511.patch rename to hbase/stackable/patches/2.4.18/0004-HBASE-28511-Update-hbase-thirdparty-to-4.1.7.patch index b79fe8124..190fe6cfc 100644 --- a/hbase/stackable/patches/2.4.18/04-HBASE-28511.patch +++ b/hbase/stackable/patches/2.4.18/0004-HBASE-28511-Update-hbase-thirdparty-to-4.1.7.patch @@ -1,16 +1,16 @@ -HBASE-28511 Update hbase-thirdparty to 4.1.7 - -From: Lars Francke - +From 92521e5f2798dbb21af9af56f48eb649f97e48ac Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 11 Jul 2024 10:12:51 +0200 +Subject: HBASE-28511: Update hbase-thirdparty to 4.1.7 --- - .../hadoop/hbase/security/EncryptionUtil.java | 4 ++-- - .../hadoop/hbase/shaded/protobuf/ProtobufUtil.java | 2 +- - .../mapreduce/TableSnapshotInputFormatImpl.java | 2 +- - hbase-protocol-shaded/pom.xml | 2 +- - .../hadoop/hbase/io/hfile/FixedFileTrailer.java | 2 +- - .../hbase/zookeeper/MasterAddressTracker.java | 2 +- - pom.xml | 8 ++++---- + .../org/apache/hadoop/hbase/security/EncryptionUtil.java | 4 ++-- + .../apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java | 2 +- + .../hbase/mapreduce/TableSnapshotInputFormatImpl.java | 2 +- + hbase-protocol-shaded/pom.xml | 2 +- + .../apache/hadoop/hbase/io/hfile/FixedFileTrailer.java | 2 +- + .../hadoop/hbase/zookeeper/MasterAddressTracker.java | 2 +- + pom.xml | 8 ++++---- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java diff --git a/hbase/stackable/patches/2.4.18/05-patch-updates.patch b/hbase/stackable/patches/2.4.18/0005-Update-all-available-dependencies-to-new-patch-versi.patch similarity index 95% rename from hbase/stackable/patches/2.4.18/05-patch-updates.patch rename to hbase/stackable/patches/2.4.18/0005-Update-all-available-dependencies-to-new-patch-versi.patch index 9e7b4307e..dc0199d0c 100644 --- a/hbase/stackable/patches/2.4.18/05-patch-updates.patch +++ b/hbase/stackable/patches/2.4.18/0005-Update-all-available-dependencies-to-new-patch-versi.patch @@ -1,10 +1,10 @@ -Updates all available dependencies to new patch versions - -From: Lars Francke - +From 6acfff9441d96c18c2a1ec7c0d0c40aec8e94b2f Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 11 Jul 2024 10:12:51 +0200 +Subject: Update all available dependencies to new patch versions --- - pom.xml | 46 +++++++++++++++++++++++----------------------- + pom.xml | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/pom.xml b/pom.xml diff --git a/hbase/stackable/patches/2.4.18/06-patch-cyclonedx-plugin.patch b/hbase/stackable/patches/2.4.18/0006-Bump-cyclonedx-maven-plugin-to-version-2.8.0-and-twe.patch similarity index 66% rename from hbase/stackable/patches/2.4.18/06-patch-cyclonedx-plugin.patch rename to hbase/stackable/patches/2.4.18/0006-Bump-cyclonedx-maven-plugin-to-version-2.8.0-and-twe.patch index 3b7fff078..78a5e3ee1 100644 --- a/hbase/stackable/patches/2.4.18/06-patch-cyclonedx-plugin.patch +++ b/hbase/stackable/patches/2.4.18/0006-Bump-cyclonedx-maven-plugin-to-version-2.8.0-and-twe.patch @@ -1,14 +1,15 @@ -Bumps cyclonedx-maven-plugin to version 2.8.0 and tweaks its configuration - -From: dervoeti - +From 2aa0caa6a2c918f9f3bf21e9974f10fff1e1acc1 Mon Sep 17 00:00:00 2001 +From: Lukas 
Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Bump cyclonedx-maven-plugin to version 2.8.0 and tweak its + configuration --- - pom.xml | 6 +++++- + pom.xml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml -index 8bd80e81e2..a437d5c819 100755 +index 8bd80e81e2..58c29a9d90 100755 --- a/pom.xml +++ b/pom.xml @@ -3011,7 +3011,11 @@ diff --git a/hbase/stackable/patches/2.4.18/patchable.toml b/hbase/stackable/patches/2.4.18/patchable.toml new file mode 100644 index 000000000..e8de3270a --- /dev/null +++ b/hbase/stackable/patches/2.4.18/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hbase.git" +base = "a1767f4d76859c0068720a6c1e5cb78282ebfe1e" diff --git a/hbase/stackable/patches/2.4.18/series b/hbase/stackable/patches/2.4.18/series deleted file mode 100644 index f8b376856..000000000 --- a/hbase/stackable/patches/2.4.18/series +++ /dev/null @@ -1,7 +0,0 @@ -# This series applies on Git commit a1767f4d76859c0068720a6c1e5cb78282ebfe1e -01-HBASE-27103.patch -02-HBASE-28242.patch -03-HBASE-28379.patch -04-HBASE-28511.patch -05-patch-updates.patch -06-patch-cyclonedx-plugin.patch diff --git a/hbase/stackable/patches/2.6.0/01-HBASE-28242.patch b/hbase/stackable/patches/2.6.0/0001-HBASE-28242-Updates-async-profiler-support.patch similarity index 98% rename from hbase/stackable/patches/2.6.0/01-HBASE-28242.patch rename to hbase/stackable/patches/2.6.0/0001-HBASE-28242-Updates-async-profiler-support.patch index 686bbac13..fc7be53c9 100644 --- a/hbase/stackable/patches/2.6.0/01-HBASE-28242.patch +++ b/hbase/stackable/patches/2.6.0/0001-HBASE-28242-Updates-async-profiler-support.patch @@ -1,10 +1,10 @@ -HBASE-28242 - Updates async-profiler support - -From: Lars Francke - +From 5dedf99076578ec2d1a00f87b153ec7c04a40903 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: HBASE-28242: Updates async-profiler support --- - .../apache/hadoop/hbase/http/ProfileServlet.java | 
205 ++++++++++++-------- + .../hadoop/hbase/http/ProfileServlet.java | 205 +++++++++++------- 1 file changed, 121 insertions(+), 84 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java diff --git a/hbase/stackable/patches/2.6.0/02-HBASE-28567.patch b/hbase/stackable/patches/2.6.0/0002-HBASE-28567-Race-condition-causes-MetaRegionLocation.patch similarity index 75% rename from hbase/stackable/patches/2.6.0/02-HBASE-28567.patch rename to hbase/stackable/patches/2.6.0/0002-HBASE-28567-Race-condition-causes-MetaRegionLocation.patch index dd669c6d4..5769e7c25 100644 --- a/hbase/stackable/patches/2.6.0/02-HBASE-28567.patch +++ b/hbase/stackable/patches/2.6.0/0002-HBASE-28567-Race-condition-causes-MetaRegionLocation.patch @@ -1,10 +1,11 @@ -HBASE-28567. +From d0b4d835981de37250ad076298ef236c411b7ce0 Mon Sep 17 00:00:00 2001 +From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com> +Date: Fri, 12 Jul 2024 17:30:12 -0400 +Subject: HBASE-28567: Race condition causes MetaRegionLocationCache to never + set watcher to populate meta location -From: Lars Francke - -Race condition causes MetaRegionLocationCache to never set watcher to populate meta location --- - .../apache/hadoop/hbase/zookeeper/ZKWatcher.java | 5 +++++ + .../java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java diff --git a/hbase/stackable/patches/2.6.0/03-patch-updates.patch b/hbase/stackable/patches/2.6.0/0003-Update-all-dependencies-which-have-a-new-patch-updat.patch similarity index 94% rename from hbase/stackable/patches/2.6.0/03-patch-updates.patch rename to hbase/stackable/patches/2.6.0/0003-Update-all-dependencies-which-have-a-new-patch-updat.patch index 
001580367..a8799977e 100644 --- a/hbase/stackable/patches/2.6.0/03-patch-updates.patch +++ b/hbase/stackable/patches/2.6.0/0003-Update-all-dependencies-which-have-a-new-patch-updat.patch @@ -1,10 +1,10 @@ -Update all dependencies which have a new patch update available. - -From: Lars Francke - +From fea1d27160dc1e8f53509ae0f57518404d16dae0 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 11 Jul 2024 10:12:51 +0200 +Subject: Update all dependencies which have a new patch update available. --- - pom.xml | 40 ++++++++++++++++++++-------------------- + pom.xml | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/pom.xml b/pom.xml diff --git a/hbase/stackable/patches/2.6.0/04-include-dataformat-xml.patch b/hbase/stackable/patches/2.6.0/0004-Include-jackson-dataformat-xml.patch similarity index 87% rename from hbase/stackable/patches/2.6.0/04-include-dataformat-xml.patch rename to hbase/stackable/patches/2.6.0/0004-Include-jackson-dataformat-xml.patch index 67d63af2c..6fe1ede4d 100644 --- a/hbase/stackable/patches/2.6.0/04-include-dataformat-xml.patch +++ b/hbase/stackable/patches/2.6.0/0004-Include-jackson-dataformat-xml.patch @@ -1,13 +1,14 @@ -Include jackson-dataformat-xml. - -From: Lars Francke +From 5aaae3665b8e50268810c5b3f443a204231b5d6a Mon Sep 17 00:00:00 2001 +From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com> +Date: Fri, 12 Jul 2024 17:30:12 -0400 +Subject: Include jackson-dataformat-xml. This is needed for XmlLayout to work so our structured logging works. It is an optional dependency of log4j2 so we need to make sure to include it. 
--- - hbase-assembly/pom.xml | 5 +++++ - pom.xml | 12 ++++++++++++ + hbase-assembly/pom.xml | 5 +++++ + pom.xml | 12 ++++++++++++ 2 files changed, 17 insertions(+) diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml diff --git a/hbase/stackable/patches/2.6.0/05-patch-cyclonedx-plugin.patch b/hbase/stackable/patches/2.6.0/0005-Bump-cyclonedx-maven-plugin-to-version-2.8.0-and-twe.patch similarity index 57% rename from hbase/stackable/patches/2.6.0/05-patch-cyclonedx-plugin.patch rename to hbase/stackable/patches/2.6.0/0005-Bump-cyclonedx-maven-plugin-to-version-2.8.0-and-twe.patch index 6742011a8..65abd8c09 100644 --- a/hbase/stackable/patches/2.6.0/05-patch-cyclonedx-plugin.patch +++ b/hbase/stackable/patches/2.6.0/0005-Bump-cyclonedx-maven-plugin-to-version-2.8.0-and-twe.patch @@ -1,8 +1,18 @@ +From bcbdb378846206c42a602107e380c29af1ac9163 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Bump cyclonedx-maven-plugin to version 2.8.0 and tweak its + configuration + +--- + pom.xml | 6 +++++- + 1 file changed, 5 insertions(+), 1 deletion(-) + diff --git a/pom.xml b/pom.xml -index 918cdaa..2a83794 100644 +index 819e021d86..0bd6a69703 100644 --- a/pom.xml +++ b/pom.xml -@@ -3218,7 +3218,11 @@ +@@ -3230,7 +3230,11 @@ org.cyclonedx cyclonedx-maven-plugin diff --git a/hbase/stackable/patches/2.6.0/06-CVE-2024-36114-bump-aircompressor-0-27.patch b/hbase/stackable/patches/2.6.0/0006-Fix-CVE-2024-36114.patch similarity index 86% rename from hbase/stackable/patches/2.6.0/06-CVE-2024-36114-bump-aircompressor-0-27.patch rename to hbase/stackable/patches/2.6.0/0006-Fix-CVE-2024-36114.patch index d58164180..c0c080f27 100644 --- a/hbase/stackable/patches/2.6.0/06-CVE-2024-36114-bump-aircompressor-0-27.patch +++ b/hbase/stackable/patches/2.6.0/0006-Fix-CVE-2024-36114.patch @@ -1,4 +1,8 @@ -Fix CVE-2024-36114 +From 02affcbe2f4aa7a22493c9aaa72602c5520bf2a9 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Mon, 11 Nov 2024 
10:00:15 +0100 +Subject: Fix CVE-2024-36114 + see https://github.com/stackabletech/vulnerabilities/issues/834 Aircompressor is a library with ports of the Snappy, LZO, LZ4, and @@ -17,6 +21,9 @@ have been fixed. When decompressing data from untrusted users, this can be exploited for a denial-of-service attack by crashing the JVM, or to leak other sensitive information from the Java process. There are no known workarounds for this issue. +--- + pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 0bd6a69703..50948c2746 100644 diff --git a/hbase/stackable/patches/2.6.0/patchable.toml b/hbase/stackable/patches/2.6.0/patchable.toml new file mode 100644 index 000000000..0e7f1956f --- /dev/null +++ b/hbase/stackable/patches/2.6.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hbase.git" +base = "de99f8754135ea69adc39da48d2bc2b2710a5366" diff --git a/hbase/stackable/patches/2.6.0/series b/hbase/stackable/patches/2.6.0/series deleted file mode 100644 index f64737007..000000000 --- a/hbase/stackable/patches/2.6.0/series +++ /dev/null @@ -1,7 +0,0 @@ -# This series applies on Git commit de99f8754135ea69adc39da48d2bc2b2710a5366 -01-HBASE-28242.patch -02-HBASE-28567.patch -03-patch-updates.patch -04-include-dataformat-xml.patch -05-patch-cyclonedx-plugin.patch -06-CVE-2024-36114-bump-aircompressor-0-27.patch diff --git a/hbase/stackable/patches/2.6.1/01-HBASE-28242.patch b/hbase/stackable/patches/2.6.1/0001-HBASE-28242-Updates-async-profiler-support.patch similarity index 98% rename from hbase/stackable/patches/2.6.1/01-HBASE-28242.patch rename to hbase/stackable/patches/2.6.1/0001-HBASE-28242-Updates-async-profiler-support.patch index 686bbac13..2901aaf99 100644 --- a/hbase/stackable/patches/2.6.1/01-HBASE-28242.patch +++ b/hbase/stackable/patches/2.6.1/0001-HBASE-28242-Updates-async-profiler-support.patch @@ -1,10 +1,10 @@ -HBASE-28242 - Updates async-profiler support - -From: Lars Francke - +From 
12fa4ad4f12adef53c4e9d782b18152dfb3f7075 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Tue, 6 Feb 2024 16:10:54 +0100 +Subject: HBASE-28242: Updates async-profiler support --- - .../apache/hadoop/hbase/http/ProfileServlet.java | 205 ++++++++++++-------- + .../hadoop/hbase/http/ProfileServlet.java | 205 +++++++++++------- 1 file changed, 121 insertions(+), 84 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java diff --git a/hbase/stackable/patches/2.6.1/02-patch-updates.patch b/hbase/stackable/patches/2.6.1/0002-Update-all-dependencies-which-have-a-new-patch-updat.patch similarity index 93% rename from hbase/stackable/patches/2.6.1/02-patch-updates.patch rename to hbase/stackable/patches/2.6.1/0002-Update-all-dependencies-which-have-a-new-patch-updat.patch index 380b10741..eba9d1c5d 100644 --- a/hbase/stackable/patches/2.6.1/02-patch-updates.patch +++ b/hbase/stackable/patches/2.6.1/0002-Update-all-dependencies-which-have-a-new-patch-updat.patch @@ -1,5 +1,14 @@ +From 8b8b17064695f7c631ef3abd79351d756b155dd1 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 11 Jul 2024 10:12:51 +0200 +Subject: Update all dependencies which have a new patch update available. 
+ +--- + pom.xml | 48 ++++++++++++++++++++++++------------------------ + 1 file changed, 24 insertions(+), 24 deletions(-) + diff --git a/pom.xml b/pom.xml -index 01123cb..3734fb1 100644 +index 01123cb35b..3734fb1142 100644 --- a/pom.xml +++ b/pom.xml @@ -567,7 +567,7 @@ diff --git a/hbase/stackable/patches/2.6.1/03-include-dataformat-xml.patch b/hbase/stackable/patches/2.6.1/0003-Include-jackson-dataformat-xml.patch similarity index 83% rename from hbase/stackable/patches/2.6.1/03-include-dataformat-xml.patch rename to hbase/stackable/patches/2.6.1/0003-Include-jackson-dataformat-xml.patch index 67d63af2c..cf2edd4c7 100644 --- a/hbase/stackable/patches/2.6.1/03-include-dataformat-xml.patch +++ b/hbase/stackable/patches/2.6.1/0003-Include-jackson-dataformat-xml.patch @@ -1,13 +1,14 @@ -Include jackson-dataformat-xml. - -From: Lars Francke +From 056084630f50ba09e9a078aa9152b6c4f8550f3c Mon Sep 17 00:00:00 2001 +From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com> +Date: Fri, 12 Jul 2024 17:30:12 -0400 +Subject: Include jackson-dataformat-xml. This is needed for XmlLayout to work so our structured logging works. It is an optional dependency of log4j2 so we need to make sure to include it. 
--- - hbase-assembly/pom.xml | 5 +++++ - pom.xml | 12 ++++++++++++ + hbase-assembly/pom.xml | 5 +++++ + pom.xml | 12 ++++++++++++ 2 files changed, 17 insertions(+) diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml @@ -27,10 +28,10 @@ index 1564851b85..0786288a84 100644 org.apache.logging.log4j log4j-slf4j-impl diff --git a/pom.xml b/pom.xml -index b420025c6c..819e021d86 100644 +index 3734fb1142..ff1b35986b 100644 --- a/pom.xml +++ b/pom.xml -@@ -1149,6 +1149,11 @@ +@@ -1159,6 +1159,11 @@ log4j-core ${log4j2.version} @@ -42,7 +43,7 @@ index b420025c6c..819e021d86 100644 org.apache.logging.log4j log4j-slf4j-impl -@@ -1159,6 +1164,13 @@ +@@ -1169,6 +1174,13 @@ log4j-1.2-api ${log4j2.version} diff --git a/hbase/stackable/patches/2.6.1/04-patch-cyclonedx-plugin.patch b/hbase/stackable/patches/2.6.1/0004-Bump-cyclonedx-maven-plugin-to-version-2.9.1-and-twe.patch similarity index 57% rename from hbase/stackable/patches/2.6.1/04-patch-cyclonedx-plugin.patch rename to hbase/stackable/patches/2.6.1/0004-Bump-cyclonedx-maven-plugin-to-version-2.9.1-and-twe.patch index 9e417ff3a..a61f5af66 100644 --- a/hbase/stackable/patches/2.6.1/04-patch-cyclonedx-plugin.patch +++ b/hbase/stackable/patches/2.6.1/0004-Bump-cyclonedx-maven-plugin-to-version-2.9.1-and-twe.patch @@ -1,8 +1,18 @@ +From e8cfb60570a094f21d0ac3f13b600180770ad2bd Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Bump cyclonedx-maven-plugin to version 2.9.1 and tweak its + configuration + +--- + pom.xml | 6 +++++- + 1 file changed, 5 insertions(+), 1 deletion(-) + diff --git a/pom.xml b/pom.xml -index 918cdaa..2a83794 100644 +index ff1b35986b..5ce072ab36 100644 --- a/pom.xml +++ b/pom.xml -@@ -3218,7 +3218,11 @@ +@@ -3290,7 +3290,11 @@ org.cyclonedx cyclonedx-maven-plugin diff --git a/hbase/stackable/patches/2.6.1/patchable.toml b/hbase/stackable/patches/2.6.1/patchable.toml new file mode 100644 index 000000000..4a7b15c36 --- /dev/null +++ 
b/hbase/stackable/patches/2.6.1/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hbase.git" +base = "7ed50b4dd742269a78875fb32112215f831284ff" diff --git a/hive/stackable/patches/3.1.3/01-HIVE-26905.patch b/hive/stackable/patches/3.1.3/0001-HIVE-26905-exclude-pentaho-aggdesigner-algorithm.patch similarity index 74% rename from hive/stackable/patches/3.1.3/01-HIVE-26905.patch rename to hive/stackable/patches/3.1.3/0001-HIVE-26905-exclude-pentaho-aggdesigner-algorithm.patch index a61c8c94f..a313ac2fb 100644 --- a/hive/stackable/patches/3.1.3/01-HIVE-26905.patch +++ b/hive/stackable/patches/3.1.3/0001-HIVE-26905-exclude-pentaho-aggdesigner-algorithm.patch @@ -1,10 +1,11 @@ -HIVE-26905 - -From: Lars Francke +From 8fb9b1be9f04a2297349061ab444b2de736e3e3b Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 9 Apr 2024 17:29:07 +0200 +Subject: HIVE-26905: exclude pentaho-aggdesigner-algorithm Backport HIVE-25173 to 3.2.0: Exclude pentaho-aggdesigner-algorithm from upgrade-acid build. 
--- - upgrade-acid/pom.xml | 6 ++++++ + upgrade-acid/pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/upgrade-acid/pom.xml b/upgrade-acid/pom.xml diff --git a/hive/stackable/patches/3.1.3/02-HIVE-21939.patch b/hive/stackable/patches/3.1.3/0002-HIVE-21939-Update-protoc.patch similarity index 84% rename from hive/stackable/patches/3.1.3/02-HIVE-21939.patch rename to hive/stackable/patches/3.1.3/0002-HIVE-21939-Update-protoc.patch index 06b3fa61b..516f02d5d 100644 --- a/hive/stackable/patches/3.1.3/02-HIVE-21939.patch +++ b/hive/stackable/patches/3.1.3/0002-HIVE-21939-Update-protoc.patch @@ -1,10 +1,11 @@ -HIVE-21939 +From 1e55c21569178d5263216494e1e44565a3280849 Mon Sep 17 00:00:00 2001 +From: Sebastian Bernauer +Date: Thu, 11 Apr 2024 17:26:13 +0200 +Subject: HIVE-21939: Update protoc -From: Lars Francke - -protoc:2.5.0 dependence has broken building on aarch64 +protoc:2.5.0 dependency has broken building on aarch64 --- - standalone-metastore/pom.xml | 21 +++++++++++++++++++-- + standalone-metastore/pom.xml | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml diff --git a/hive/stackable/patches/3.1.3/03-HIVE-26522.patch b/hive/stackable/patches/3.1.3/0003-HIVE-26522-Fix-delegation-token-renewal.patch similarity index 95% rename from hive/stackable/patches/3.1.3/03-HIVE-26522.patch rename to hive/stackable/patches/3.1.3/0003-HIVE-26522-Fix-delegation-token-renewal.patch index 58c688429..857e999cc 100644 --- a/hive/stackable/patches/3.1.3/03-HIVE-26522.patch +++ b/hive/stackable/patches/3.1.3/0003-HIVE-26522-Fix-delegation-token-renewal.patch @@ -1,6 +1,7 @@ -HIVE-26522 - -From: Lars Francke +From cf431bb2620eafe9d555576d7b4ae2503890096c Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: HIVE-26522: Fix delegation token renewal Test for HIVE-22033 and backport to 3.1 and 2.3. 
HiveServer2: fix delegation token renewal @@ -8,8 +9,8 @@ HiveServer2: fix delegation token renewal It says "HiveServer2" but the patch is in the metastore component hence I decided to include this patch --- - .../TokenStoreDelegationTokenSecretManager.java | 5 + - ...TestTokenStoreDelegationTokenSecretManager.java | 121 ++++++++++++++++++++ + ...okenStoreDelegationTokenSecretManager.java | 5 +- + ...okenStoreDelegationTokenSecretManager.java | 121 ++++++++++++++++++ 2 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/security/TestTokenStoreDelegationTokenSecretManager.java diff --git a/hive/stackable/patches/3.1.3/04-HIVE-26743.patch b/hive/stackable/patches/3.1.3/0004-HIVE-26743-Backport-resource-leak-fix.patch similarity index 96% rename from hive/stackable/patches/3.1.3/04-HIVE-26743.patch rename to hive/stackable/patches/3.1.3/0004-HIVE-26743-Backport-resource-leak-fix.patch index b017707ab..238e3629b 100644 --- a/hive/stackable/patches/3.1.3/04-HIVE-26743.patch +++ b/hive/stackable/patches/3.1.3/0004-HIVE-26743-Backport-resource-leak-fix.patch @@ -1,6 +1,7 @@ -HIVE-26743 - -From: Lars Francke +From b60efedc7459108335c83613aad5253bb489317c Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: HIVE-26743: Backport resource leak fix backport HIVE-24694 to 3.1.x Early connection close to release server resources during creating @@ -8,7 +9,7 @@ Early connection close to release server resources during creating Stackable note: Strictly speaking this is a HiveServer2 fix but it's in the JDBC part so if anyone reuses the JDBC jar this fix might be useful --- - .../java/org/apache/hive/jdbc/HiveConnection.java | 71 +++++++++++++++----- + .../org/apache/hive/jdbc/HiveConnection.java | 71 ++++++++++++++----- 1 file changed, 53 insertions(+), 18 deletions(-) diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java diff --git a/hive/stackable/patches/3.1.3/05-HIVE-26882.patch b/hive/stackable/patches/3.1.3/0005-HIVE-26882-Allow-transactional-check-of-Table-parame.patch similarity index 95% rename from hive/stackable/patches/3.1.3/05-HIVE-26882.patch rename to hive/stackable/patches/3.1.3/0005-HIVE-26882-Allow-transactional-check-of-Table-parame.patch index 8b0c77d07..754f434c3 100644 --- a/hive/stackable/patches/3.1.3/05-HIVE-26882.patch +++ b/hive/stackable/patches/3.1.3/0005-HIVE-26882-Allow-transactional-check-of-Table-parame.patch @@ -1,20 +1,21 @@ -HIVE-26882 +From 9cdc9b30678cfa48c15df19268fedc48241e7305 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: HIVE-26882: Allow transactional check of Table parameter before + altering the Table -From: Lars Francke - -Allow transactional check of Table parameter before altering the Table --- - .../thrift/gen-cpp/hive_metastore_constants.cpp | 5 + - .../gen/thrift/gen-cpp/hive_metastore_constants.h | 2 - .../metastore/api/hive_metastoreConstants.java | 4 + - .../src/gen/thrift/gen-php/metastore/Types.php | 10 ++ - .../gen/thrift/gen-py/hive_metastore/constants.py | 2 - .../gen/thrift/gen-rb/hive_metastore_constants.rb | 3 + - .../hadoop/hive/metastore/HiveAlterHandler.java | 19 ++++ - .../apache/hadoop/hive/metastore/ObjectStore.java | 28 +++++ - .../org/apache/hadoop/hive/metastore/RawStore.java | 16 +++ - .../src/main/thrift/hive_metastore.thrift | 3 + - .../client/TestTablesCreateDropAlterTruncate.java | 102 ++++++++++++++++++++ + .../gen-cpp/hive_metastore_constants.cpp | 5 +- + .../thrift/gen-cpp/hive_metastore_constants.h | 2 + + .../api/hive_metastoreConstants.java | 4 + + .../gen/thrift/gen-php/metastore/Types.php | 10 +- + .../thrift/gen-py/hive_metastore/constants.py | 2 + + .../thrift/gen-rb/hive_metastore_constants.rb | 3 + + .../hive/metastore/HiveAlterHandler.java | 19 +++- + 
.../hadoop/hive/metastore/ObjectStore.java | 28 ++++- + .../hadoop/hive/metastore/RawStore.java | 16 ++- + .../src/main/thrift/hive_metastore.thrift | 3 + + .../TestTablesCreateDropAlterTruncate.java | 102 ++++++++++++++++++ 11 files changed, 184 insertions(+), 10 deletions(-) diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp diff --git a/hive/stackable/patches/3.1.3/06-HIVE-27508.patch b/hive/stackable/patches/3.1.3/0006-HIVE-27508-Java-11-preparation-system-class-loader-i.patch similarity index 97% rename from hive/stackable/patches/3.1.3/06-HIVE-27508.patch rename to hive/stackable/patches/3.1.3/0006-HIVE-27508-Java-11-preparation-system-class-loader-i.patch index db7920135..7d4308db3 100644 --- a/hive/stackable/patches/3.1.3/06-HIVE-27508.patch +++ b/hive/stackable/patches/3.1.3/0006-HIVE-27508-Java-11-preparation-system-class-loader-i.patch @@ -1,24 +1,26 @@ -HIVE-27508 - -From: Lars Francke +From 3d8d7f1aa023647370e24e46f84e634732b64104 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: HIVE-27508: Java 11 preparation: system class loader is not + URLClassLoader Backport of HIVE-21584 to branch-3.1 HIVE-21584: Java 11 preparation: system class loader is not URLClassLoader --- - .../src/java/org/apache/hive/beeline/Commands.java | 2 - .../org/apache/hadoop/hive/common/JavaUtils.java | 57 +------- - .../hive/llap/daemon/impl/FunctionLocalizer.java | 18 ++- - .../hadoop/hive/ql/exec/AddToClassPathAction.java | 87 ++++++++++++ - .../org/apache/hadoop/hive/ql/exec/Utilities.java | 98 ++++++-------- - .../apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 8 + - .../apache/hadoop/hive/ql/exec/mr/ExecMapper.java | 20 +-- - .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java | 20 --- - .../hive/ql/exec/spark/SparkRecordHandler.java | 29 ++-- - .../hadoop/hive/ql/exec/tez/RecordProcessor.java | 16 -- - 
.../hadoop/hive/ql/session/SessionState.java | 57 ++++---- - .../hive/ql/exec/TestAddToClassPathAction.java | 142 ++++++++++++++++++++ - .../hive/spark/client/SparkClientUtilities.java | 23 +++ - .../hive/metastore/utils/MetaStoreUtils.java | 20 ++- + .../org/apache/hive/beeline/Commands.java | 2 +- + .../apache/hadoop/hive/common/JavaUtils.java | 57 +------ + .../llap/daemon/impl/FunctionLocalizer.java | 18 ++- + .../hive/ql/exec/AddToClassPathAction.java | 87 +++++++++++ + .../apache/hadoop/hive/ql/exec/Utilities.java | 98 ++++++------ + .../hadoop/hive/ql/exec/mr/ExecDriver.java | 8 +- + .../hadoop/hive/ql/exec/mr/ExecMapper.java | 20 +-- + .../hadoop/hive/ql/exec/mr/ExecReducer.java | 20 +-- + .../ql/exec/spark/SparkRecordHandler.java | 29 ++-- + .../hive/ql/exec/tez/RecordProcessor.java | 16 +- + .../hadoop/hive/ql/session/SessionState.java | 57 +++---- + .../ql/exec/TestAddToClassPathAction.java | 142 ++++++++++++++++++ + .../spark/client/SparkClientUtilities.java | 23 ++- + .../hive/metastore/utils/MetaStoreUtils.java | 20 ++- 14 files changed, 386 insertions(+), 211 deletions(-) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/AddToClassPathAction.java create mode 100644 ql/src/test/org/apache/hadoop/hive/ql/exec/TestAddToClassPathAction.java diff --git a/hive/stackable/patches/3.1.3/07-patch-updates.patch b/hive/stackable/patches/3.1.3/0007-Patch-updates.patch similarity index 96% rename from hive/stackable/patches/3.1.3/07-patch-updates.patch rename to hive/stackable/patches/3.1.3/0007-Patch-updates.patch index 0b01c647b..2fb22090a 100644 --- a/hive/stackable/patches/3.1.3/07-patch-updates.patch +++ b/hive/stackable/patches/3.1.3/0007-Patch-updates.patch @@ -1,12 +1,12 @@ -Patch updates - -From: Lars Francke - +From 49ff3be7f56e98b3566590b6d1b9324efbc138e9 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: Patch updates --- - druid-handler/pom.xml | 2 + - pom.xml | 64 
+++++++++++++++++++++--------------------- - standalone-metastore/pom.xml | 26 +++++++++-------- + druid-handler/pom.xml | 2 +- + pom.xml | 64 ++++++++++++++++++------------------ + standalone-metastore/pom.xml | 26 +++++++-------- 3 files changed, 46 insertions(+), 46 deletions(-) diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml diff --git a/hive/stackable/patches/3.1.3/08-logging-dependencies.patch b/hive/stackable/patches/3.1.3/0008-Include-logging-dependencies.patch similarity index 84% rename from hive/stackable/patches/3.1.3/08-logging-dependencies.patch rename to hive/stackable/patches/3.1.3/0008-Include-logging-dependencies.patch index 5c4da4611..ecac69a12 100644 --- a/hive/stackable/patches/3.1.3/08-logging-dependencies.patch +++ b/hive/stackable/patches/3.1.3/0008-Include-logging-dependencies.patch @@ -1,10 +1,11 @@ -Include logging dependencies +From f291a34cb2ae7df4b8e15b5d88dd48dbe0eeb302 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: Include logging dependencies -From: Lars Francke - -This adds dependenciesr required for use of the XmlLayout for logging +This adds dependencies required for use of the XmlLayout for logging --- - standalone-metastore/pom.xml | 20 ++++++++++++++++++-- + standalone-metastore/pom.xml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml diff --git a/hive/stackable/patches/3.1.3/09-maven-warning.patch b/hive/stackable/patches/3.1.3/0009-Gets-rid-of-a-warning-during-build.patch similarity index 70% rename from hive/stackable/patches/3.1.3/09-maven-warning.patch rename to hive/stackable/patches/3.1.3/0009-Gets-rid-of-a-warning-during-build.patch index 56f371243..26e55828c 100644 --- a/hive/stackable/patches/3.1.3/09-maven-warning.patch +++ b/hive/stackable/patches/3.1.3/0009-Gets-rid-of-a-warning-during-build.patch @@ -1,10 +1,10 @@ -Gets rid of a warning during build - -From: 
Lars Francke - +From 005455cb5687d9c5429f538ed69ff77eeba766a5 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: Gets rid of a warning during build --- - serde/pom.xml | 5 ----- + serde/pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/serde/pom.xml b/serde/pom.xml diff --git a/hive/stackable/patches/3.1.3/10-postgres-driver.patch b/hive/stackable/patches/3.1.3/0010-Include-Postgres-driver-by-default.patch similarity index 82% rename from hive/stackable/patches/3.1.3/10-postgres-driver.patch rename to hive/stackable/patches/3.1.3/0010-Include-Postgres-driver-by-default.patch index a718a23df..c9c59efb5 100644 --- a/hive/stackable/patches/3.1.3/10-postgres-driver.patch +++ b/hive/stackable/patches/3.1.3/0010-Include-Postgres-driver-by-default.patch @@ -1,10 +1,10 @@ -Include Postgres driver by default - -From: Lars Francke - +From ae1ed987662ce6a6d065c81423fabdca192af692 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: Include Postgres driver by default --- - standalone-metastore/pom.xml | 9 +++++++-- + standalone-metastore/pom.xml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml diff --git a/hive/stackable/patches/3.1.3/11-cyclonedx-plugin.patch b/hive/stackable/patches/3.1.3/0011-Add-CycloneDX-plugin.patch similarity index 72% rename from hive/stackable/patches/3.1.3/11-cyclonedx-plugin.patch rename to hive/stackable/patches/3.1.3/0011-Add-CycloneDX-plugin.patch index cd7126390..c64fdb8ea 100644 --- a/hive/stackable/patches/3.1.3/11-cyclonedx-plugin.patch +++ b/hive/stackable/patches/3.1.3/0011-Add-CycloneDX-plugin.patch @@ -1,8 +1,17 @@ +From cbe6c05735995b9020f50443a144458eae8e1e97 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + standalone-metastore/pom.xml | 18 ++++++++++++++++++ + 1 file changed, 18 
insertions(+) + diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml -index e36f1e6..eb14e69 100644 +index e2665473af..4f40e28f91 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml -@@ -773,6 +773,24 @@ +@@ -811,6 +811,24 @@ diff --git a/hive/stackable/patches/3.1.3/12-CVE-2024-36114-bump-aircompressor-0-27.patch b/hive/stackable/patches/3.1.3/0012-Fix-CVE-2024-36114.patch similarity index 83% rename from hive/stackable/patches/3.1.3/12-CVE-2024-36114-bump-aircompressor-0-27.patch rename to hive/stackable/patches/3.1.3/0012-Fix-CVE-2024-36114.patch index 5c51a882a..83fe172e9 100644 --- a/hive/stackable/patches/3.1.3/12-CVE-2024-36114-bump-aircompressor-0-27.patch +++ b/hive/stackable/patches/3.1.3/0012-Fix-CVE-2024-36114.patch @@ -1,4 +1,8 @@ -Fix CVE-2024-36114 +From 36e9ce8f6cbaa8739319335f45089173745d6d0f Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Tue, 12 Nov 2024 11:49:57 +0100 +Subject: Fix CVE-2024-36114 + see https://github.com/stackabletech/vulnerabilities/issues/834 Aircompressor is a library with ports of the Snappy, LZO, LZ4, and @@ -17,12 +21,15 @@ have been fixed. When decompressing data from untrusted users, this can be exploited for a denial-of-service attack by crashing the JVM, or to leak other sensitive information from the Java process. There are no known workarounds for this issue. 
+--- + standalone-metastore/pom.xml | 6 ++++++ + 1 file changed, 6 insertions(+) diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml -index e36f1e64f0..7758f71859 100644 +index 4f40e28f91..5441855b29 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml -@@ -93,6 +93,12 @@ +@@ -102,6 +102,12 @@ diff --git a/hive/stackable/patches/3.1.3/patchable.toml b/hive/stackable/patches/3.1.3/patchable.toml new file mode 100644 index 000000000..bf3ab1b59 --- /dev/null +++ b/hive/stackable/patches/3.1.3/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hive.git" +base = "4df4d75bf1e16fe0af75aad0b4179c34c07fc975" diff --git a/hive/stackable/patches/3.1.3/series b/hive/stackable/patches/3.1.3/series deleted file mode 100644 index c05f5d512..000000000 --- a/hive/stackable/patches/3.1.3/series +++ /dev/null @@ -1,13 +0,0 @@ -# This series applies on Git commit 4df4d75bf1e16fe0af75aad0b4179c34c07fc975 -01-HIVE-26905.patch -02-HIVE-21939.patch -03-HIVE-26522.patch -04-HIVE-26743.patch -05-HIVE-26882.patch -06-HIVE-27508.patch -07-patch-updates.patch -08-logging-dependencies.patch -09-maven-warning.patch -10-postgres-driver.patch -11-cyclonedx-plugin.patch -12-CVE-2024-36114-bump-aircompressor-0-27.patch diff --git a/hive/stackable/patches/4.0.0/01-postgres-driver.patch b/hive/stackable/patches/4.0.0/0001-Include-Postgres-driver.patch similarity index 73% rename from hive/stackable/patches/4.0.0/01-postgres-driver.patch rename to hive/stackable/patches/4.0.0/0001-Include-Postgres-driver.patch index 5fc14aca4..50675ac24 100644 --- a/hive/stackable/patches/4.0.0/01-postgres-driver.patch +++ b/hive/stackable/patches/4.0.0/0001-Include-Postgres-driver.patch @@ -1,3 +1,13 @@ +From c5eb86648fe96b048723372024fa7278c9e108db Mon Sep 17 00:00:00 2001 +From: Sebastian Bernauer +Date: Tue, 3 Sep 2024 11:13:24 +0200 +Subject: Include Postgres driver + +--- + standalone-metastore/metastore-server/pom.xml | 1 - + 
standalone-metastore/pom.xml | 1 - + 2 files changed, 2 deletions(-) + diff --git a/standalone-metastore/metastore-server/pom.xml b/standalone-metastore/metastore-server/pom.xml index a8f680928c..7102f1b5ca 100644 --- a/standalone-metastore/metastore-server/pom.xml diff --git a/hive/stackable/patches/4.0.0/02-logging-dependencies.patch b/hive/stackable/patches/4.0.0/0002-Include-logging-dependencies.patch similarity index 57% rename from hive/stackable/patches/4.0.0/02-logging-dependencies.patch rename to hive/stackable/patches/4.0.0/0002-Include-logging-dependencies.patch index 1c2ed58a3..fc021b1e7 100644 --- a/hive/stackable/patches/4.0.0/02-logging-dependencies.patch +++ b/hive/stackable/patches/4.0.0/0002-Include-logging-dependencies.patch @@ -1,8 +1,18 @@ +From 69071d4d4525a8ceb27cbefa9a093d0678a1f3dd Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Tue, 13 Aug 2024 13:38:12 +0200 +Subject: Include logging dependencies + +This adds dependencies required for use of the XmlLayout for logging +--- + standalone-metastore/pom.xml | 5 +++++ + 1 file changed, 5 insertions(+) + diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml -index 28ac5ceb65..03097e7f40 100644 +index e3cbd821bd..205fc31ec7 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml -@@ -494,6 +494,11 @@ +@@ -493,6 +493,11 @@ com.fasterxml.jackson.core jackson-databind diff --git a/hive/stackable/patches/4.0.0/03-patch-cyclonedx-plugin.patch b/hive/stackable/patches/4.0.0/0003-Add-CycloneDX-plugin.patch similarity index 79% rename from hive/stackable/patches/4.0.0/03-patch-cyclonedx-plugin.patch rename to hive/stackable/patches/4.0.0/0003-Add-CycloneDX-plugin.patch index 220709c82..978de7fd3 100644 --- a/hive/stackable/patches/4.0.0/03-patch-cyclonedx-plugin.patch +++ b/hive/stackable/patches/4.0.0/0003-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 23995b6c1ef70e4e119ce0493e63ff3a75ea1378 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 
2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + standalone-metastore/pom.xml | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) + diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml -index 28ac5ce..d6e2196 100644 +index 205fc31ec7..2982a45ca0 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml @@ -41,6 +41,7 @@ @@ -10,7 +19,7 @@ index 28ac5ce..d6e2196 100644 ${basedir}/${standalone.metastore.path.to.root}/checkstyle ${project.basedir}/src/test/resources -@@ -590,6 +591,23 @@ +@@ -594,6 +595,23 @@ diff --git a/hive/stackable/patches/4.0.0/04-CVE-2024-36114-bump-aircompressor-0-27.patch b/hive/stackable/patches/4.0.0/0004-Fix-CVE-2024-36114.patch similarity index 84% rename from hive/stackable/patches/4.0.0/04-CVE-2024-36114-bump-aircompressor-0-27.patch rename to hive/stackable/patches/4.0.0/0004-Fix-CVE-2024-36114.patch index 09561cf91..d9d293e75 100644 --- a/hive/stackable/patches/4.0.0/04-CVE-2024-36114-bump-aircompressor-0-27.patch +++ b/hive/stackable/patches/4.0.0/0004-Fix-CVE-2024-36114.patch @@ -1,4 +1,8 @@ -Fix CVE-2024-36114 +From 4a85ad5ec7b0dbfb9f2c4524531ae0198a352b3d Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Tue, 12 Nov 2024 11:49:57 +0100 +Subject: Fix CVE-2024-36114 + see https://github.com/stackabletech/vulnerabilities/issues/834 Aircompressor is a library with ports of the Snappy, LZO, LZ4, and @@ -17,12 +21,15 @@ have been fixed. When decompressing data from untrusted users, this can be exploited for a denial-of-service attack by crashing the JVM, or to leak other sensitive information from the Java process. There are no known workarounds for this issue. 
+--- + standalone-metastore/pom.xml | 6 ++++++ + 1 file changed, 6 insertions(+) diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml -index 28ac5ceb65..8f2edd7b8e 100644 +index 2982a45ca0..cd34884e3b 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml -@@ -120,6 +120,12 @@ +@@ -121,6 +121,12 @@ diff --git a/hive/stackable/patches/4.0.0/patchable.toml b/hive/stackable/patches/4.0.0/patchable.toml new file mode 100644 index 000000000..7e70d2b90 --- /dev/null +++ b/hive/stackable/patches/4.0.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/hive.git" +base = "183f8cb41d3dbed961ffd27999876468ff06690c" diff --git a/kafka/stackable/patches/3.7.1/001-cyclonedx-plugin.patch b/kafka/stackable/patches/3.7.1/0001-Add-CycloneDX-plugin.patch similarity index 86% rename from kafka/stackable/patches/3.7.1/001-cyclonedx-plugin.patch rename to kafka/stackable/patches/3.7.1/0001-Add-CycloneDX-plugin.patch index 152a993f5..4eaaac32f 100644 --- a/kafka/stackable/patches/3.7.1/001-cyclonedx-plugin.patch +++ b/kafka/stackable/patches/3.7.1/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 55bcff9a298c339bd7eb10cdd733e600f671861b Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Thu, 17 Oct 2024 11:01:40 +0200 +Subject: Add CycloneDX plugin + +--- + build.gradle | 41 +++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 41 insertions(+) + diff --git a/build.gradle b/build.gradle -index 32e6e8f..13a0def 100644 +index 32e6e8fdca..13a0deffec 100644 --- a/build.gradle +++ b/build.gradle @@ -48,6 +48,47 @@ plugins { diff --git a/kafka/stackable/patches/3.7.1/002-use-stackable-repo.patch b/kafka/stackable/patches/3.7.1/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch similarity index 77% rename from kafka/stackable/patches/3.7.1/002-use-stackable-repo.patch rename to kafka/stackable/patches/3.7.1/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch index 6c18efdaa..50db37e2b 100644 --- 
a/kafka/stackable/patches/3.7.1/002-use-stackable-repo.patch +++ b/kafka/stackable/patches/3.7.1/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch @@ -1,14 +1,14 @@ -From e5102449fe825cfbba20ce6ace1f51cd91550780 Mon Sep 17 00:00:00 2001 +From 06c6dd7309cc6f2a75089bf91124cff88beed776 Mon Sep 17 00:00:00 2001 From: Lars Francke Date: Thu, 12 Dec 2024 10:09:47 +0100 -Subject: [PATCH] Change Gradle to use the Nexus Build Repo +Subject: Change Gradle to use the Nexus Build Repo --- build.gradle | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle -index 92082fe7cf..3b56a2ad98 100644 +index 13a0deffec..abb465f890 100644 --- a/build.gradle +++ b/build.gradle @@ -20,7 +20,9 @@ import java.nio.charset.StandardCharsets @@ -22,7 +22,7 @@ index 92082fe7cf..3b56a2ad98 100644 } apply from: "$rootDir/gradle/dependencies.gradle" -@@ -126,7 +128,9 @@ ext { +@@ -160,7 +162,9 @@ ext { allprojects { repositories { @@ -33,6 +33,3 @@ index 92082fe7cf..3b56a2ad98 100644 } dependencyUpdates { --- -2.47.1 - diff --git a/kafka/stackable/patches/3.7.1/patchable.toml b/kafka/stackable/patches/3.7.1/patchable.toml new file mode 100644 index 000000000..55027684d --- /dev/null +++ b/kafka/stackable/patches/3.7.1/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/kafka.git" +base = "e2494e6ffb89f8288ed2aeb9b5596c755210bffd" diff --git a/kafka/stackable/patches/3.7.2/0001-Add-cyclonedx-plugin.patch b/kafka/stackable/patches/3.7.2/0001-Add-CycloneDX-plugin.patch similarity index 91% rename from kafka/stackable/patches/3.7.2/0001-Add-cyclonedx-plugin.patch rename to kafka/stackable/patches/3.7.2/0001-Add-CycloneDX-plugin.patch index 064eb850f..985e2fdfc 100644 --- a/kafka/stackable/patches/3.7.2/0001-Add-cyclonedx-plugin.patch +++ b/kafka/stackable/patches/3.7.2/0001-Add-CycloneDX-plugin.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 3 Feb 2025 13:54:54 
+0100 -Subject: Add cyclonedx-plugin +From 629236f7f9eb4afc6fddceaeb27b4b1c48e13391 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Thu, 17 Oct 2024 11:01:40 +0200 +Subject: Add CycloneDX plugin --- build.gradle | 44 +++++++++++++++++++++++++++++++++++++++++++- @@ -69,8 +69,3 @@ index 3a9914a9cd..398a7c994e 100644 generator project(':generator') } - -base-commit: 79a8f2b5f44f9d5a6867190d1dfc463d08d60b82 --- -2.40.1 - diff --git a/kafka/stackable/patches/3.7.2/0002-Use-stackable-maven-mirror.patch b/kafka/stackable/patches/3.7.2/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch similarity index 77% rename from kafka/stackable/patches/3.7.2/0002-Use-stackable-maven-mirror.patch rename to kafka/stackable/patches/3.7.2/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch index 3d1eaef47..f8089ac11 100644 --- a/kafka/stackable/patches/3.7.2/0002-Use-stackable-maven-mirror.patch +++ b/kafka/stackable/patches/3.7.2/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 3 Feb 2025 13:56:21 +0100 -Subject: Use stackable maven mirror +From 1b71b13c3df17f9112b0f552e7be49c474b9dd40 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 12 Dec 2024 10:09:47 +0100 +Subject: Change Gradle to use the Nexus Build Repo --- build.gradle | 8 ++++++-- @@ -33,6 +33,3 @@ index 398a7c994e..6fb5efe645 100644 } dependencyUpdates { --- -2.40.1 - diff --git a/kafka/stackable/patches/3.7.2/patchable.toml b/kafka/stackable/patches/3.7.2/patchable.toml index b6081fdaf..250b89189 100644 --- a/kafka/stackable/patches/3.7.2/patchable.toml +++ b/kafka/stackable/patches/3.7.2/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/apache/kafka" +upstream = "https://github.com/apache/kafka.git" base = "79a8f2b5f44f9d5a6867190d1dfc463d08d60b82" diff --git a/kafka/stackable/patches/3.8.1/0001-Add-cyclonedx-plugin.patch 
b/kafka/stackable/patches/3.8.1/0001-Change-Gradle-to-use-the-Nexus-Build-Repo.patch similarity index 91% rename from kafka/stackable/patches/3.8.1/0001-Add-cyclonedx-plugin.patch rename to kafka/stackable/patches/3.8.1/0001-Change-Gradle-to-use-the-Nexus-Build-Repo.patch index aff3492af..7ce534d6c 100644 --- a/kafka/stackable/patches/3.8.1/0001-Add-cyclonedx-plugin.patch +++ b/kafka/stackable/patches/3.8.1/0001-Change-Gradle-to-use-the-Nexus-Build-Repo.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Thu, 6 Feb 2025 10:21:26 +0100 -Subject: Add cyclonedx plugin +From b03f930266c43347645b4e978fa97cf644b270f8 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 12 Dec 2024 10:09:47 +0100 +Subject: Change Gradle to use the Nexus Build Repo --- build.gradle | 44 +++++++++++++++++++++++++++++++++++++++++++- @@ -69,8 +69,3 @@ index 8b44b09087..fadb64fda5 100644 generator project(':generator') } - -base-commit: 70d6ff42debf7e17478beb899fb5756bfbdbfbb5 --- -2.40.1 - diff --git a/kafka/stackable/patches/3.8.1/0002-Use-stackable-maven-mirror.patch b/kafka/stackable/patches/3.8.1/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch similarity index 77% rename from kafka/stackable/patches/3.8.1/0002-Use-stackable-maven-mirror.patch rename to kafka/stackable/patches/3.8.1/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch index 508cf8508..de47c472f 100644 --- a/kafka/stackable/patches/3.8.1/0002-Use-stackable-maven-mirror.patch +++ b/kafka/stackable/patches/3.8.1/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Thu, 6 Feb 2025 10:25:36 +0100 -Subject: Use stackable maven mirror +From 698bad00ce09a01ee17664f7f3b04209d21603bc Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 12 Dec 2024 10:09:47 +0100 +Subject: Change Gradle to use the Nexus Build Repo --- build.gradle | 8 
++++++-- @@ -33,6 +33,3 @@ index fadb64fda5..34786c0673 100644 } dependencyUpdates { --- -2.40.1 - diff --git a/kafka/stackable/patches/3.8.1/patchable.toml b/kafka/stackable/patches/3.8.1/patchable.toml index 3b35f9279..b2055980d 100644 --- a/kafka/stackable/patches/3.8.1/patchable.toml +++ b/kafka/stackable/patches/3.8.1/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/apache/kafka" +upstream = "https://github.com/apache/kafka.git" base = "70d6ff42debf7e17478beb899fb5756bfbdbfbb5" diff --git a/kafka/stackable/patches/3.9.0/0001-Add-cyclonedx-plugin.patch b/kafka/stackable/patches/3.9.0/0001-Add-CycloneDX-plugin.patch similarity index 90% rename from kafka/stackable/patches/3.9.0/0001-Add-cyclonedx-plugin.patch rename to kafka/stackable/patches/3.9.0/0001-Add-CycloneDX-plugin.patch index a0e9473c6..87610bda7 100644 --- a/kafka/stackable/patches/3.9.0/0001-Add-cyclonedx-plugin.patch +++ b/kafka/stackable/patches/3.9.0/0001-Add-CycloneDX-plugin.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Wed, 29 Jan 2025 16:50:30 +0100 -Subject: Add cyclonedx-plugin +From f172eee080ac79daf38a039e1f5dd06ad3968ce8 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Thu, 17 Oct 2024 11:01:40 +0200 +Subject: Add CycloneDX plugin --- build.gradle | 42 ++++++++++++++++++++++++++++++++++++++++++ @@ -60,8 +60,3 @@ index 5b064f4203..2d7b442e7b 100644 } ext { - -base-commit: 84caaa6e9da06435411510a81fa321d4f99c351f --- -2.40.1 - diff --git a/kafka/stackable/patches/3.9.0/0002-Use-stackable-maven-mirror.patch b/kafka/stackable/patches/3.9.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch similarity index 77% rename from kafka/stackable/patches/3.9.0/0002-Use-stackable-maven-mirror.patch rename to kafka/stackable/patches/3.9.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch index 3b456d36a..79d30fd7e 100644 --- a/kafka/stackable/patches/3.9.0/0002-Use-stackable-maven-mirror.patch +++ 
b/kafka/stackable/patches/3.9.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Wed, 29 Jan 2025 17:00:48 +0100 -Subject: Use stackable maven mirror +From 933fb35d713c86e349175176c54fe83b03b491c6 Mon Sep 17 00:00:00 2001 +From: Lars Francke +Date: Thu, 12 Dec 2024 10:09:47 +0100 +Subject: Change Gradle to use the Nexus Build Repo --- build.gradle | 8 ++++++-- @@ -33,6 +33,3 @@ index 2d7b442e7b..3e727be2f0 100644 } dependencyUpdates { --- -2.40.1 - diff --git a/kafka/stackable/patches/3.9.0/patchable.toml b/kafka/stackable/patches/3.9.0/patchable.toml index 8c377193a..df24a8ca7 100644 --- a/kafka/stackable/patches/3.9.0/patchable.toml +++ b/kafka/stackable/patches/3.9.0/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/apache/kafka" +upstream = "https://github.com/apache/kafka.git" base = "84caaa6e9da06435411510a81fa321d4f99c351f" diff --git a/nifi/stackable/patches/1.27.0/001-NIFI-no-zip-assembly-1.27.0.patch b/nifi/stackable/patches/1.27.0/0001-Disable-zip-assembly-to-save-disk-space.patch similarity index 67% rename from nifi/stackable/patches/1.27.0/001-NIFI-no-zip-assembly-1.27.0.patch rename to nifi/stackable/patches/1.27.0/0001-Disable-zip-assembly-to-save-disk-space.patch index 4aeeefbcc..db744bc27 100644 --- a/nifi/stackable/patches/1.27.0/001-NIFI-no-zip-assembly-1.27.0.patch +++ b/nifi/stackable/patches/1.27.0/0001-Disable-zip-assembly-to-save-disk-space.patch @@ -1,3 +1,12 @@ +From 94aa5703e1943de9cfdf58da3b67404f81b44b2f Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Fri, 17 May 2024 15:38:33 +0200 +Subject: Disable zip assembly to save disk space + +--- + nifi-assembly/pom.xml | 1 - + 1 file changed, 1 deletion(-) + diff --git a/nifi-assembly/pom.xml b/nifi-assembly/pom.xml index 08f89da15c..5b95d94c74 100644 --- a/nifi-assembly/pom.xml diff --git 
a/nifi/stackable/patches/1.28.1/0002-allow-bypassing-check-for-host-header.patch b/nifi/stackable/patches/1.27.0/0002-Allow-bypassing-check-for-host-header.patch similarity index 93% rename from nifi/stackable/patches/1.28.1/0002-allow-bypassing-check-for-host-header.patch rename to nifi/stackable/patches/1.27.0/0002-Allow-bypassing-check-for-host-header.patch index ea042b522..b984fa05c 100644 --- a/nifi/stackable/patches/1.28.1/0002-allow-bypassing-check-for-host-header.patch +++ b/nifi/stackable/patches/1.27.0/0002-Allow-bypassing-check-for-host-header.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 15:19:01 +0100 -Subject: allow bypassing check for host header +From eab66d5ff1db7f32062bc1eab252c19ddbf95797 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?S=C3=B6nke=20Liebau?= +Date: Mon, 24 Jun 2024 09:04:43 +0200 +Subject: Allow bypassing check for host header NiFi has the configuration option 'nifi.web.proxy.host' which controls allowed values for the host header field in any incoming request for the web ui. 
@@ -60,6 +60,3 @@ index dd4bbf54c0..ea1b5b2da1 100644 } @Override --- -2.40.1 - diff --git a/nifi/stackable/patches/1.27.0/003-cyclonedx-plugin.patch b/nifi/stackable/patches/1.27.0/0003-Add-CycloneDX-plugin.patch similarity index 75% rename from nifi/stackable/patches/1.27.0/003-cyclonedx-plugin.patch rename to nifi/stackable/patches/1.27.0/0003-Add-CycloneDX-plugin.patch index 04ec03ff7..96ca16281 100644 --- a/nifi/stackable/patches/1.27.0/003-cyclonedx-plugin.patch +++ b/nifi/stackable/patches/1.27.0/0003-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 1f6bc60943e4340e3fb77fd00a35cfaab62d12fc Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) + diff --git a/pom.xml b/pom.xml -index 0437c2b..0684bd5 100644 +index 0437c2b949..0684bd5a22 100644 --- a/pom.xml +++ b/pom.xml @@ -1090,6 +1090,24 @@ diff --git a/nifi/stackable/patches/1.27.0/004-CVE-2024-36114-bump-aircompressor-0-27.patch b/nifi/stackable/patches/1.27.0/0004-Fix-CVE-2024-36114.patch similarity index 87% rename from nifi/stackable/patches/1.27.0/004-CVE-2024-36114-bump-aircompressor-0-27.patch rename to nifi/stackable/patches/1.27.0/0004-Fix-CVE-2024-36114.patch index aa6663c92..f5b67cb14 100644 --- a/nifi/stackable/patches/1.27.0/004-CVE-2024-36114-bump-aircompressor-0-27.patch +++ b/nifi/stackable/patches/1.27.0/0004-Fix-CVE-2024-36114.patch @@ -1,4 +1,8 @@ -Fix CVE-2024-36114 +From 90ff88d3a98fbddbd779b1f010059cd4f48d6cc6 Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Tue, 12 Nov 2024 11:54:11 +0100 +Subject: Fix CVE-2024-36114 + see https://github.com/stackabletech/vulnerabilities/issues/834 Aircompressor is a library with ports of the Snappy, LZO, LZ4, and @@ -17,9 +21,12 @@ have been fixed. 
When decompressing data from untrusted users, this can be exploited for a denial-of-service attack by crashing the JVM, or to leak other sensitive information from the Java process. There are no known workarounds for this issue. +--- + pom.xml | 6 ++++++ + 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml -index 0437c2b949..1a06052b3b 100644 +index 0684bd5a22..d5b97acfcf 100644 --- a/pom.xml +++ b/pom.xml @@ -155,6 +155,12 @@ diff --git a/nifi/stackable/patches/1.27.0/patchable.toml b/nifi/stackable/patches/1.27.0/patchable.toml new file mode 100644 index 000000000..15e4563bc --- /dev/null +++ b/nifi/stackable/patches/1.27.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/nifi.git" +base = "e0c4461d90bd4f6e5f2b81765bcff5cd97ed3e18" diff --git a/nifi/stackable/patches/1.28.1/0001-no-zip-assembly.patch b/nifi/stackable/patches/1.28.1/0001-Disable-zip-assembly-to-save-disk-space.patch similarity index 72% rename from nifi/stackable/patches/1.28.1/0001-no-zip-assembly.patch rename to nifi/stackable/patches/1.28.1/0001-Disable-zip-assembly-to-save-disk-space.patch index d98d0559b..c9e5a8673 100644 --- a/nifi/stackable/patches/1.28.1/0001-no-zip-assembly.patch +++ b/nifi/stackable/patches/1.28.1/0001-Disable-zip-assembly-to-save-disk-space.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 15:13:39 +0100 -Subject: no zip assembly +From 2793399fdb0effd09aa682cfeeda855ee0b5b52d Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Fri, 17 May 2024 15:38:33 +0200 +Subject: Disable zip assembly to save disk space --- nifi-assembly/pom.xml | 1 - @@ -19,8 +19,3 @@ index 27928cf67e..d00154626a 100644 - -base-commit: 883338fe28883733417d10f6ffa9319e75f5ea06 --- -2.40.1 - diff --git a/nifi/stackable/patches/1.27.0/002-NIFI-no-host-header-check-1.27.0.patch b/nifi/stackable/patches/1.28.1/0002-Allow-bypassing-check-for-host-header.patch similarity index 89% rename 
from nifi/stackable/patches/1.27.0/002-NIFI-no-host-header-check-1.27.0.patch rename to nifi/stackable/patches/1.28.1/0002-Allow-bypassing-check-for-host-header.patch index 1af690e30..ce6147f65 100644 --- a/nifi/stackable/patches/1.27.0/002-NIFI-no-host-header-check-1.27.0.patch +++ b/nifi/stackable/patches/1.28.1/0002-Allow-bypassing-check-for-host-header.patch @@ -1,4 +1,8 @@ -Subject: [PATCH] Allow bypassing check for host header. +From 456e42ba95135d5a1174e3f543df07f35473e9b6 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?S=C3=B6nke=20Liebau?= +Date: Mon, 24 Jun 2024 09:04:43 +0200 +Subject: Allow bypassing check for host header + NiFi has the configuration option 'nifi.web.proxy.host' which controls allowed values for the host header field in any incoming request for the web ui. @@ -20,11 +24,9 @@ hostnames by just setting it to "*" and this will effectively bypass the hostnam This allows us to keep the default behavior in place for those users where it works and not remove security features, but also enables users to disable this check if they know what they are doing. 
--- -Index: nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java -IDEA additional info: -Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP -<+>UTF-8 -=================================================================== + .../org/apache/nifi/web/server/HostHeaderHandler.java | 8 +++++++- + 1 file changed, 7 insertions(+), 1 deletion(-) + diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java index dd4bbf54c0..ea1b5b2da1 100644 --- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java diff --git a/nifi/stackable/patches/1.28.1/0003-add-cyclonedx-plugin.patch b/nifi/stackable/patches/1.28.1/0003-Add-CycloneDX-plugin.patch similarity index 84% rename from nifi/stackable/patches/1.28.1/0003-add-cyclonedx-plugin.patch rename to nifi/stackable/patches/1.28.1/0003-Add-CycloneDX-plugin.patch index fabb944f1..cef05e836 100644 --- a/nifi/stackable/patches/1.28.1/0003-add-cyclonedx-plugin.patch +++ b/nifi/stackable/patches/1.28.1/0003-Add-CycloneDX-plugin.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 15:25:52 +0100 -Subject: add cyclonedx plugin +From a883c6b633311959c323dbd43cd731d4e41532cc Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin --- pom.xml | 18 ++++++++++++++++++ @@ -36,6 +36,3 @@ index 672c023277..641d772286 100644 --- -2.40.1 - diff --git a/nifi/stackable/patches/1.28.1/0004-CVE-2024-36114-bump-aircompressor-to-0.27.patch b/nifi/stackable/patches/1.28.1/0004-Fix-CVE-2024-36114.patch similarity index 
90% rename from nifi/stackable/patches/1.28.1/0004-CVE-2024-36114-bump-aircompressor-to-0.27.patch rename to nifi/stackable/patches/1.28.1/0004-Fix-CVE-2024-36114.patch index fc3404c91..27650b8db 100644 --- a/nifi/stackable/patches/1.28.1/0004-CVE-2024-36114-bump-aircompressor-to-0.27.patch +++ b/nifi/stackable/patches/1.28.1/0004-Fix-CVE-2024-36114.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 15:27:01 +0100 -Subject: CVE-2024-36114 bump aircompressor to 0.27 +From 394424a8db4ef6ab4e7a774f4faf2a3bdc3fef35 Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Tue, 12 Nov 2024 11:54:11 +0100 +Subject: Fix CVE-2024-36114 see https://github.com/stackabletech/vulnerabilities/issues/834 @@ -42,6 +42,3 @@ index d00154626a..da38056c7a 100644 javax.servlet javax.servlet-api --- -2.40.1 - diff --git a/nifi/stackable/patches/1.28.1/patchable.toml b/nifi/stackable/patches/1.28.1/patchable.toml index d0d1eb71f..5c9d210be 100644 --- a/nifi/stackable/patches/1.28.1/patchable.toml +++ b/nifi/stackable/patches/1.28.1/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/apache/nifi" +upstream = "https://github.com/apache/nifi.git" base = "883338fe28883733417d10f6ffa9319e75f5ea06" diff --git a/nifi/stackable/patches/2.2.0/0001-no-zip-assembly.patch b/nifi/stackable/patches/2.2.0/0001-Disable-zip-assembly-to-save-disk-space.patch similarity index 76% rename from nifi/stackable/patches/2.2.0/0001-no-zip-assembly.patch rename to nifi/stackable/patches/2.2.0/0001-Disable-zip-assembly-to-save-disk-space.patch index b3e4f708e..25e389653 100644 --- a/nifi/stackable/patches/2.2.0/0001-no-zip-assembly.patch +++ b/nifi/stackable/patches/2.2.0/0001-Disable-zip-assembly-to-save-disk-space.patch @@ -1,7 +1,7 @@ -From afe4e4583747c2972d2590e9c1bd7de8b48aa300 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 17:26:20 +0100 -Subject: no zip assembly +From 
450dee471deaad0d154cc722b11054ba5fa3a084 Mon Sep 17 00:00:00 2001 +From: Malte Sander +Date: Fri, 17 May 2024 15:38:33 +0200 +Subject: Disable zip assembly to save disk space --- nifi-assembly/pom.xml | 1 - diff --git a/nifi/stackable/patches/2.2.0/0002-allow-bypassing-check-for-host-header.patch b/nifi/stackable/patches/2.2.0/0002-Allow-bypassing-check-for-host-header.patch similarity index 93% rename from nifi/stackable/patches/2.2.0/0002-allow-bypassing-check-for-host-header.patch rename to nifi/stackable/patches/2.2.0/0002-Allow-bypassing-check-for-host-header.patch index e9acc70c7..caa3cca26 100644 --- a/nifi/stackable/patches/2.2.0/0002-allow-bypassing-check-for-host-header.patch +++ b/nifi/stackable/patches/2.2.0/0002-Allow-bypassing-check-for-host-header.patch @@ -1,7 +1,7 @@ -From f6888b73bf6c8b2889f8f7241cdce6714cd6a776 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 17:28:46 +0100 -Subject: allow bypassing check for host header +From 63c87c2fde2714836fe3f522605ac24a39b2c81f Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?S=C3=B6nke=20Liebau?= +Date: Mon, 24 Jun 2024 09:04:43 +0200 +Subject: Allow bypassing check for host header NiFi has the configuration option 'nifi.web.proxy.host' which controls allowed values for the host header field in any incoming request for the web ui. 
diff --git a/nifi/stackable/patches/2.2.0/0003-add-cyclonedx-plugin.patch b/nifi/stackable/patches/2.2.0/0003-Add-CycloneDX-plugin.patch similarity index 85% rename from nifi/stackable/patches/2.2.0/0003-add-cyclonedx-plugin.patch rename to nifi/stackable/patches/2.2.0/0003-Add-CycloneDX-plugin.patch index 76ddb2bd5..5e4cd897c 100644 --- a/nifi/stackable/patches/2.2.0/0003-add-cyclonedx-plugin.patch +++ b/nifi/stackable/patches/2.2.0/0003-Add-CycloneDX-plugin.patch @@ -1,7 +1,7 @@ -From fbf66d408aefd995a2ac4a2b213b25a12cb9e96c Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 17 Feb 2025 17:31:17 +0100 -Subject: add cyclonedx plugin +From 5fb37840eb45dc2b159089e75e2d0ef3054ac03d Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin --- pom.xml | 18 ++++++++++++++++++ diff --git a/nifi/stackable/patches/2.2.0/patchable.toml b/nifi/stackable/patches/2.2.0/patchable.toml index 7d7ff6a40..004d95779 100644 --- a/nifi/stackable/patches/2.2.0/patchable.toml +++ b/nifi/stackable/patches/2.2.0/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/apache/nifi" +upstream = "https://github.com/apache/nifi.git" base = "b33ffac8aa10992482f7fa54e6cfccc46a5e8e27" diff --git a/omid/stackable/patches/1.1.0/001-cyclonedx-plugin.patch b/omid/stackable/patches/1.1.0/0001-Add-CycloneDX-plugin.patch similarity index 74% rename from omid/stackable/patches/1.1.0/001-cyclonedx-plugin.patch rename to omid/stackable/patches/1.1.0/0001-Add-CycloneDX-plugin.patch index 1ab7ca109..a5d55124a 100644 --- a/omid/stackable/patches/1.1.0/001-cyclonedx-plugin.patch +++ b/omid/stackable/patches/1.1.0/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From c0faa62430ebcb80b0c2dba2acfbbe072e246ad4 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Sun, 8 Sep 2024 15:50:23 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) + diff --git a/pom.xml b/pom.xml -index 
b7f4e47..1b6a282 100644 +index b7f4e4755..1b6a2825a 100644 --- a/pom.xml +++ b/pom.xml @@ -458,6 +458,24 @@ diff --git a/omid/stackable/patches/1.1.0/patchable.toml b/omid/stackable/patches/1.1.0/patchable.toml new file mode 100644 index 000000000..7441631bd --- /dev/null +++ b/omid/stackable/patches/1.1.0/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/phoenix-omid.git" +base = "3b9e16b7537adbc90a7403507fb8aabd8d1fab0c" diff --git a/omid/stackable/patches/1.1.1/001-cyclonedx-plugin.patch b/omid/stackable/patches/1.1.1/0001-Add-CycloneDX-plugin.patch similarity index 74% rename from omid/stackable/patches/1.1.1/001-cyclonedx-plugin.patch rename to omid/stackable/patches/1.1.1/0001-Add-CycloneDX-plugin.patch index c7f6ca9e9..39e49bf61 100644 --- a/omid/stackable/patches/1.1.1/001-cyclonedx-plugin.patch +++ b/omid/stackable/patches/1.1.1/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 058c598a2729a9f16a5229b9e50ebb6734ede082 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Sun, 8 Sep 2024 15:50:23 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) + diff --git a/pom.xml b/pom.xml -index f9dc9e6..f2ab953 100644 +index f9dc9e602..f2ab95317 100644 --- a/pom.xml +++ b/pom.xml @@ -498,6 +498,24 @@ diff --git a/omid/stackable/patches/1.1.1/patchable.toml b/omid/stackable/patches/1.1.1/patchable.toml new file mode 100644 index 000000000..00d6fd49f --- /dev/null +++ b/omid/stackable/patches/1.1.1/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/phoenix-omid.git" +base = "cd546d58d93f380fec9bf65dbfa618f53493f662" diff --git a/omid/stackable/patches/1.1.2/001-cyclonedx-plugin.patch b/omid/stackable/patches/1.1.2/0001-Add-CycloneDX-plugin.patch similarity index 74% rename from omid/stackable/patches/1.1.2/001-cyclonedx-plugin.patch rename to omid/stackable/patches/1.1.2/0001-Add-CycloneDX-plugin.patch index 19e2d0257..91b47f135 100644 --- 
a/omid/stackable/patches/1.1.2/001-cyclonedx-plugin.patch +++ b/omid/stackable/patches/1.1.2/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 673d11cdf641c71c1d62b692ad68b37c894d4956 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Sun, 8 Sep 2024 15:50:23 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 17 +++++++++++++++++ + 1 file changed, 17 insertions(+) + diff --git a/pom.xml b/pom.xml -index eeaf883..3a62812 100644 +index eeaf883cd..3a6281219 100644 --- a/pom.xml +++ b/pom.xml @@ -563,6 +563,23 @@ diff --git a/omid/stackable/patches/1.1.2/patchable.toml b/omid/stackable/patches/1.1.2/patchable.toml new file mode 100644 index 000000000..1cc068888 --- /dev/null +++ b/omid/stackable/patches/1.1.2/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/phoenix-omid.git" +base = "88812c9e127063f3b3016262f81ea3e8b48ec157" diff --git a/rust/patchable/README.md b/rust/patchable/README.md index a683413d3..8bb56a04d 100644 --- a/rust/patchable/README.md +++ b/rust/patchable/README.md @@ -44,3 +44,7 @@ Instead of creating this manually, run `patchable init`: ```toml cargo patchable init druid 28.0.0 --upstream=https://github.com/apache/druid.git --base=druid-28.0.0 ``` + +## Glossary + +- Images repo/directory - The checkout of stackabletech/docker-images diff --git a/rust/patchable/src/main.rs b/rust/patchable/src/main.rs index e01dba052..06e199542 100644 --- a/rust/patchable/src/main.rs +++ b/rust/patchable/src/main.rs @@ -150,6 +150,23 @@ enum Cmd { #[clap(long)] base: String, }, + + /// Shows the patch directory for a given product version + PatchDir { + #[clap(flatten)] + pv: ProductVersion, + }, + + /// Shows the worktree directory for a given product version + /// + /// This is the same value as `cargo patchable checkout` returns, but does not perform a checkout. 
+ WorktreeDir { + #[clap(flatten)] + pv: ProductVersion, + }, + + /// Shows the images repository root + ImagesDir, } #[derive(Debug, Snafu)] @@ -181,7 +198,7 @@ pub enum Error { }, #[snafu(display("failed to find images repository"))] - FindImagesRepo { source: git2::Error }, + FindImagesRepo { source: repo::Error }, #[snafu(display("images repository has no work directory"))] NoImagesRepoWorkdir, @@ -246,7 +263,7 @@ fn main() -> Result<()> { .context(ConfigureGitLoggingSnafu)?; let opts = <Opts as clap::Parser>::parse(); - let images_repo = Repository::discover(".").context(FindImagesRepoSnafu)?; + let images_repo = repo::discover_images_repo(".").context(FindImagesRepoSnafu)?; let images_repo_root = images_repo.workdir().context(NoImagesRepoWorkdirSnafu)?; match opts.cmd { Cmd::Checkout { pv, base_only } => { @@ -415,6 +432,24 @@ fn main() -> Result<()> { "created configuration for product version" ); } + + Cmd::PatchDir { pv } => { + let ctx = ProductVersionContext { + pv, + images_repo_root, + }; + println!("{}", ctx.patch_dir().display()); + } + + Cmd::WorktreeDir { pv } => { + let ctx = ProductVersionContext { + pv, + images_repo_root, + }; + println!("{}", ctx.worktree_root().display()); + } + + Cmd::ImagesDir => println!("{}", images_repo_root.display()), } Ok(()) diff --git a/rust/patchable/src/patch_mail.rs b/rust/patchable/src/patch_mail.rs index 1472addc3..680c32c2b 100644 --- a/rust/patchable/src/patch_mail.rs +++ b/rust/patchable/src/patch_mail.rs @@ -80,6 +80,8 @@ pub fn mailsplit(repo: &Repository, patch_file: &Path) -> Result is ignored anyway, so there's no point requiring it + .arg("-b") // mailsplit doesn't accept split arguments ("-o dir") .arg({ let mut output_arg = OsString::from("-o"); diff --git a/rust/patchable/src/repo.rs b/rust/patchable/src/repo.rs index f7efd2459..1b41f726c 100644 --- a/rust/patchable/src/repo.rs +++ b/rust/patchable/src/repo.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::{self, Path, PathBuf}; use git2::{
FetchOptions, ObjectType, Oid, RemoteCallbacks, Repository, RepositoryInitOptions, @@ -19,6 +19,19 @@ pub enum Error { #[snafu(display("failed to open repository at {path:?}"))] Open { source: git2::Error, path: PathBuf }, + #[snafu(display("failed to absolutize path {path:?}"))] + Absolutize { + source: std::io::Error, + path: PathBuf, + }, + #[snafu(display("no .patchable file found in parent of {path:?}"))] + NoDotPatchableFound { path: PathBuf }, + #[snafu(display("failed to check if {path:?} contains a .patchable file"))] + CheckDotPatchable { + source: std::io::Error, + path: PathBuf, + }, + #[snafu(display( "failed to create worktree branch {branch:?} pointing at {commit} in {repo}" ))] @@ -295,3 +308,23 @@ pub fn ensure_worktree_is_at( }), } } + +/// Try to find the Git images repository in a parent directory of `path` (or `path` itself). +/// +/// The repository is detected by that it contains a file named `.patchable`. +pub fn discover_images_repo(path: impl AsRef<Path>) -> Result<Repository> { + let full_path = path::absolute(&path).context(AbsolutizeSnafu { + path: path.as_ref(), + })?; + let mut path: &Path = &full_path; + loop { + match path.join(".patchable").try_exists() { + Ok(true) => break Repository::open(path).context(OpenSnafu { path }), + Ok(false) => match path.parent() { + Some(p) => path = p, + None => break NoDotPatchableFoundSnafu { path: full_path }.fail(), + }, + Err(err) => break Err(err).context(CheckDotPatchableSnafu { path }), + } + } +} diff --git a/spark-k8s/stackable/patches/3.5.2/001-cyclonedx-plugin.patch b/spark-k8s/stackable/patches/3.5.1/0001-Update-CycloneDX-plugin.patch similarity index 69% rename from spark-k8s/stackable/patches/3.5.2/001-cyclonedx-plugin.patch rename to spark-k8s/stackable/patches/3.5.1/0001-Update-CycloneDX-plugin.patch index 06989b142..b0cbf6b65 100644 --- a/spark-k8s/stackable/patches/3.5.2/001-cyclonedx-plugin.patch +++ b/spark-k8s/stackable/patches/3.5.1/0001-Update-CycloneDX-plugin.patch @@ -1,5 +1,15 @@ +From
08b7c02a497b8b3b70616281f810a898b1719a78 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Update CycloneDX plugin + +--- + dev/make-distribution.sh | 1 - + pom.xml | 7 ++++++- + 2 files changed, 6 insertions(+), 2 deletions(-) + diff --git a/dev/make-distribution.sh b/dev/make-distribution.sh -index ef7c010..0f4c1c7 100755 +index ef7c010e93..0f4c1c74e4 100755 --- a/dev/make-distribution.sh +++ b/dev/make-distribution.sh @@ -171,7 +171,6 @@ BUILD_COMMAND=("$MVN" clean package \ @@ -11,7 +21,7 @@ index ef7c010..0f4c1c7 100755 # Actually build the jar diff --git a/pom.xml b/pom.xml -index 0f504db..6cca7db 100644 +index 0f504dbee8..6cca7db7bf 100644 --- a/pom.xml +++ b/pom.xml @@ -3482,7 +3482,12 @@ diff --git a/spark-k8s/stackable/patches/3.5.1/002-CVE-2024-36114-upgrade-aircompressor.patch b/spark-k8s/stackable/patches/3.5.1/0002-Fix-CVE-2024-36114.patch similarity index 83% rename from spark-k8s/stackable/patches/3.5.1/002-CVE-2024-36114-upgrade-aircompressor.patch rename to spark-k8s/stackable/patches/3.5.1/0002-Fix-CVE-2024-36114.patch index 29247f97c..e38fd4825 100644 --- a/spark-k8s/stackable/patches/3.5.1/002-CVE-2024-36114-upgrade-aircompressor.patch +++ b/spark-k8s/stackable/patches/3.5.1/0002-Fix-CVE-2024-36114.patch @@ -1,4 +1,8 @@ -Fix CVE-2024-36114 +From 3892892d934387d20c6d8fd45f126e054bef55b8 Mon Sep 17 00:00:00 2001 +From: Siegfried Weber +Date: Mon, 11 Nov 2024 10:00:15 +0100 +Subject: Fix CVE-2024-36114 + see https://github.com/stackabletech/vulnerabilities/issues/834 Aircompressor is a library with ports of the Snappy, LZO, LZ4, and @@ -17,9 +21,12 @@ have been fixed. When decompressing data from untrusted users, this can be exploited for a denial-of-service attack by crashing the JVM, or to leak other sensitive information from the Java process. There are no known workarounds for this issue. 
+--- + pom.xml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml -index 6cca7db7bf8..fb9254e0cd9 100644 +index 6cca7db7bf..fb9254e0cd 100644 --- a/pom.xml +++ b/pom.xml @@ -2558,7 +2558,7 @@ diff --git a/spark-k8s/stackable/patches/3.5.1/patchable.toml b/spark-k8s/stackable/patches/3.5.1/patchable.toml new file mode 100644 index 000000000..feaeca0cb --- /dev/null +++ b/spark-k8s/stackable/patches/3.5.1/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/spark.git" +base = "fd86f85e181fc2dc0f50a096855acf83a6cc5d9c" diff --git a/spark-k8s/stackable/patches/3.5.1/001-cyclonedx-plugin.patch b/spark-k8s/stackable/patches/3.5.2/0001-Update-CycloneDX-plugin.patch similarity index 67% rename from spark-k8s/stackable/patches/3.5.1/001-cyclonedx-plugin.patch rename to spark-k8s/stackable/patches/3.5.2/0001-Update-CycloneDX-plugin.patch index 06989b142..079885f0b 100644 --- a/spark-k8s/stackable/patches/3.5.1/001-cyclonedx-plugin.patch +++ b/spark-k8s/stackable/patches/3.5.2/0001-Update-CycloneDX-plugin.patch @@ -1,5 +1,15 @@ +From 2f95ba96e5894cfd07eca25aef5968e6a6d543fd Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Update CycloneDX plugin + +--- + dev/make-distribution.sh | 1 - + pom.xml | 7 ++++++- + 2 files changed, 6 insertions(+), 2 deletions(-) + diff --git a/dev/make-distribution.sh b/dev/make-distribution.sh -index ef7c010..0f4c1c7 100755 +index ef7c010e930..0f4c1c74e40 100755 --- a/dev/make-distribution.sh +++ b/dev/make-distribution.sh @@ -171,7 +171,6 @@ BUILD_COMMAND=("$MVN" clean package \ @@ -11,10 +21,10 @@ index ef7c010..0f4c1c7 100755 # Actually build the jar diff --git a/pom.xml b/pom.xml -index 0f504db..6cca7db 100644 +index 8fe98c35846..36a800162da 100644 --- a/pom.xml +++ b/pom.xml -@@ -3482,7 +3482,12 @@ +@@ -3513,7 +3513,12 @@ org.cyclonedx cyclonedx-maven-plugin diff --git a/spark-k8s/stackable/patches/3.5.2/patchable.toml 
b/spark-k8s/stackable/patches/3.5.2/patchable.toml new file mode 100644 index 000000000..a8a860258 --- /dev/null +++ b/spark-k8s/stackable/patches/3.5.2/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/spark.git" +base = "bb7846dd487f259994fdc69e18e03382e3f64f42" diff --git a/superset/stackable/patches/4.0.2/patchable.toml b/superset/stackable/patches/4.0.2/patchable.toml new file mode 100644 index 000000000..739bf4b8b --- /dev/null +++ b/superset/stackable/patches/4.0.2/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/superset.git" +base = "f11fa091e261a35f4d39d8567a859fad07547d84" diff --git a/superset/stackable/patches/4.1.1/patchable.toml b/superset/stackable/patches/4.1.1/patchable.toml new file mode 100644 index 000000000..60c0eb205 --- /dev/null +++ b/superset/stackable/patches/4.1.1/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/superset.git" +base = "6264ff516532f0359d914bd72356f2007925109b" diff --git a/trino-storage-connector/stackable/patches/451/001-cyclonedx-plugin.patch b/trino-storage-connector/stackable/patches/451/0001-Add-CycloneDX-plugin.patch similarity index 77% rename from trino-storage-connector/stackable/patches/451/001-cyclonedx-plugin.patch rename to trino-storage-connector/stackable/patches/451/0001-Add-CycloneDX-plugin.patch index 390ae15b1..df87686e5 100644 --- a/trino-storage-connector/stackable/patches/451/001-cyclonedx-plugin.patch +++ b/trino-storage-connector/stackable/patches/451/0001-Add-CycloneDX-plugin.patch @@ -1,3 +1,12 @@ +From 6af1b7fa0f62a4413f1fc072a46ce1be3594d7bc Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 17 +++++++++++++++++ + 1 file changed, 17 insertions(+) + diff --git a/pom.xml b/pom.xml index 7304dac..5ba854d 100644 --- a/pom.xml diff --git a/trino-storage-connector/stackable/patches/451/patchable.toml 
b/trino-storage-connector/stackable/patches/451/patchable.toml new file mode 100644 index 000000000..eed17e752 --- /dev/null +++ b/trino-storage-connector/stackable/patches/451/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/snowlift/trino-storage.git" +base = "b6e5825bb84a4f1a3f89ff45ea39ce349313f60a" diff --git a/trino-storage-connector/stackable/patches/455/001-cyclonedx-plugin.patch b/trino-storage-connector/stackable/patches/455/0001-Add-CycloneDX-plugin.patch similarity index 77% rename from trino-storage-connector/stackable/patches/455/001-cyclonedx-plugin.patch rename to trino-storage-connector/stackable/patches/455/0001-Add-CycloneDX-plugin.patch index 387997d5d..9513dbbc1 100644 --- a/trino-storage-connector/stackable/patches/455/001-cyclonedx-plugin.patch +++ b/trino-storage-connector/stackable/patches/455/0001-Add-CycloneDX-plugin.patch @@ -1,3 +1,12 @@ +From 4d6d55cbc2e05d9b64186885da86ac367070d0f1 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 17 +++++++++++++++++ + 1 file changed, 17 insertions(+) + diff --git a/pom.xml b/pom.xml index 6471642..fc0f376 100644 --- a/pom.xml diff --git a/trino-storage-connector/stackable/patches/455/patchable.toml b/trino-storage-connector/stackable/patches/455/patchable.toml new file mode 100644 index 000000000..094a7ef26 --- /dev/null +++ b/trino-storage-connector/stackable/patches/455/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/snowlift/trino-storage.git" +base = "869a735d8be527117a19150e161ad8ca69317578" diff --git a/trino-storage-connector/stackable/patches/470/0001-Add-cyclonedx-plugin.patch b/trino-storage-connector/stackable/patches/470/0001-Add-CycloneDX-plugin.patch similarity index 80% rename from trino-storage-connector/stackable/patches/470/0001-Add-cyclonedx-plugin.patch rename to trino-storage-connector/stackable/patches/470/0001-Add-CycloneDX-plugin.patch index 5cbb7e969..938a026bf 
100644 --- a/trino-storage-connector/stackable/patches/470/0001-Add-cyclonedx-plugin.patch +++ b/trino-storage-connector/stackable/patches/470/0001-Add-CycloneDX-plugin.patch @@ -1,7 +1,7 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 10 Feb 2025 12:20:07 +0100 -Subject: Add cyclonedx plugin +From 1f5f2f18056f650b89f0399c188e3446975e1764 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin --- pom.xml | 17 +++++++++++++++++ @@ -35,8 +35,3 @@ index ddd620e..bbfcb96 100644 - -base-commit: 1b25d617940f14a844a43ee34aa705f7d11fbaf9 --- -2.40.1 - diff --git a/trino-storage-connector/stackable/patches/470/patchable.toml b/trino-storage-connector/stackable/patches/470/patchable.toml index 8e4c218de..d5ec973f5 100644 --- a/trino-storage-connector/stackable/patches/470/patchable.toml +++ b/trino-storage-connector/stackable/patches/470/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/snowlift/trino-storage" +upstream = "https://github.com/snowlift/trino-storage.git" base = "1b25d617940f14a844a43ee34aa705f7d11fbaf9" diff --git a/trino/stackable/patches/451/001-cyclonedx-plugin.patch b/trino/stackable/patches/451/0001-Add-CycloneDX-plugin.patch similarity index 75% rename from trino/stackable/patches/451/001-cyclonedx-plugin.patch rename to trino/stackable/patches/451/0001-Add-CycloneDX-plugin.patch index 1bd3312ad..809cee2a5 100644 --- a/trino/stackable/patches/451/001-cyclonedx-plugin.patch +++ b/trino/stackable/patches/451/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From cbad32c6095469bb7c15e45d3b740ee35db47253 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 19 +++++++++++++++++++ + 1 file changed, 19 insertions(+) + diff --git a/pom.xml b/pom.xml -index 1c13fda..ceabf34 100644 +index 1c13fdab898..ceabf345e96 100644 --- a/pom.xml +++ b/pom.xml @@ 
-2642,6 +2642,25 @@ diff --git a/trino/stackable/patches/451/patchable.toml b/trino/stackable/patches/451/patchable.toml new file mode 100644 index 000000000..1bb3feba6 --- /dev/null +++ b/trino/stackable/patches/451/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/trinodb/trino.git" +base = "2c974f7cb1d71e1f9f466941a317190a474fc432" diff --git a/trino/stackable/patches/455/001-cyclonedx-plugin.patch b/trino/stackable/patches/455/0001-Add-CycloneDX-plugin.patch similarity index 75% rename from trino/stackable/patches/455/001-cyclonedx-plugin.patch rename to trino/stackable/patches/455/0001-Add-CycloneDX-plugin.patch index acbe7ea11..b2a353922 100644 --- a/trino/stackable/patches/455/001-cyclonedx-plugin.patch +++ b/trino/stackable/patches/455/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From c91df97de99bfaf1d1740374170d6cb53e24b0af Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) + diff --git a/pom.xml b/pom.xml -index 41ca158..f2ed3f5 100644 +index 41ca1580021..f2ed3f54f40 100644 --- a/pom.xml +++ b/pom.xml @@ -2681,6 +2681,24 @@ diff --git a/trino/stackable/patches/455/patchable.toml b/trino/stackable/patches/455/patchable.toml new file mode 100644 index 000000000..39118758d --- /dev/null +++ b/trino/stackable/patches/455/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/trinodb/trino.git" +base = "e212460ea0aa663f0de9b16fecd480c4ad6490cc" diff --git a/trino/stackable/patches/470/0001-Add-cyclonedx-plugin.patch b/trino/stackable/patches/470/0001-Add-CycloneDX-plugin.patch similarity index 78% rename from trino/stackable/patches/470/0001-Add-cyclonedx-plugin.patch rename to trino/stackable/patches/470/0001-Add-CycloneDX-plugin.patch index 7c97791d4..cfce1af20 100644 --- a/trino/stackable/patches/470/0001-Add-cyclonedx-plugin.patch +++ 
b/trino/stackable/patches/470/0001-Add-CycloneDX-plugin.patch @@ -1,14 +1,14 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Nick Larsen -Date: Mon, 10 Feb 2025 12:16:43 +0100 -Subject: Add cyclonedx plugin +From 88bcb8d5ff689504f112b8b149a282a01e6b4212 Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin --- pom.xml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pom.xml b/pom.xml -index a1604d5ebe..3f9f794504 100644 +index a1604d5ebec..3f9f7945046 100644 --- a/pom.xml +++ b/pom.xml @@ -2814,6 +2814,24 @@ @@ -36,8 +36,3 @@ index a1604d5ebe..3f9f794504 100644 - -base-commit: 05bc059cf0c9263e4ee8be2c1ad69753d0dd4faf --- -2.40.1 - diff --git a/trino/stackable/patches/470/patchable.toml b/trino/stackable/patches/470/patchable.toml index 840233beb..42b04bed4 100644 --- a/trino/stackable/patches/470/patchable.toml +++ b/trino/stackable/patches/470/patchable.toml @@ -1,2 +1,2 @@ -upstream = "https://github.com/trinodb/trino" +upstream = "https://github.com/trinodb/trino.git" base = "05bc059cf0c9263e4ee8be2c1ad69753d0dd4faf" diff --git a/zookeeper/stackable/patches/3.9.2/001-cyclonedx-plugin.patch b/zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch similarity index 71% rename from zookeeper/stackable/patches/3.9.2/001-cyclonedx-plugin.patch rename to zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch index 0bcb0dd59..a37700a52 100644 --- a/zookeeper/stackable/patches/3.9.2/001-cyclonedx-plugin.patch +++ b/zookeeper/stackable/patches/3.9.2/0001-Add-CycloneDX-plugin.patch @@ -1,5 +1,14 @@ +From 0ac6dd909cbcb2d6d16ec7120ad65d5874ea0e8e Mon Sep 17 00:00:00 2001 +From: Lukas Voetmand +Date: Fri, 6 Sep 2024 17:53:52 +0200 +Subject: Add CycloneDX plugin + +--- + pom.xml | 7 ++++++- + 1 file changed, 6 insertions(+), 1 deletion(-) + diff --git a/pom.xml b/pom.xml -index 743b87f..3873e40 100644 +index 743b87f7..3873e403 100644 --- 
a/pom.xml +++ b/pom.xml @@ -925,7 +925,7 @@ diff --git a/zookeeper/stackable/patches/3.9.2/patchable.toml b/zookeeper/stackable/patches/3.9.2/patchable.toml new file mode 100644 index 000000000..f3ebf6062 --- /dev/null +++ b/zookeeper/stackable/patches/3.9.2/patchable.toml @@ -0,0 +1,2 @@ +upstream = "https://github.com/apache/zookeeper.git" +base = "e454e8c7283100c7caec6dcae2bc82aaecb63023"