From fa282e5041e78f018d5d667bac15dd866947a07c Mon Sep 17 00:00:00 2001 From: Rob Voyer Date: Thu, 24 Jan 2019 14:44:06 -0500 Subject: [PATCH 1/8] update dependencies --- phoenix-assembly/pom.xml | 2 +- phoenix-client/pom.xml | 2 +- phoenix-core/pom.xml | 6 +- phoenix-flume/pom.xml | 2 +- phoenix-hive/pom.xml | 2 +- phoenix-kafka/pom.xml | 2 +- phoenix-load-balancer/pom.xml | 2 +- phoenix-pherf/pom.xml | 2 +- phoenix-pig/pom.xml | 2 +- phoenix-queryserver-client/pom.xml | 2 +- phoenix-queryserver/pom.xml | 2 +- phoenix-server/pom.xml | 2 +- phoenix-spark/pom.xml | 2 +- phoenix-tracing-webapp/pom.xml | 2 +- pom.xml | 177 ++++++++++++++++++++++++++--- 15 files changed, 182 insertions(+), 27 deletions(-) diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index 77b1b83b11e..1f6826ddfbc 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-assembly Phoenix Assembly diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml index 7b53483e2bb..fb27d1e429c 100644 --- a/phoenix-client/pom.xml +++ b/phoenix-client/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-client Phoenix Client diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml index 96610fd9ae6..4d005615edd 100644 --- a/phoenix-core/pom.xml +++ b/phoenix-core/pom.xml @@ -4,7 +4,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-core Phoenix Core @@ -386,6 +386,10 @@ xom xom + + org.glassfish + javax.el + diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml index affd5ceb812..fec11a33fc6 100644 --- a/phoenix-flume/pom.xml +++ b/phoenix-flume/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-flume Phoenix - Flume diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml index d2c995afc71..c219ffe784e 100644 --- a/phoenix-hive/pom.xml +++ 
b/phoenix-hive/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-hive Phoenix - Hive diff --git a/phoenix-kafka/pom.xml b/phoenix-kafka/pom.xml index 8abf6fe07ee..989ecf0036a 100644 --- a/phoenix-kafka/pom.xml +++ b/phoenix-kafka/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-kafka Phoenix - Kafka diff --git a/phoenix-load-balancer/pom.xml b/phoenix-load-balancer/pom.xml index 53a370ef271..99d5b02c7cb 100644 --- a/phoenix-load-balancer/pom.xml +++ b/phoenix-load-balancer/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-load-balancer Phoenix Load Balancer diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml index b5d563133dd..32b7d59cedf 100644 --- a/phoenix-pherf/pom.xml +++ b/phoenix-pherf/pom.xml @@ -15,7 +15,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-pherf diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml index 90a8b9d994f..0984b02dcc3 100644 --- a/phoenix-pig/pom.xml +++ b/phoenix-pig/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-pig Phoenix - Pig diff --git a/phoenix-queryserver-client/pom.xml b/phoenix-queryserver-client/pom.xml index bddd2ac99ce..e348bf56409 100644 --- a/phoenix-queryserver-client/pom.xml +++ b/phoenix-queryserver-client/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-queryserver-client Phoenix Query Server Client diff --git a/phoenix-queryserver/pom.xml b/phoenix-queryserver/pom.xml index 7ab499d54dd..78747b5e496 100644 --- a/phoenix-queryserver/pom.xml +++ b/phoenix-queryserver/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-queryserver Phoenix Query Server diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml index 
aa234b7ea69..40917acd35f 100644 --- a/phoenix-server/pom.xml +++ b/phoenix-server/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-server Phoenix Server diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index 8bb089537c8..fc40d9c605a 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -28,7 +28,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-spark Phoenix - Spark diff --git a/phoenix-tracing-webapp/pom.xml b/phoenix-tracing-webapp/pom.xml index 45032b44f43..9d26ed25af1 100755 --- a/phoenix-tracing-webapp/pom.xml +++ b/phoenix-tracing-webapp/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 phoenix-tracing-webapp diff --git a/pom.xml b/pom.xml index 210d80ead8e..4d1c4da737d 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 org.apache.phoenix phoenix - 5.0.0-HBase-2.0 + 5.0.0-HBase-2.0-cdh6.0.1 pom Apache Phoenix A SQL layer over HBase @@ -44,6 +44,19 @@ apache release https://repository.apache.org/content/repositories/releases/ + + cloudera + https://repository.cloudera.com/artifactory/cloudera-repos/ + + + cdh.releases.repo + https://repository.cloudera.com/content/groups/cdh-releases-rcs + CDH Releases Repository + + false + + + @@ -65,33 +78,164 @@ true ${project.basedir} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - 2.0.0 - 3.0.0 + 2.0.0-cdh6.0.1 + 3.0.0-cdh6.0.1 1.4 - 3.0.0 - 0.13.0 - 1.9.2 + 2.1.1-cdh6.0.1 + 0.17.0-cdh6.0.1 + 1.9.13 3.5.2 1.2.17 3.3.6 - 1.6.4 + 1.7.25 2.5.0 - 2.1 - 2.5 + 2.6 + 2.6 1.2 1.0 1.2.0 13.0.1 - 1.4.0 - 0.9.0.0 + 1.8.0-cdh6.0.1 + 1.0.1-cdh6.0.1 1.3.9-1 1.0-1 2.11 0.3 - 1.7 + 1.9 3.1.0-incubating 3.2.2 @@ -100,7 +244,7 @@ 1.11.0 9.3.19.v20170502 
0.14.0-incubating - 2.3.0 + 2.2.0-cdh6.0.1 2.11.8 2.11 2.9.5 @@ -548,6 +692,13 @@ + + + + + + + org.apache.hbase hbase-annotations From d12b6c491298e03e8c014835b3af4e78cf122e88 Mon Sep 17 00:00:00 2001 From: Ankit Singhal Date: Mon, 30 Jul 2018 16:52:21 -0400 Subject: [PATCH 2/8] PHOENIX-4826 Changes to support HBase 2.0.1 (cherry picked from commit a4f93eb458c516206cc3ed25978fb025d752a2a7) --- .../covered/data/DelegateComparator.java | 83 +++++++++++++++++++ .../index/covered/data/IndexMemStore.java | 6 +- .../index/covered/data/TestIndexMemStore.java | 6 +- pom.xml | 18 ++++ 4 files changed, 107 insertions(+), 6 deletions(-) create mode 100644 phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/DelegateComparator.java diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/DelegateComparator.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/DelegateComparator.java new file mode 100644 index 00000000000..478d98bf2cc --- /dev/null +++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/DelegateComparator.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.hbase.index.covered.data; + +import java.util.Comparator; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; + +public class DelegateComparator implements CellComparator { + + private CellComparator delegate; + + public DelegateComparator(CellComparator delegate) { + this.delegate=delegate; + } + + @Override + public int compare(Cell leftCell, Cell rightCell) { + return delegate.compare(leftCell, rightCell); + } + + @Override + public int compareRows(Cell leftCell, Cell rightCell) { + return delegate.compareRows(leftCell, rightCell); + } + + @Override + public int compareRows(Cell cell, byte[] bytes, int offset, int length) { + return delegate.compareRows(cell, bytes, offset, length); + } + + @Override + public int compareWithoutRow(Cell leftCell, Cell rightCell) { + return delegate.compareWithoutRow(leftCell, rightCell); + } + + @Override + public int compareFamilies(Cell leftCell, Cell rightCell) { + return delegate.compareFamilies(leftCell, rightCell); + } + + @Override + public int compareQualifiers(Cell leftCell, Cell rightCell) { + return delegate.compareQualifiers(leftCell, rightCell); + } + + @Override + public int compareTimestamps(Cell leftCell, Cell rightCell) { + return delegate.compareTimestamps(leftCell, rightCell); + } + + @Override + public int compareTimestamps(long leftCellts, long rightCellts) { + return delegate.compareTimestamps(leftCellts, rightCellts); + } + + @Override + public int compare(Cell leftCell, Cell rightCell, boolean ignoreSequenceid) { + return delegate.compare(leftCell, rightCell, ignoreSequenceid); + } + + @Override + public Comparator getSimpleComparator() { + return delegate.getSimpleComparator(); + } + +} diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/IndexMemStore.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/IndexMemStore.java index 824749678b8..301d8257c39 100644 --- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/IndexMemStore.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/IndexMemStore.java @@ -79,10 +79,10 @@ public class IndexMemStore implements KeyValueStore { private CellComparator comparator; public IndexMemStore() { - this(new CellComparatorImpl(){ + this(new DelegateComparator(new CellComparatorImpl()){ @Override - public int compare(Cell a, Cell b) { - return super.compare(a, b, true); + public int compare(Cell leftCell, Cell rightCell) { + return super.compare(leftCell, rightCell, true); } }); } diff --git a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java index 0f5f995a00e..e40cdd7f3c4 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java @@ -39,10 +39,10 @@ public class TestIndexMemStore { @Test public void testCorrectOverwritting() throws Exception { - IndexMemStore store = new IndexMemStore(new CellComparatorImpl(){ + IndexMemStore store = new IndexMemStore(new DelegateComparator(new CellComparatorImpl()){ @Override - public int compare(Cell a, Cell b) { - return super.compare(a, b, true); + public int compare(Cell leftCell, Cell rightCell) { + return super.compare(leftCell, rightCell, true); } }); long ts = 10; diff --git a/pom.xml b/pom.xml index 4d1c4da737d..5e1f04284ed 100644 --- a/pom.xml +++ b/pom.xml @@ -464,6 +464,24 @@ verify + + SplitSystemCatalogTests + + UTF-8 + ${numForkedIT} + alphabetical + false + -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ + ${test.output.tofile} + 
${basedir}/src/it/java + org.apache.phoenix.end2end.SplitSystemCatalogTests + kill + + + integration-test + verify + + From e0929ce3892a3f3c89d2eea5e8bf333f261c7476 Mon Sep 17 00:00:00 2001 From: Rob Voyer Date: Thu, 24 Jan 2019 16:55:24 -0500 Subject: [PATCH 3/8] reverting a couple dependency changes that don't work and we won't fix for now --- pom.xml | 140 +++----------------------------------------------------- 1 file changed, 6 insertions(+), 134 deletions(-) diff --git a/pom.xml b/pom.xml index 5e1f04284ed..b14433acea5 100644 --- a/pom.xml +++ b/pom.xml @@ -78,145 +78,16 @@ true ${project.basedir} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 2.0.0-cdh6.0.1 3.0.0-cdh6.0.1 1.4 - 2.1.1-cdh6.0.1 - 0.17.0-cdh6.0.1 + + 3.0.0 + + 0.13.0 1.9.13 3.5.2 1.2.17 @@ -230,7 +101,8 @@ 1.2.0 13.0.1 1.8.0-cdh6.0.1 - 1.0.1-cdh6.0.1 + + 0.9.0.0 1.3.9-1 1.0-1 2.11 From a7fad83570e5d776ee05cb3f27b3a150418f8fff Mon Sep 17 00:00:00 2001 From: dmilan77 Date: Mon, 7 Jan 2019 12:49:06 -0500 Subject: [PATCH 4/8] parcel added --- 1 | 1087 +++++++++++++++++ .../components/all-common-dependencies.xml | 56 + .../src/build/components/all-common-files.xml | 84 ++ .../src/build/components/all-common-jars.xml | 217 ++++ .../src/build/manifest/make_manifest.py | 117 ++ phoenix-parcel/src/build/parcel.xml | 40 + .../src/parcel/bin/phoenix-performance.py | 39 + phoenix-parcel/src/parcel/bin/phoenix-psql.py | 39 + .../src/parcel/bin/phoenix-sqlline.py | 40 + .../src/parcel/bin/phoenix-utils.py | 39 + .../parcel/cloudera/cdh_version.properties | 19 + .../src/parcel/meta/alternatives.json | 26 + phoenix-parcel/src/parcel/meta/parcel.json | 34 + phoenix-parcel/src/parcel/meta/phoenix_env.sh | 47 + pom.xml | 1 + 15 files changed, 1885 insertions(+) create mode 100644 1 create mode 100644 
phoenix-parcel/src/build/components/all-common-dependencies.xml create mode 100644 phoenix-parcel/src/build/components/all-common-files.xml create mode 100644 phoenix-parcel/src/build/components/all-common-jars.xml create mode 100644 phoenix-parcel/src/build/manifest/make_manifest.py create mode 100644 phoenix-parcel/src/build/parcel.xml create mode 100644 phoenix-parcel/src/parcel/bin/phoenix-performance.py create mode 100644 phoenix-parcel/src/parcel/bin/phoenix-psql.py create mode 100644 phoenix-parcel/src/parcel/bin/phoenix-sqlline.py create mode 100644 phoenix-parcel/src/parcel/bin/phoenix-utils.py create mode 100644 phoenix-parcel/src/parcel/cloudera/cdh_version.properties create mode 100644 phoenix-parcel/src/parcel/meta/alternatives.json create mode 100644 phoenix-parcel/src/parcel/meta/parcel.json create mode 100644 phoenix-parcel/src/parcel/meta/phoenix_env.sh diff --git a/1 b/1 new file mode 100644 index 00000000000..07799026921 --- /dev/null +++ b/1 @@ -0,0 +1,1087 @@ + + 4.0.0 + org.apache.phoenix + phoenix + CDH-6.0.1-HBase-2.0-SNAPSHOT + pom + Apache Phoenix + A SQL layer over HBase + + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + + Apache Software Foundation + http://www.apache.org + + + + phoenix-core + phoenix-flume + phoenix-kafka + phoenix-pig + phoenix-queryserver-client + phoenix-queryserver + phoenix-pherf + phoenix-spark + phoenix-hive + phoenix-client + phoenix-server + phoenix-assembly + phoenix-tracing-webapp + phoenix-load-balancer + + + + + apache release + https://repository.apache.org/content/repositories/releases/ + + + cloudera + https://repository.cloudera.com/artifactory/cloudera-repos/ + + + + + org.apache + apache + 14 + + + + scm:git:http://git-wip-us.apache.org/repos/asf/phoenix.git + https://git-wip-us.apache.org/repos/asf/phoenix.git + scm:git:https://git-wip-us.apache.org/repos/asf/phoenix.git + + + + + src/main/antlr3 + target/generated-sources/antlr3 + true 
+ ${project.basedir} + + + 2.0.0-cdh6.0.1 + 3.0.0-cdh6.0.1 + + + 1.4 + 3.0.0 + 0.13.0 + 1.9.2 + 3.5.2 + 1.2.17 + 3.3.6 + 1.6.4 + 2.5.0 + 2.1 + 3.8 + 1.2 + 1.0 + 1.2.0 + 13.0.1 + 1.4.0 + 0.9.0.0 + 1.3.9-1 + 1.0-1 + 2.11 + 0.3 + 1.7 + 3.1.0-incubating + 3.2.2 + + 1.6 + 2.1.2 + 1.12.0 + 9.3.19.v20170502 + 0.15.0-incubating + 1.0.0 + 2.3.0 + 2.11.8 + 2.11 + 2.9.5 + 1.0.4 + 3.1.0 + + 1.8.5 + 4.12 + 1.46 + + + 2.9 + 1.9.1 + 2.20 + 2.20 + + 2.1 + 2.5.2 + + + 8 + 7 + false + false + + + UTF-8 + UTF-8 + 4.0.0 + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.0 + + 1.8 + 1.8 + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.antlr + antlr3-maven-plugin + [3.5,) + + antlr + + + + + + + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.5.2 + + + org.apache.maven.plugins + maven-eclipse-plugin + ${maven-eclipse-plugin.version} + + + maven-assembly-plugin + ${maven.assembly.version} + + + org.apache.rat + apache-rat-plugin + + + + + org.codehaus.mojo + build-helper-maven-plugin + ${maven-build-helper-plugin.version} + + + add-test-source + validate + + add-test-source + + + + ${basedir}/src/it/java + + + + + add-test-resource + validate + + add-test-resource + + + + + ${basedir}/src/it/resources + + + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + + ParallelStatsEnabledTest + + UTF-8 + ${numForkedIT} + alphabetical + true + alphabetical + + -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ 
-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded. + ${test.output.tofile} + kill + ${basedir}/src/it/java + org.apache.phoenix.end2end.ParallelStatsEnabledTest + + + integration-test + verify + + + + ParallelStatsDisabledTest + + UTF-8 + ${numForkedIT} + alphabetical + true + alphabetical + + + + -Xmx3000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded. + ${test.output.tofile} + kill + ${basedir}/src/it/java + org.apache.phoenix.end2end.ParallelStatsDisabledTest + + + integration-test + verify + + + + HBaseManagedTimeTests + + UTF-8 + ${numForkedIT} + alphabetical + true + -enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded. + ${test.output.tofile} + ${basedir}/src/it/java + org.apache.phoenix.end2end.HBaseManagedTimeTest + kill + + + integration-test + verify + + + + NeedTheirOwnClusterTests + + UTF-8 + ${numForkedIT} + alphabetical + false + -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded. 
+ ${test.output.tofile} + ${basedir}/src/it/java + org.apache.phoenix.end2end.NeedsOwnMiniClusterTest + kill + + + integration-test + verify + + + + SplitSystemCatalogTests + + UTF-8 + ${numForkedIT} + alphabetical + false + -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ + ${test.output.tofile} + ${basedir}/src/it/java + org.apache.phoenix.end2end.SplitSystemCatalogTests + kill + + + integration-test + verify + + + + + + maven-dependency-plugin + ${maven-dependency-plugin.version} + + + create-mrapp-generated-classpath + generate-test-resources + + build-classpath + + + ${project.build.directory}/classes/mrapp-generated-classpath + + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.1.1 + + + + org.apache.felix + maven-bundle-plugin + 2.5.3 + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 2.13 + + + validate + validate + + true + ${top.dir}/src/main/config/checkstyle/checker.xml + ${top.dir}/src/main/config/checkstyle/suppressions.xml + true + ${top.dir}/src/main/config/checkstyle/header.txt + false + false + + + check + + + + + + org.apache.maven.plugins + maven-source-plugin + 2.2.1 + + + attach-sources + prepare-package + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.9 + + true + + http://hbase.apache.org/apidocs/ + + + + + attach-javadocs + + + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + ${numForkedUT} + true + -enableassertions -Xmx2250m -XX:MaxPermSize=128m + -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ + ${test.output.tofile} + kill + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.4 + + + prepare-package + + + test-jar + + 
+ + + + org.apache.maven.plugins + maven-site-plugin + 3.7.1 + + + org.apache.rat + apache-rat-plugin + + + + CHANGES + + dev/phoenix.importorder + + dev/release_files/LICENSE + dev/release_files/NOTICE + + docs/*.csv + examples/*.csv + + examples/*.sql + examples/pig/testdata + + **/patchprocess/** + + bin/argparse-1.4.0/argparse.py + + python/requests-kerberos/** + python/phoenixdb/phoenixdb/avatica/proto/* + python/phoenixdb/*.rst + python/phoenixdb/ci/** + python/phoenixdb/doc/*.rst + python/phoenixdb/doc/conf.py + python/phoenixdb/doc/Makefile + + + + + + org.apache.felix + maven-bundle-plugin + true + true + + + + + + + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + org.apache.phoenix + phoenix-core + ${project.version} + + + org.apache.phoenix + phoenix-core + ${project.version} + test-jar + test + + + org.apache.phoenix + phoenix-flume + ${project.version} + + + org.apache.phoenix + phoenix-kafka + ${project.version} + + + org.apache.phoenix + phoenix-pig + ${project.version} + + + org.apache.phoenix + phoenix-spark + ${project.version} + + + org.apache.phoenix + phoenix-queryserver + ${project.version} + + + org.apache.phoenix + phoenix-queryserver-client + ${project.version} + + + org.apache.phoenix + phoenix-load-balancer + ${project.version} + + + + + org.apache.hbase + hbase-annotations + ${hbase.version} + + + org.apache.hbase + hbase-testing-util + ${hbase.version} + test + true + + + org.jruby + jruby-complete + + + org.apache.hadoop + hadoop-hdfs + + + + + org.apache.hbase + hbase-it + ${hbase.version} + test-jar + test + + + org.jruby + jruby-complete + + + + + org.apache.hbase + hbase-protocol + ${hbase.version} + + + org.apache.hbase + hbase-common + ${hbase.version} + + + org.apache.hbase + hbase-common + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-client + ${hbase.version} + + + org.apache.hbase + hbase-client + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-server + ${hbase.version} + 
+ + org.apache.hbase + hbase-http + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-server + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-hadoop-compat + ${hbase.version} + + + org.apache.hbase + hbase-mapreduce + ${hbase.version} + + + org.apache.hbase + hbase-hadoop-compat + ${hbase.version} + test-jar + test + + + org.apache.hbase + hbase-hadoop2-compat + ${hbase.version} + + + org.apache.hbase + hbase-hadoop2-compat + ${hbase.version} + test-jar + test + + + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + org.apache.hadoop + hadoop-annotations + ${hadoop.version} + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + + + org.apache.hadoop + hadoop-minicluster + ${hadoop.version} + true + test + + + org.apache.hadoop + hadoop-client-minicluster + ${hadoop.version} + test + + + + + org.apache.hadoop + hadoop-auth + ${hadoop.version} + + + org.apache.hadoop + hadoop-mapreduce-client-common + ${hadoop.version} + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop.version} + + + org.apache.hadoop + hadoop-client + ${hadoop.version} + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + test-jar + test + + + org.apache.hadoop + hadoop-minikdc + ${hadoop.version} + + + + + org.apache.pig + pig + ${pig.version} + h2 + + + org.xerial.snappy + snappy-java + + + + + org.apache.calcite.avatica + avatica + ${avatica.version} + + + org.apache.calcite.avatica + avatica-core + ${avatica.version} + + + org.apache.calcite.avatica + avatica-server + ${avatica.version} + + + + + org.apache.tephra + tephra-api + ${tephra.version} + + + org.apache.tephra + tephra-core + ${tephra.version} + + + ch.qos.logback + logback-core + + + ch.qos.logback + logback-classic + + + + + org.apache.tephra + tephra-core + test-jar + ${tephra.version} + test + + + ch.qos.logback + logback-core + + + ch.qos.logback + logback-classic + + + + + 
org.apache.tephra + tephra-hbase-compat-2.0 + ${tephra.version} + + + + + org.antlr + antlr-runtime + ${antlr.version} + + + jline + jline + 2.11 + + + sqlline + sqlline + ${sqlline.version} + + + com.google.guava + guava + ${guava.version} + + + org.apache.flume + flume-ng-core + ${flume.version} + + + org.xerial.snappy + snappy-java + + + + + com.github.stephenc.findbugs + findbugs-annotations + ${findbugs-annotations.version} + + + com.github.stephenc.jcip + jcip-annotations + ${jcip-annotations.version} + + + org.iq80.snappy + snappy + ${snappy.version} + + + org.codehaus.jackson + jackson-core-asl + ${jackson.version} + compile + + + org.codehaus.jackson + jackson-mapper-asl + ${jackson.version} + compile + + + com.google.code.findbugs + jsr305 + 2.0.1 + + + org.codehaus.jackson + jackson-jaxrs + ${jackson.version} + test + + + org.codehaus.jackson + jackson-xc + ${jackson.version} + test + + + junit + junit + ${junit.version} + + + org.mockito + mockito-all + ${mockito-all.version} + test + + + com.google.protobuf + protobuf-java + ${protobuf-java.version} + + + log4j + log4j + ${log4j.version} + + + org.slf4j + slf4j-api + ${slf4j.version} + + + commons-cli + commons-cli + ${commons-cli.version} + + + commons-logging + commons-logging + ${commons-logging.version} + + + org.apache.htrace + htrace-core + ${htrace.version} + + + commons-codec + commons-codec + ${commons-codec.version} + + + commons-collections + commons-collections + ${collections.version} + + + org.apache.commons + commons-csv + ${commons-csv.version} + + + org.apache.commons + commons-lang3 + ${commons-lang.version} + + + joda-time + joda-time + ${jodatime.version} + + + com.clearspring.analytics + stream + ${stream.version} + + + com.salesforce.i18n + i18n-util + ${i18n-util.version} + + + org.eclipse.jetty + jetty-http + ${jetty.version} + + + org.eclipse.jetty + jetty-util + ${jetty.version} + + + org.eclipse.jetty + jetty-security + ${jetty.version} + + + org.eclipse.jetty + jetty-server 
+ ${jetty.version} + + + org.eclipse.jetty + jetty-servlet + ${jetty.version} + + + org.eclipse.jetty + jetty-webapp + ${jetty.version} + + + javax.servlet + javax.servlet-api + ${servlet.api.version} + + + org.bouncycastle + bcprov-jdk16 + ${bouncycastle.version} + test + + + com.lmax + disruptor + ${disruptor.version} + + + + + + + + release + + + + org.apache.rat + apache-rat-plugin + + + package + + check + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + verify + + sign + + + + + + + + + spark16 + + 1.6.1 + 2.10.4 + 2.10 + + + + + + + + org.apache.maven.plugins + maven-project-info-reports-plugin + 3.0.0 + + + org.codehaus.mojo + findbugs-maven-plugin + 3.0.5 + + + + diff --git a/phoenix-parcel/src/build/components/all-common-dependencies.xml b/phoenix-parcel/src/build/components/all-common-dependencies.xml new file mode 100644 index 00000000000..9af3e94a383 --- /dev/null +++ b/phoenix-parcel/src/build/components/all-common-dependencies.xml @@ -0,0 +1,56 @@ + + + + + + + false + ${parcel.folder}/lib/phoenix/lib + + org.apache.phoenix:phoenix-core + org.iq80.snappy:snappy + org.antlr:antlr* + org.apache.tephra:tephra* + com.google.code.gson:gson + org.jruby.joni:joni + org.jruby.jcodings:jcodings + joda-time:joda-time + org.apache.twill:twill* + com.google.inject.extensions:guice-assistedinject + it.unimi.dsi:fastutil + io.dropwizard.metrics:metrics-core + org.apache.thrift:libthrift + com.clearspring.analytics:stream + com.salesforce.i18n:i18n-util + com.tdunning:json + com.jayway.jsonpath:json-path + net.minidev:json-smart + net.minidev:accessors-smart + sqlline:sqlline + org.apache.commons:commons-csv + com.ibm.icu:icu4j + com.ibm.icu:icu4j-charset + com.ibm.icu:icu4j-localespi + + + + diff --git a/phoenix-parcel/src/build/components/all-common-files.xml b/phoenix-parcel/src/build/components/all-common-files.xml new file mode 100644 index 00000000000..fa85ec0f503 --- /dev/null +++ 
b/phoenix-parcel/src/build/components/all-common-files.xml @@ -0,0 +1,84 @@ + + + + + + + + + ${project.basedir}/src/parcel/bin + ${parcel.folder}/bin + 0755 + 0755 + false + + * + + + + + ${project.basedir}/src/parcel/meta + ${parcel.folder}/meta + 0644 + 0755 + true + + * + + + + + + ${project.basedir}/../bin + ${parcel.folder}/lib/phoenix/bin + 0755 + 0755 + false + + hbase-site.xml + + + + + ${project.basedir}/../dev + ${parcel.folder}/lib/phoenix/dev + 0644 + 0755 + false + + * + + + + + ${project.basedir}/../examples + ${parcel.folder}/lib/phoenix/examples + 0644 + 0755 + false + + * + + + + + diff --git a/phoenix-parcel/src/build/components/all-common-jars.xml b/phoenix-parcel/src/build/components/all-common-jars.xml new file mode 100644 index 00000000000..c659ab89c96 --- /dev/null +++ b/phoenix-parcel/src/build/components/all-common-jars.xml @@ -0,0 +1,217 @@ + + + + + + + ${project.basedir}/../phoenix-client/target + ${parcel.folder}/lib/phoenix/ + + phoenix-*-client.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-server/target + ${parcel.folder}/lib/phoenix/ + + phoenix-*-server.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-queryserver/target/ + ${parcel.folder}/lib/phoenix/ + + phoenix-*-queryserver.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-queryserver-client/target/ + ${parcel.folder}/lib/phoenix/ + + phoenix-*-thin-client.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-hive/target/ + ${parcel.folder}/lib/phoenix/ + + phoenix-*-hive.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + + + ${project.basedir}/../phoenix-hadoop-compat/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-pig/target/ + 
${parcel.folder}/lib/phoenix/lib + + phoenix-pig-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-flume/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-core/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-spark/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-*.jar + + + *-javadoc.jar + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-queryserver/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-queryserver-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-hive/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-hive-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + ${project.basedir}/../phoenix-queryserver-client/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + phoenix-*-thin-client.jar + + 0644 + + + ${project.basedir}/../phoenix-pherf/target/ + ${parcel.folder}/lib/phoenix/lib + + phoenix-*.jar + + + *-minimal.jar + *-sources.jar + *-tests.jar + + 0644 + + + diff --git a/phoenix-parcel/src/build/manifest/make_manifest.py b/phoenix-parcel/src/build/manifest/make_manifest.py new file mode 100644 index 00000000000..38a9dc58de1 --- /dev/null +++ b/phoenix-parcel/src/build/manifest/make_manifest.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python +# +# Licensed to Cloudera, Inc. under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. Cloudera, Inc. licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This program creates a manifest.json file from a directory of parcels and +# places the file in the same directory as the parcels. +# Once created, the directory can be served over http as a parcel repository. + +import hashlib +import json +import os +import re +import sys +import tarfile +import time + +def _get_parcel_dirname(parcel_name): + """ + Extract the required parcel directory name for a given parcel. + + eg: CDH-5.0.0-el6.parcel -> CDH-5.0.0 + """ + parts = re.match(r"^(.*?)-(.*)-(.*?)$", parcel_name).groups() + return parts[0] + '-' + parts[1] + +def _safe_copy(key, src, dest): + """ + Conditionally copy a key/value pair from one dictionary to another. + + Nothing is done if the key is not present in the source dictionary + """ + if key in src: + dest[key] = src[key] + +def make_manifest(path, timestamp=time.time()): + """ + Make a manifest.json document from the contents of a directory. + + This function will scan the specified directory, identify any parcel files + in it, and then build a manifest from those files. Certain metadata will be + extracted from the parcel and copied into the manifest. 
+ + @param path: The path of the directory to scan for parcels + @param timestamp: Unix timestamp to place in manifest.json + @return: the manifest.json as a string + """ + manifest = {} + manifest['lastUpdated'] = int(timestamp * 1000) + manifest['parcels'] = [] + + files = os.listdir(path) + for f in files: + if not f.endswith('.parcel'): + continue + + print("Found parcel %s" % (f,)) + entry = {} + entry['parcelName'] = f + + fullpath = os.path.join(path, f) + + with open(fullpath, 'rb') as fp: + entry['hash'] = hashlib.sha1(fp.read()).hexdigest() + + with tarfile.open(fullpath, 'r') as tar: + try: + json_member = tar.getmember(os.path.join(_get_parcel_dirname(f), + 'meta', 'parcel.json')) + except KeyError: + print("Parcel does not contain parcel.json") + continue + try: + parcel = json.loads(tar.extractfile(json_member).read().decode(encoding='UTF-8')) + except: + print("Failed to parse parcel.json") + continue + _safe_copy('depends', parcel, entry) + _safe_copy('replaces', parcel, entry) + _safe_copy('conflicts', parcel, entry) + _safe_copy('components', parcel, entry) + _safe_copy('servicesRestartInfo', parcel, entry) + + try: + notes_member = tar.getmember(os.path.join(_get_parcel_dirname(f), + 'meta', 'release-notes.txt')) + entry['releaseNotes'] = tar.extractfile(notes_member).read().decode(encoding='UTF-8') + except KeyError: + # No problem if there's no release notes + pass + + manifest['parcels'].append(entry) + + return json.dumps(manifest, indent=4, separators=(',', ': ')) + +if __name__ == "__main__": + path = os.path.curdir + if len(sys.argv) > 1: + path = sys.argv[1] + print("Scanning directory: %s" % (path)) + + manifest = make_manifest(path) + with open(os.path.join(path, 'manifest.json'), 'w') as fp: + fp.write(manifest) diff --git a/phoenix-parcel/src/build/parcel.xml b/phoenix-parcel/src/build/parcel.xml new file mode 100644 index 00000000000..91fe978d8f3 --- /dev/null +++ b/phoenix-parcel/src/build/parcel.xml @@ -0,0 +1,40 @@ + + + + + + + 
all + + tar + + false + + + src/build/components/all-common-files.xml + src/build/components/all-common-jars.xml + src/build/components/all-common-dependencies.xml + + + \ No newline at end of file diff --git a/phoenix-parcel/src/parcel/bin/phoenix-performance.py b/phoenix-parcel/src/parcel/bin/phoenix-performance.py new file mode 100644 index 00000000000..6916d179f62 --- /dev/null +++ b/phoenix-parcel/src/parcel/bin/phoenix-performance.py @@ -0,0 +1,39 @@ +#!/bin/bash +############################################################################ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +############################################################################ + + # Reference: http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in + SOURCE="${BASH_SOURCE[0]}" + BIN_DIR="$( dirname "$SOURCE" )" + while [ -h "$SOURCE" ] + do + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + done + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + LIB_DIR=$BIN_DIR/../lib + + +# Autodetect JAVA_HOME if not defined +. 
$LIB_DIR/../../CDH/lib/bigtop-utils/bigtop-detect-javahome + +export PATH=$JAVA_HOME/jre/bin:$PATH +exec $LIB_DIR/phoenix/bin/performance.py "$@" diff --git a/phoenix-parcel/src/parcel/bin/phoenix-psql.py b/phoenix-parcel/src/parcel/bin/phoenix-psql.py new file mode 100644 index 00000000000..41b920ac0a2 --- /dev/null +++ b/phoenix-parcel/src/parcel/bin/phoenix-psql.py @@ -0,0 +1,39 @@ +#!/bin/bash +############################################################################ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +############################################################################ + + # Reference: http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in + SOURCE="${BASH_SOURCE[0]}" + BIN_DIR="$( dirname "$SOURCE" )" + while [ -h "$SOURCE" ] + do + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + done + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + LIB_DIR=$BIN_DIR/../lib + + +# Autodetect JAVA_HOME if not defined +. 
$LIB_DIR/../../CDH/lib/bigtop-utils/bigtop-detect-javahome + +export PATH=$JAVA_HOME/jre/bin:$PATH +exec $LIB_DIR/phoenix/bin/psql.py "$@" diff --git a/phoenix-parcel/src/parcel/bin/phoenix-sqlline.py b/phoenix-parcel/src/parcel/bin/phoenix-sqlline.py new file mode 100644 index 00000000000..db3c32d8946 --- /dev/null +++ b/phoenix-parcel/src/parcel/bin/phoenix-sqlline.py @@ -0,0 +1,40 @@ +#!/bin/bash +############################################################################ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +############################################################################ + + # Reference: http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in + SOURCE="${BASH_SOURCE[0]}" + BIN_DIR="$( dirname "$SOURCE" )" + while [ -h "$SOURCE" ] + do + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + done + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + LIB_DIR=$BIN_DIR/../lib + + +# Autodetect JAVA_HOME if not defined +. 
$LIB_DIR/../../CDH/lib/bigtop-utils/bigtop-detect-javahome + +export HBASE_CONF_PATH=${HBASE_CONF_PATH:-/etc/hbase/conf} +export PATH=$JAVA_HOME/jre/bin:$PATH +exec $LIB_DIR/phoenix/bin/sqlline.py "$@" diff --git a/phoenix-parcel/src/parcel/bin/phoenix-utils.py b/phoenix-parcel/src/parcel/bin/phoenix-utils.py new file mode 100644 index 00000000000..d9bb69210f6 --- /dev/null +++ b/phoenix-parcel/src/parcel/bin/phoenix-utils.py @@ -0,0 +1,39 @@ +#!/bin/bash +############################################################################ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +############################################################################ + + # Reference: http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in + SOURCE="${BASH_SOURCE[0]}" + BIN_DIR="$( dirname "$SOURCE" )" + while [ -h "$SOURCE" ] + do + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + done + BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + LIB_DIR=$BIN_DIR/../lib + + +# Autodetect JAVA_HOME if not defined +. 
$LIB_DIR/../../CDH/lib/bigtop-utils/bigtop-detect-javahome + +export PATH=$JAVA_HOME/jre/bin:$PATH +exec $LIB_DIR/phoenix/bin/phoenix_utils.py "$@" diff --git a/phoenix-parcel/src/parcel/cloudera/cdh_version.properties b/phoenix-parcel/src/parcel/cloudera/cdh_version.properties new file mode 100644 index 00000000000..148bd0f60b5 --- /dev/null +++ b/phoenix-parcel/src/parcel/cloudera/cdh_version.properties @@ -0,0 +1,19 @@ +############################################################################ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +############################################################################ diff --git a/phoenix-parcel/src/parcel/meta/alternatives.json b/phoenix-parcel/src/parcel/meta/alternatives.json new file mode 100644 index 00000000000..3b49b123dd1 --- /dev/null +++ b/phoenix-parcel/src/parcel/meta/alternatives.json @@ -0,0 +1,26 @@ +{ + "phoenix-performance.py": { + "destination": "/usr/bin/phoenix-performance.py", + "source": "bin/phoenix-performance.py", + "priority": 10, + "isDirectory": false + }, + "phoenix-psql.py": { + "destination": "/usr/bin/phoenix-psql.py", + "source": "bin/phoenix-psql.py", + "priority": 10, + "isDirectory": false + }, + "phoenix-sqlline.py": { + "destination": "/usr/bin/phoenix-sqlline.py", + "source": "bin/phoenix-sqlline.py", + "priority": 10, + "isDirectory": false + }, + "phoenix-utils.py" : { + "destination": "/usr/bin/phoenix-utils.py", + "source": "bin/phoenix-utils.py", + "priority": 10, + "isDirectory": false + } +} diff --git a/phoenix-parcel/src/parcel/meta/parcel.json b/phoenix-parcel/src/parcel/meta/parcel.json new file mode 100644 index 00000000000..0be423a8920 --- /dev/null +++ b/phoenix-parcel/src/parcel/meta/parcel.json @@ -0,0 +1,34 @@ +{ + "schema_version": 1, + "name": "APACHE_PHOENIX", + "version": "${parcel.version}", + "groups": [], + "extraVersionInfo": { + "baseVersion": "${parcel.base.version}", + "fullVersion": "${parcel.full.version}", + "patchCount": "${parcel.patch.count}" + }, + "packages": [ + { + "version": "${parcel.package.version}", + "name": "phoenix" + } + ], + "components": [ + { + "name": "phoenix", + "version": "${parcel.component.version}", + "pkg_version": "${parcel.package.version}", + "pkg_release": "${parcel.component.release}" + } + ], + "scripts": { + "defines": "phoenix_env.sh" + }, + "depends": "${parcel.depends}", + "provides": [ + "hbase-plugin" + ], + "setActiveSymlink": true, + "users": {} +} \ No newline at end of file diff --git a/phoenix-parcel/src/parcel/meta/phoenix_env.sh 
b/phoenix-parcel/src/parcel/meta/phoenix_env.sh new file mode 100644 index 00000000000..9287fc99ad0 --- /dev/null +++ b/phoenix-parcel/src/parcel/meta/phoenix_env.sh @@ -0,0 +1,47 @@ +#!/bin/bash +############################################################################ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +############################################################################ + +set -ex + + +#The following is written to aid local testing +if [ -z $PARCELS_ROOT ] ; then + export MYDIR=`dirname "${BASH_SOURCE[0]}"` + PARCELS_ROOT=`cd $MYDIR/../.. 
&& pwd` +fi +PARCEL_DIRNAME=${PARCEL_DIRNAME-PHOENIX} + +MYLIBDIR=${PARCELS_ROOT}/${PARCEL_DIRNAME}/lib/phoenix + +[ -d $MYLIBDIR ] || { + echo "Could not find phoenix parcel lib dir, exiting" >&2 + exit 1 +} + +APPENDSTRING=`echo ${MYLIBDIR}/*.jar | sed 's/ /:/g'` +echo "appending '$APPENDSTRING' to HBASE_CLASSPATH" +if [ -z $HBASE_CLASSPATH ] ; then + export HBASE_CLASSPATH=$APPENDSTRING +else + export HBASE_CLASSPATH="$HBASE_CLASSPATH:$APPENDSTRING" +fi +echo "Set HBASE_CLASSPATH to '$HBASE_CLASSPATH'" +echo "phoenix_env.sh successfully executed at `date`" diff --git a/pom.xml b/pom.xml index b14433acea5..0a9541ae222 100644 --- a/pom.xml +++ b/pom.xml @@ -37,6 +37,7 @@ phoenix-assembly phoenix-tracing-webapp phoenix-load-balancer + phoenix-parcel From 9de850a3082567b957ce7d6c6d773586f2ebe82b Mon Sep 17 00:00:00 2001 From: Rob Voyer Date: Thu, 24 Jan 2019 17:27:28 -0500 Subject: [PATCH 5/8] parcel fix --- phoenix-parcel/pom.xml | 279 +++++++++++++++++++++++++++++++++++++++++ pom.xml | 1 + 2 files changed, 280 insertions(+) create mode 100644 phoenix-parcel/pom.xml diff --git a/phoenix-parcel/pom.xml b/phoenix-parcel/pom.xml new file mode 100644 index 00000000000..af7ec4f0550 --- /dev/null +++ b/phoenix-parcel/pom.xml @@ -0,0 +1,279 @@ + + + + + 4.0.0 + + org.apache.phoenix + phoenix + 5.0.0-HBase-2.0-cdh6.0.1 + + phoenix-parcel + Phoenix Parcels for CDH + Assemble Phoenix artifacts for CDH + pom + + + true + ${project.basedir}/.. 
+ true + ${project.version} + 0 + 1.${parcel.patch.count} + APACHE_PHOENIX-${phoenix.version}.p${parcel.release} + ${parcel.folder}.parcel + ${phoenix.version}.p${parcel.release} + ${phoenix.version} + ${phoenix.version}.p${parcel.release} + ${phoenix.version}+${parcel.patch.count} + ${phoenix.version} + ${cdh.version}.p${parcel.release} + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + default-jar + none + + + + + + maven-assembly-plugin + + + prepare-parcel + prepare-package + + single + + + ${parcel.file} + false + gnu + false + + src/build/parcel.xml + + posix + + + + + + com.coderplus.maven.plugins + copy-rename-maven-plugin + 1.0.1 + + + copy-file-el6 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-el6.parcel + + + + copy-file-el5 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-el5.parcel + + + + copy-file-el7 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-el7.parcel + + + + copy-file-sles11 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-sles11.parcel + + + + copy-file-sles12 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-sles12.parcel + + + + copy-file-precise + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-precise.parcel + + + + copy-file-jessie + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-jessie.parcel + + + + copy-file-trusty + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-trusty.parcel + + + + copy-file-wheezy + package + 
+ copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-wheezy.parcel + + + + copy-file-xenial + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-xenial.parcel + + + + + + org.codehaus.mojo + exec-maven-plugin + + + make-manifest + package + + exec + + + + + python + ${project.build.directory} + + ${project.basedir}/src/build/manifest/make_manifest.py + ${project.build.directory} + + + + + com.coderplus.maven.plugins + copy-rename-maven-plugin + 1.0.1 + + + copy-file-el6 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-el6.parcel + + + + copy-file-el7 + package + + copy + + + ${project.build.directory}/${parcel.folder}.parcel.tar + ${project.build.directory}/${parcel.folder}-el7.parcel + + + + + + + + + + + org.apache.phoenix + phoenix-core + + + org.apache.phoenix + phoenix-flume + + + org.apache.phoenix + phoenix-pig + + + org.apache.phoenix + phoenix-spark + + + diff --git a/pom.xml b/pom.xml index 0a9541ae222..c40b65fbe8d 100644 --- a/pom.xml +++ b/pom.xml @@ -79,6 +79,7 @@ true ${project.basedir} + 6.0.1 2.0.0-cdh6.0.1 3.0.0-cdh6.0.1 From f90810f8ce1c15cb96bcafc88ac5d36b726c0788 Mon Sep 17 00:00:00 2001 From: Rob Voyer Date: Tue, 16 Apr 2019 15:06:26 -0400 Subject: [PATCH 6/8] FT-17943 update to match hbase 2.1.0 used by cdh 6.1.1 --- phoenix-assembly/pom.xml | 2 +- phoenix-client/pom.xml | 2 +- phoenix-core/pom.xml | 2 +- .../apache/phoenix/end2end/QueryMoreIT.java | 6 +++--- .../phoenix/mapreduce/CsvBulkImportUtil.java | 6 +++--- .../util/PhoenixConfigurationUtil.java | 6 +++--- .../phoenix/schema/types/PVarbinary.java | 4 ++-- .../phoenix/util/csv/CsvUpsertExecutor.java | 4 ++-- .../phoenix/util/json/JsonUpsertExecutor.java | 4 ++-- .../util/AbstractUpsertExecutorTest.java | 12 +++++------ .../util/TenantIdByteConversionTest.java | 4 ++-- 
phoenix-flume/pom.xml | 2 +- phoenix-hive/pom.xml | 2 +- phoenix-kafka/pom.xml | 2 +- phoenix-load-balancer/pom.xml | 2 +- phoenix-parcel/pom.xml | 2 +- phoenix-pherf/pom.xml | 2 +- phoenix-pig/pom.xml | 2 +- phoenix-queryserver-client/pom.xml | 2 +- phoenix-queryserver/pom.xml | 2 +- phoenix-server/pom.xml | 2 +- phoenix-spark/pom.xml | 2 +- phoenix-tracing-webapp/pom.xml | 2 +- pom.xml | 20 +++++++++---------- 24 files changed, 47 insertions(+), 49 deletions(-) diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index 1f6826ddfbc..b0499b3ed3a 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-assembly Phoenix Assembly diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml index fb27d1e429c..f78b768d0a1 100644 --- a/phoenix-client/pom.xml +++ b/phoenix-client/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-client Phoenix Client diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml index 4d005615edd..9bdecfe404e 100644 --- a/phoenix-core/pom.xml +++ b/phoenix-core/pom.xml @@ -4,7 +4,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-core Phoenix Core diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java index 9109c123e08..62180261831 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java @@ -36,7 +36,7 @@ import java.util.Map; import java.util.Properties; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.query.QueryServices; @@ -278,7 +278,7 @@ private String[] 
getRecordsOutofCursorTable(String tableOrViewName, boolean quer values[i] = rs.getObject(i + 1); } conn = getTenantSpecificConnection(tenantId); - pkIds.add(Base64.encodeBytes(PhoenixRuntime.encodeColumnValues(conn, tableOrViewName.toUpperCase(), values, columns))); + pkIds.add(Base64.getEncoder().encodeToString(PhoenixRuntime.encodeColumnValues(conn, tableOrViewName.toUpperCase(), values, columns))); } return pkIds.toArray(new String[pkIds.size()]); } @@ -296,7 +296,7 @@ private List doQueryMore(boolean queryAgainstTenantView, String tenantId PreparedStatement stmt = conn.prepareStatement(query); int bindCounter = 1; for (int i = 0; i < cursorIds.length; i++) { - Object[] pkParts = PhoenixRuntime.decodeColumnValues(conn, tableName.toUpperCase(), Base64.decode(cursorIds[i]), columns); + Object[] pkParts = PhoenixRuntime.decodeColumnValues(conn, tableName.toUpperCase(), Base64.getDecoder().decode(cursorIds[i]), columns); for (int j = 0; j < pkParts.length; j++) { stmt.setObject(bindCounter++, pkParts[j]); } diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java index ff9ff727116..2d67c6ef10b 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java @@ -19,7 +19,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.query.QueryServices; @@ -68,7 +68,7 @@ public static void configurePreUpsertProcessor(Configuration conf, @VisibleForTesting static void setChar(Configuration conf, String confKey, char charValue) { - conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes())); + 
conf.set(confKey, Base64.getEncoder().encodeToString(Character.toString(charValue).getBytes())); } @VisibleForTesting @@ -77,7 +77,7 @@ static Character getCharacter(Configuration conf, String confKey) { if (strValue == null) { return null; } - return new String(Base64.decode(strValue)).charAt(0); + return new String(Base64.getDecoder().decode(strValue)).charAt(0); } public static Path getOutputPath(Path outputdir, String tableName) { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java index f3f0415edd4..f76d8684dec 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; import org.apache.hadoop.mapreduce.lib.db.DBWritable; @@ -506,14 +506,14 @@ public static ImportPreUpsertKeyValueProcessor loadPreUpsertProcessor(Configurat public static byte[] getIndexMaintainers(final Configuration configuration){ Preconditions.checkNotNull(configuration); - return Base64.decode(configuration.get(INDEX_MAINTAINERS)); + return Base64.getDecoder().decode(configuration.get(INDEX_MAINTAINERS)); } public static void setIndexMaintainers(final Configuration configuration, final ImmutableBytesWritable indexMetaDataPtr) { Preconditions.checkNotNull(configuration); Preconditions.checkNotNull(indexMetaDataPtr); - configuration.set(INDEX_MAINTAINERS, Base64.encodeBytes(indexMetaDataPtr.get())); + configuration.set(INDEX_MAINTAINERS, 
Base64.getEncoder().encodeToString(indexMetaDataPtr.get())); } public static void setDisableIndexes(Configuration configuration, String indexName) { diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java index b3ce57ad4be..520b5036ee1 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java @@ -20,7 +20,7 @@ import java.sql.Types; import java.text.Format; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.schema.SortOrder; import org.apache.phoenix.util.ByteUtil; @@ -131,7 +131,7 @@ public Object toObject(String value) { if (value == null || value.length() == 0) { return null; } - Object object = Base64.decode(value); + Object object = Base64.getDecoder().decode(value); if (object == null) { throw newIllegalDataException( "Input: [" + value + "] is not base64 encoded"); } return object; diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java index cd40b4415f7..ada6b187574 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java @@ -27,7 +27,7 @@ import javax.annotation.Nullable; import org.apache.commons.csv.CSVRecord; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.expression.function.EncodeFormat; import org.apache.phoenix.query.QueryServices; @@ -189,7 +189,7 @@ public Object apply(@Nullable String input) { Object object = null; switch (format) { case BASE64: - object = Base64.decode(input); + object = Base64.getDecoder().decode(input); 
if (object == null) { throw new IllegalDataException( "Input: [" + input + "] is not base64 encoded"); } break; diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java b/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java index ffa797dfa9d..0f052f6a09f 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java @@ -28,7 +28,7 @@ import javax.annotation.Nullable; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.expression.function.EncodeFormat; import org.apache.phoenix.query.QueryServices; @@ -212,7 +212,7 @@ public Object apply(@Nullable Object input) { Object object = null; switch (format) { case BASE64: - object = Base64.decode(input.toString()); + object = Base64.getDecoder().decode(input.toString()); if (object == null) { throw new IllegalDataException( "Input: [" + input + "] is not base64 encoded"); } break; diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java index 2b2544dd531..37597385c4b 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java @@ -33,7 +33,7 @@ import java.util.List; import java.util.Properties; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.query.BaseConnectionlessQueryTest; import org.apache.phoenix.query.QueryServices; @@ -81,7 +81,7 @@ public void tearDown() throws SQLException { @Test public void testExecute() throws Exception { byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue(); - String encodedBinaryData = 
Base64.encodeBytes(binaryData); + String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData); getUpsertExecutor().execute(createRecord(123L, "NameValue", 42, Arrays.asList(1, 2, 3), true, encodedBinaryData)); @@ -110,7 +110,7 @@ public void testExecute_TooFewFields() throws Exception { @Test public void testExecute_TooManyFields() throws Exception { byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue(); - String encodedBinaryData = Base64.encodeBytes(binaryData); + String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData); R recordWithTooManyFields = createRecord(123L, "NameValue", 42, Arrays.asList(1, 2, 3), true, encodedBinaryData, "garbage"); getUpsertExecutor().execute(recordWithTooManyFields); @@ -131,7 +131,7 @@ public void testExecute_TooManyFields() throws Exception { @Test public void testExecute_NullField() throws Exception { byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue(); - String encodedBinaryData = Base64.encodeBytes(binaryData); + String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData); getUpsertExecutor().execute(createRecord(123L, "NameValue", null, Arrays.asList(1, 2, 3), false, encodedBinaryData)); @@ -151,7 +151,7 @@ public void testExecute_NullField() throws Exception { @Test public void testExecute_InvalidType() throws Exception { byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue(); - String encodedBinaryData = Base64.encodeBytes(binaryData); + String encodedBinaryData = Base64.getEncoder().encodeToString(binaryData); R recordWithInvalidType = createRecord(123L, "NameValue", "ThisIsNotANumber", Arrays.asList(1, 2, 3), true, encodedBinaryData); getUpsertExecutor().execute(recordWithInvalidType); @@ -163,7 +163,7 @@ public void testExecute_InvalidType() throws Exception { @Test public void testExecute_InvalidBoolean() throws Exception { byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue(); - String encodedBinaryData = Base64.encodeBytes(binaryData); + String 
encodedBinaryData = Base64.getEncoder().encodeToString(binaryData); R csvRecordWithInvalidType = createRecord("123,NameValue,42,1:2:3,NotABoolean,"+encodedBinaryData); getUpsertExecutor().execute(csvRecordWithInvalidType); diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java index fb70d228796..37ed569a65e 100644 --- a/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java +++ b/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java @@ -22,7 +22,7 @@ import static org.junit.Assert.fail; import java.sql.SQLException; -import org.apache.hadoop.hbase.util.Base64; +import java.util.Base64; import java.util.Collection; import java.util.List; @@ -201,7 +201,7 @@ public static Collection data() { //Binary byte[] bytes = new byte[] {0, 1, 2, 3}; - String byteString = new String( Base64.encodeBytes(bytes) ); + String byteString = new String( Base64.getEncoder().encode(bytes) ); testCases.add(new Object[] { getDataSchema(PBinary.INSTANCE, SortOrder.getDefault()), false, diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml index fec11a33fc6..3db3df59c14 100644 --- a/phoenix-flume/pom.xml +++ b/phoenix-flume/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-flume Phoenix - Flume diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml index c219ffe784e..6a4bd4e5d62 100644 --- a/phoenix-hive/pom.xml +++ b/phoenix-hive/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-hive Phoenix - Hive diff --git a/phoenix-kafka/pom.xml b/phoenix-kafka/pom.xml index 989ecf0036a..2a49efd65b6 100644 --- a/phoenix-kafka/pom.xml +++ b/phoenix-kafka/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-kafka Phoenix - 
Kafka diff --git a/phoenix-load-balancer/pom.xml b/phoenix-load-balancer/pom.xml index 99d5b02c7cb..7325fb71078 100644 --- a/phoenix-load-balancer/pom.xml +++ b/phoenix-load-balancer/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-load-balancer Phoenix Load Balancer diff --git a/phoenix-parcel/pom.xml b/phoenix-parcel/pom.xml index af7ec4f0550..7c2fc927a54 100644 --- a/phoenix-parcel/pom.xml +++ b/phoenix-parcel/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-parcel Phoenix Parcels for CDH diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml index 32b7d59cedf..7b594f6e0f6 100644 --- a/phoenix-pherf/pom.xml +++ b/phoenix-pherf/pom.xml @@ -15,7 +15,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-pherf diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml index 0984b02dcc3..baf89f1103e 100644 --- a/phoenix-pig/pom.xml +++ b/phoenix-pig/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-pig Phoenix - Pig diff --git a/phoenix-queryserver-client/pom.xml b/phoenix-queryserver-client/pom.xml index e348bf56409..d15f96bc239 100644 --- a/phoenix-queryserver-client/pom.xml +++ b/phoenix-queryserver-client/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-queryserver-client Phoenix Query Server Client diff --git a/phoenix-queryserver/pom.xml b/phoenix-queryserver/pom.xml index 78747b5e496..59a99160bf4 100644 --- a/phoenix-queryserver/pom.xml +++ b/phoenix-queryserver/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-queryserver Phoenix Query Server diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml index 40917acd35f..bf314c8530f 100644 --- a/phoenix-server/pom.xml +++ 
b/phoenix-server/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-server Phoenix Server diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index fc40d9c605a..50bdfc2e9d3 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -28,7 +28,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-spark Phoenix - Spark diff --git a/phoenix-tracing-webapp/pom.xml b/phoenix-tracing-webapp/pom.xml index 9d26ed25af1..e9856c2e73e 100755 --- a/phoenix-tracing-webapp/pom.xml +++ b/phoenix-tracing-webapp/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-tracing-webapp diff --git a/pom.xml b/pom.xml index c40b65fbe8d..4b564608025 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 pom Apache Phoenix A SQL layer over HBase @@ -79,16 +79,14 @@ true ${project.basedir} - 6.0.1 + 6.1.1 - 2.0.0-cdh6.0.1 - 3.0.0-cdh6.0.1 + 2.1.0-cdh6.1.1 + 3.0.0-cdh6.1.1 1.4 - 3.0.0 - 0.13.0 1.9.13 3.5.2 @@ -102,8 +100,8 @@ 1.0 1.2.0 13.0.1 - 1.8.0-cdh6.0.1 - + 1.8.0-cdh6.1.1 + 0.9.0.0 1.3.9-1 1.0-1 @@ -118,7 +116,7 @@ 1.11.0 9.3.19.v20170502 0.14.0-incubating - 2.2.0-cdh6.0.1 + 2.4.0-cdh6.1.1 2.11.8 2.11 2.9.5 @@ -326,8 +324,8 @@ UTF-8 ${numForkedIT} alphabetical - false - -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded. 
+ false + -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded. ${test.output.tofile} ${basedir}/src/it/java org.apache.phoenix.end2end.NeedsOwnMiniClusterTest From 63df6190d1daa3cefbf185f7ed02b717dfcf95a4 Mon Sep 17 00:00:00 2001 From: Rob Voyer Date: Tue, 16 Apr 2019 15:19:39 -0400 Subject: [PATCH 7/8] FT-17943 update to match hbase 2.1.0 used by cdh 6.1.1 --- phoenix-assembly/pom.xml | 2 +- phoenix-client/pom.xml | 2 +- phoenix-core/pom.xml | 2 +- phoenix-flume/pom.xml | 2 +- phoenix-hive/pom.xml | 2 +- phoenix-kafka/pom.xml | 2 +- phoenix-load-balancer/pom.xml | 2 +- phoenix-parcel/pom.xml | 2 +- phoenix-pherf/pom.xml | 2 +- phoenix-pig/pom.xml | 2 +- phoenix-queryserver-client/pom.xml | 2 +- phoenix-queryserver/pom.xml | 2 +- phoenix-server/pom.xml | 2 +- phoenix-spark/pom.xml | 2 +- phoenix-tracing-webapp/pom.xml | 2 +- pom.xml | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index 1f6826ddfbc..b0499b3ed3a 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-assembly Phoenix Assembly diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml index fb27d1e429c..f78b768d0a1 100644 --- a/phoenix-client/pom.xml +++ b/phoenix-client/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-client Phoenix Client diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml index 4d005615edd..9bdecfe404e 100644 --- a/phoenix-core/pom.xml +++ b/phoenix-core/pom.xml @@ -4,7 +4,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 
5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-core Phoenix Core diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml index fec11a33fc6..3db3df59c14 100644 --- a/phoenix-flume/pom.xml +++ b/phoenix-flume/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-flume Phoenix - Flume diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml index c219ffe784e..6a4bd4e5d62 100644 --- a/phoenix-hive/pom.xml +++ b/phoenix-hive/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-hive Phoenix - Hive diff --git a/phoenix-kafka/pom.xml b/phoenix-kafka/pom.xml index 989ecf0036a..2a49efd65b6 100644 --- a/phoenix-kafka/pom.xml +++ b/phoenix-kafka/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-kafka Phoenix - Kafka diff --git a/phoenix-load-balancer/pom.xml b/phoenix-load-balancer/pom.xml index 99d5b02c7cb..7325fb71078 100644 --- a/phoenix-load-balancer/pom.xml +++ b/phoenix-load-balancer/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-load-balancer Phoenix Load Balancer diff --git a/phoenix-parcel/pom.xml b/phoenix-parcel/pom.xml index af7ec4f0550..7c2fc927a54 100644 --- a/phoenix-parcel/pom.xml +++ b/phoenix-parcel/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-parcel Phoenix Parcels for CDH diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml index 32b7d59cedf..7b594f6e0f6 100644 --- a/phoenix-pherf/pom.xml +++ b/phoenix-pherf/pom.xml @@ -15,7 +15,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-pherf diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml index 0984b02dcc3..baf89f1103e 100644 --- a/phoenix-pig/pom.xml +++ b/phoenix-pig/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 
5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-pig Phoenix - Pig diff --git a/phoenix-queryserver-client/pom.xml b/phoenix-queryserver-client/pom.xml index e348bf56409..d15f96bc239 100644 --- a/phoenix-queryserver-client/pom.xml +++ b/phoenix-queryserver-client/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-queryserver-client Phoenix Query Server Client diff --git a/phoenix-queryserver/pom.xml b/phoenix-queryserver/pom.xml index 78747b5e496..59a99160bf4 100644 --- a/phoenix-queryserver/pom.xml +++ b/phoenix-queryserver/pom.xml @@ -26,7 +26,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-queryserver Phoenix Query Server diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml index 40917acd35f..bf314c8530f 100644 --- a/phoenix-server/pom.xml +++ b/phoenix-server/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-server Phoenix Server diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index fc40d9c605a..50bdfc2e9d3 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -28,7 +28,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-spark Phoenix - Spark diff --git a/phoenix-tracing-webapp/pom.xml b/phoenix-tracing-webapp/pom.xml index 9d26ed25af1..e9856c2e73e 100755 --- a/phoenix-tracing-webapp/pom.xml +++ b/phoenix-tracing-webapp/pom.xml @@ -27,7 +27,7 @@ org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 phoenix-tracing-webapp diff --git a/pom.xml b/pom.xml index c40b65fbe8d..b49f32039ce 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 org.apache.phoenix phoenix - 5.0.0-HBase-2.0-cdh6.0.1 + 5.0.0-HBase-2.1.0-cdh6.1.1 pom Apache Phoenix A SQL layer over HBase From 33c4e5be5cf865459ac2274325c357c9c96cf3b7 Mon Sep 17 00:00:00 2001 From: Jaspal Saini Date: Fri, 26 Jul 
2019 10:20:02 -0400 Subject: [PATCH 8/8] Initial commit --- .../phoenix/spark/PhoenixRelation.scala | 25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala index d2eac8c30cd..b0d7889de64 100644 --- a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala +++ b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala @@ -17,16 +17,22 @@ */ package org.apache.phoenix.spark +import java.sql.{Date, Timestamp} +import java.text.Format + import org.apache.hadoop.conf.Configuration import org.apache.spark.rdd.RDD import org.apache.spark.sql.types.StructType import org.apache.spark.sql.{Row, SQLContext} import org.apache.spark.sql.sources._ import org.apache.phoenix.util.StringUtil.escapeStringConstant -import org.apache.phoenix.util.SchemaUtil +import org.apache.phoenix.util.{DateUtil, SchemaUtil} case class PhoenixRelation(tableName: String, zkUrl: String, dateAsTimestamp: Boolean = false)(@transient val sqlContext: SQLContext) - extends BaseRelation with PrunedFilteredScan { + extends BaseRelation with PrunedFilteredScan { + + val dateformat:Format = DateUtil.getDateFormatter(DateUtil.DEFAULT_DATE_FORMAT) + val timeformat:Format = DateUtil.DEFAULT_TIMESTAMP_FORMATTER /* This is the buildScan() implementing Spark's PrunedFilteredScan. 
@@ -105,6 +111,7 @@ case class PhoenixRelation(tableName: String, zkUrl: String, dateAsTimestamp: Bo // Helper function to escape string values in SQL queries private def compileValue(value: Any): Any = value match { + case stringValue: String => s"'${escapeStringConstant(stringValue)}'" // Borrowed from 'elasticsearch-hadoop', support these internal UTF types across Spark versions @@ -112,7 +119,11 @@ case class PhoenixRelation(tableName: String, zkUrl: String, dateAsTimestamp: Bo case utf if (isClass(utf, "org.apache.spark.sql.types.UTF8String")) => s"'${escapeStringConstant(utf.toString)}'" // Spark 1.5 case utf if (isClass(utf, "org.apache.spark.unsafe.types.UTF8String")) => s"'${escapeStringConstant(utf.toString)}'" + case timestampValue: Timestamp => getTimestampString(timestampValue) + + case dateValue: Date => getDateString(dateValue) + // Wrap Timestamp/Date values in TO_TIMESTAMP/TO_DATE literals so Phoenix can parse them when the filter is pushed down // Pass through anything else case _ => value } @@ -120,4 +131,14 @@ case class PhoenixRelation(tableName: String, zkUrl: String, dateAsTimestamp: Bo private def isClass(obj: Any, className: String) = { className.equals(obj.getClass().getName()) } + + private def getTimestampString(timestampValue: Timestamp): String = { + "TO_TIMESTAMP('%s', '%s', '%s')".format(timeformat.format(timestampValue), + DateUtil.DEFAULT_TIME_FORMAT, DateUtil.DEFAULT_TIME_ZONE_ID) + } + + private def getDateString(dateValue: Date): String = { + "TO_DATE('%s', '%s', '%s')".format(dateformat.format(dateValue), + DateUtil.DEFAULT_DATE_FORMAT, DateUtil.DEFAULT_TIME_ZONE_ID) + } }