diff --git a/build.xml b/build.xml
index d16f46f92560..13b09c12fb19 100644
--- a/build.xml
+++ b/build.xml
@@ -119,7 +119,7 @@
-
+
@@ -130,7 +130,7 @@
-
+
@@ -292,6 +292,7 @@
+
@@ -301,6 +302,9 @@
+
+
+
@@ -373,6 +377,9 @@
+
+
+
@@ -452,7 +459,6 @@
-
+
@@ -691,6 +698,8 @@
todir="${build.dir.hive}/maven/jars/" />
+
+
@@ -776,6 +787,9 @@
+
+
+
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index ba822e28b83f..5367f52aab92 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -549,10 +549,15 @@ public int complete (String buffer, int offset, List completions) {
}
public static void main(String[] args) throws Exception {
+ int ret = run(args);
+ System.exit(ret);
+ }
+
+ public static int run(String[] args) throws Exception {
OptionsProcessor oproc = new OptionsProcessor();
if (!oproc.process_stage1(args)) {
- System.exit(1);
+ return 1;
}
// NOTE: It is critical to do this here so that log4j is reinitialized
@@ -572,11 +577,11 @@ public static void main(String[] args) throws Exception {
ss.out = new PrintStream(System.out, true, "UTF-8");
ss.err = new PrintStream(System.err, true, "UTF-8");
} catch (UnsupportedEncodingException e) {
- System.exit(3);
+ return 3;
}
if (!oproc.process_stage2(ss)) {
- System.exit(2);
+ return 2;
}
if (!ss.getIsSilent()) {
@@ -627,16 +632,16 @@ public static void main(String[] args) throws Exception {
cli.processInitFiles(ss);
if (ss.execString != null) {
- System.exit(cli.processLine(ss.execString));
+ return cli.processLine(ss.execString);
}
try {
if (ss.fileName != null) {
- System.exit(cli.processFile(ss.fileName));
+ return cli.processFile(ss.fileName);
}
} catch (FileNotFoundException e) {
System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
- System.exit(3);
+ return 3;
}
ConsoleReader reader = new ConsoleReader();
@@ -675,7 +680,7 @@ public static void main(String[] args) throws Exception {
ss.close();
- System.exit(ret);
+ return ret;
}
/**
diff --git a/pdk/build.xml b/pdk/build.xml
new file mode 100644
index 000000000000..511152ed0310
--- /dev/null
+++ b/pdk/build.xml
@@ -0,0 +1,59 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pdk/scripts/README b/pdk/scripts/README
new file mode 100644
index 000000000000..289368ed08fb
--- /dev/null
+++ b/pdk/scripts/README
@@ -0,0 +1,3 @@
+Note that this directory contains scripts which are bundled into the
+Plugin Development Kit (rather than used as part of the Hive build
+itself).
diff --git a/pdk/scripts/build-plugin.xml b/pdk/scripts/build-plugin.xml
new file mode 100644
index 000000000000..e67c022cba33
--- /dev/null
+++ b/pdk/scripts/build-plugin.xml
@@ -0,0 +1,133 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pdk/scripts/class-registration.xsl b/pdk/scripts/class-registration.xsl
new file mode 100644
index 000000000000..ce2ea95a528f
--- /dev/null
+++ b/pdk/scripts/class-registration.xsl
@@ -0,0 +1,41 @@
+
+
+
+
+
+
+
+
+ CREATE TEMPORARY FUNCTION
+
+
+ AS '
+
+ ';
+
+
+
+
+
+
+
+
+
diff --git a/pdk/src/java/org/apache/hive/pdk/FunctionExtractor.java b/pdk/src/java/org/apache/hive/pdk/FunctionExtractor.java
new file mode 100644
index 000000000000..d22c13fbbd8f
--- /dev/null
+++ b/pdk/src/java/org/apache/hive/pdk/FunctionExtractor.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.pdk;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+
+public class FunctionExtractor {
+ public static void main(String [] args) throws Exception {
+ System.out.println("");
+ for (String arg : args) {
+ Class<?> c = Class.forName(arg);
+ Description d = c.getAnnotation(Description.class);
+ if (d == null) {
+ continue;
+ }
+ System.out.print(" ");
+ }
+ System.out.println("");
+ }
+}
diff --git a/pdk/src/java/org/apache/hive/pdk/HivePdkUnitTest.java b/pdk/src/java/org/apache/hive/pdk/HivePdkUnitTest.java
new file mode 100644
index 000000000000..34c6b727206e
--- /dev/null
+++ b/pdk/src/java/org/apache/hive/pdk/HivePdkUnitTest.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.pdk;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+/**
+ * Defines one Hive plugin unit test.
+ */
+public @interface HivePdkUnitTest {
+
+ /**
+ * Hive query to run for this test.
+ */
+ String query();
+
+ /**
+ * Expected result from query.
+ */
+ String result();
+}
diff --git a/pdk/src/java/org/apache/hive/pdk/HivePdkUnitTests.java b/pdk/src/java/org/apache/hive/pdk/HivePdkUnitTests.java
new file mode 100644
index 000000000000..d85f8c7b66e3
--- /dev/null
+++ b/pdk/src/java/org/apache/hive/pdk/HivePdkUnitTests.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.pdk;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+/**
+ * Defines a suite of Hive plugin unit tests.
+ */
+public @interface HivePdkUnitTests {
+
+ /**
+ * Hive commands (semicolon-separated) to run as suite cleanup.
+ */
+ String cleanup() default "";
+
+ /**
+ * Hive commands (semicolon-separated) to run as suite setup.
+ */
+ String setup() default "";
+
+ /**
+ * Hive plugin unit tests in this suite.
+ */
+ HivePdkUnitTest[] cases();
+}
diff --git a/pdk/src/java/org/apache/hive/pdk/PluginTest.java b/pdk/src/java/org/apache/hive/pdk/PluginTest.java
new file mode 100644
index 000000000000..b475eb4c3959
--- /dev/null
+++ b/pdk/src/java/org/apache/hive/pdk/PluginTest.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.pdk;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import junit.extensions.TestSetup;
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+import org.apache.hadoop.hive.cli.CliDriver;
+
+/**
+ * PluginTest is a test harness for invoking all of the unit tests
+ * annotated on the classes in a plugin.
+ */
+public class PluginTest extends TestCase {
+
+ private HivePdkUnitTest unitTest;
+
+ private PluginTest(HivePdkUnitTest unitTest) {
+ super(unitTest.query());
+ this.unitTest = unitTest;
+ }
+
+ public void runTest() throws Exception {
+ String output = runHive(
+ "-i",
+ "metadata/add-jar.sql",
+ "-e",
+ unitTest.query());
+ assertEquals(unitTest.result(), output);
+ }
+
+ static String runHive(String ... args) throws Exception {
+ ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
+ ByteArrayOutputStream errBytes = new ByteArrayOutputStream();
+ PrintStream outSaved = System.out;
+ PrintStream errSaved = System.err;
+ System.setOut(new PrintStream(outBytes, true));
+ System.setErr(new PrintStream(errBytes, true));
+ try {
+ CliDriver.run(args);
+ } finally {
+ System.setOut(outSaved);
+ System.setErr(errSaved);
+ }
+ ByteArrayInputStream outBytesIn =
+ new ByteArrayInputStream(outBytes.toByteArray());
+ ByteArrayInputStream errBytesIn =
+ new ByteArrayInputStream(errBytes.toByteArray());
+ BufferedReader is =
+ new BufferedReader(new InputStreamReader(outBytesIn));
+ BufferedReader es =
+ new BufferedReader(new InputStreamReader(errBytesIn));
+ StringBuilder output = new StringBuilder();
+ String line;
+ while ((line = is.readLine()) != null) {
+ if (output.length() > 0) {
+ output.append("\n");
+ }
+ output.append(line);
+ }
+ if (output.length() == 0) {
+ output = new StringBuilder();
+ while ((line = es.readLine()) != null) {
+ output.append("\n");
+ output.append(line);
+ }
+ }
+ return output.toString();
+ }
+
+ public static Test suite() throws Exception {
+ String classList = System.getProperty("hive.plugin.class.list");
+ String [] classNames = classList.split(" ");
+ TestSuite suite = new TestSuite("Plugin Tests");
+ for (String className : classNames) {
+ Class<?> c = Class.forName(className);
+ HivePdkUnitTests tests = c.getAnnotation(HivePdkUnitTests.class);
+ if (tests == null) {
+ continue;
+ }
+ TestSuite classSuite = new TestSuite(c.getName());
+ for (HivePdkUnitTest unitTest : tests.cases()) {
+ classSuite.addTest(new PluginTest(unitTest));
+ }
+ suite.addTest(new PluginTestSetup(classSuite, tests));
+ }
+
+ return new PluginGlobalSetup(suite);
+ }
+
+ public static void main(String [] args) throws Exception {
+ junit.textui.TestRunner.run(suite());
+ }
+
+ public static class PluginTestSetup extends TestSetup {
+ String name;
+ HivePdkUnitTests unitTests;
+
+ PluginTestSetup(TestSuite test, HivePdkUnitTests unitTests) {
+ super(test);
+ this.name = test.getName();
+ this.unitTests = unitTests;
+ }
+
+ protected void setUp() throws Exception {
+ String cleanup = unitTests.cleanup();
+ String setup = unitTests.setup();
+ if (cleanup == null) {
+ cleanup = "";
+ }
+ if (setup == null) {
+ setup = "";
+ }
+ if ((cleanup.length() > 0) || (setup.length() > 0)) {
+ String result = runHive(
+ "-e",
+ cleanup + "\n" + setup);
+ if (result.length() > 0) {
+ System.err.println(name + " SETUP: " + result);
+ }
+ }
+ }
+
+ protected void tearDown() throws Exception {
+ String cleanup = unitTests.cleanup();
+ if (cleanup != null) {
+ String result = runHive(
+ "-e",
+ cleanup);
+ if (result.length() > 0) {
+ System.err.println(name + " TEARDOWN: " + result);
+ }
+ }
+ }
+ }
+
+ public static class PluginGlobalSetup extends TestSetup {
+ private File testScriptDir;
+
+ PluginGlobalSetup(Test test) {
+ super(test);
+ testScriptDir =
+ new File(System.getProperty("hive.plugin.root.dir"), "test");
+ }
+
+ protected void setUp() throws Exception {
+ String result = runHive(
+ "-i",
+ new File(testScriptDir, "cleanup.sql").toString(),
+ "-i",
+ "metadata/add-jar.sql",
+ "-i",
+ "metadata/class-registration.sql",
+ "-f",
+ new File(testScriptDir, "setup.sql").toString());
+ if (result.length() > 0) {
+ System.err.println("GLOBAL SETUP: " + result);
+ }
+ }
+
+ protected void tearDown() throws Exception {
+ String result = runHive(
+ "-f",
+ new File(testScriptDir, "cleanup.sql").toString());
+ if (result.length() > 0) {
+ System.err.println("GLOBAL TEARDOWN: " + result);
+ }
+ }
+ }
+}
diff --git a/pdk/test-plugin/build.xml b/pdk/test-plugin/build.xml
new file mode 100644
index 000000000000..117d2cd5dd29
--- /dev/null
+++ b/pdk/test-plugin/build.xml
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pdk/test-plugin/src/org/apache/hive/pdktest/Rot13.java b/pdk/test-plugin/src/org/apache/hive/pdktest/Rot13.java
new file mode 100644
index 000000000000..05aea6665a8d
--- /dev/null
+++ b/pdk/test-plugin/src/org/apache/hive/pdktest/Rot13.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.pdktest;
+
+import org.apache.hive.pdk.HivePdkUnitTest;
+import org.apache.hive.pdk.HivePdkUnitTests;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Example UDF for rot13 transformation.
+ */
+@Description(name = "rot13",
+ value = "_FUNC_(str) - Returns str with all characters transposed via rot13",
+ extended = "Example:\n"
+ + " > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" + " 'Snprobbx'")
+@HivePdkUnitTests(
+ setup = "create table rot13_data(s string); "
+ + "insert overwrite table rot13_data select 'Facebook' from onerow;",
+ cleanup = "drop table if exists rot13_data;",
+ cases = {
+ @HivePdkUnitTest(
+ query = "SELECT tp_rot13('Mixed Up!') FROM onerow;",
+ result = "Zvkrq Hc!"),
+ @HivePdkUnitTest(
+ query = "SELECT tp_rot13(s) FROM rot13_data;",
+ result = "Snprobbx")
+ }
+ )
+public class Rot13 extends UDF {
+ private Text t = new Text();
+
+ public Rot13() {
+ }
+
+ public Text evaluate(Text s) {
+ StringBuilder out = new StringBuilder(s.getLength());
+ char[] ca = s.toString().toCharArray();
+ for (char c : ca) {
+ if (c >= 'a' && c <= 'm') {
+ c += 13;
+ } else if (c >= 'n' && c <= 'z') {
+ c -= 13;
+ } else if (c >= 'A' && c <= 'M') {
+ c += 13;
+ } else if (c >= 'N' && c <= 'Z') {
+ c -= 13;
+ }
+ out.append(c);
+ }
+ t.set(out.toString());
+ return t;
+ }
+}
diff --git a/pdk/test-plugin/test/cleanup.sql b/pdk/test-plugin/test/cleanup.sql
new file mode 100644
index 000000000000..087a3ba54e8d
--- /dev/null
+++ b/pdk/test-plugin/test/cleanup.sql
@@ -0,0 +1 @@
+drop table if exists onerow;
diff --git a/pdk/test-plugin/test/onerow.txt b/pdk/test-plugin/test/onerow.txt
new file mode 100644
index 000000000000..857d7a06ac9e
--- /dev/null
+++ b/pdk/test-plugin/test/onerow.txt
@@ -0,0 +1 @@
+plugh
diff --git a/pdk/test-plugin/test/setup.sql b/pdk/test-plugin/test/setup.sql
new file mode 100644
index 000000000000..a1995117823b
--- /dev/null
+++ b/pdk/test-plugin/test/setup.sql
@@ -0,0 +1,3 @@
+create table onerow(s string);
+load data local inpath '${env:HIVE_PLUGIN_ROOT_DIR}/test/onerow.txt'
+overwrite into table onerow;