diff --git a/MERGE-INFO.txt b/MERGE-INFO.txt
deleted file mode 100644
index 31ac65a8b..000000000
--- a/MERGE-INFO.txt
+++ /dev/null
@@ -1 +0,0 @@
-from apache-wf branch @2291 (which is in synch with branch-1.6 @2291)
diff --git a/NOTICE.txt b/NOTICE.txt
index 32f4b271e..0fa365817 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -10,11 +10,11 @@ Components licenses:
HSQL License : HSQLDB
JDOM License : JDOM
BSD License : xmlenc Library
-Apache 2.0 License: Apache Log4j, Codec, Commons CLI, Commons DBCP,
- Commons Pool, EL, Hadoop, JSON.simple,
+Apache 2.0 License: Apache Log4j, Codec, Commons CLI, Commons DBCP,
+ Commons Pool, EL, Hadoop, JSON.simple,
Jakarta Commons Net, Logging
GNU GPL v3.0 : Ext JS 2.2 (with Open Source License Exception)
-Detailed License information can be found in the documentation
+Detailed License information can be found in the documentation
in the ooziedocs.war at index.html##LicenseInfo
diff --git a/bin/createjpaconf.sh b/bin/createjpaconf.sh
new file mode 100755
index 000000000..3f7aa06ef
--- /dev/null
+++ b/bin/createjpaconf.sh
@@ -0,0 +1,192 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+SCRIPT_DIR=$(dirname $0)
+CURRENT_DIR=$(pwd)
+DBTYPE=
+USERNAME=
+PASSWORD=
+DBURL=
+
+function usage()
+{
+ echo >&2 \
+ "usage: $0 [-ddbtype] [-uusername] [-ppassword] [-lurl]"
+ exit 1
+}
+
+while getopts :d:u:p:l: opt
+do
+ case "$opt" in
+ d) DBTYPE="$OPTARG";;
+ u) USERNAME="$OPTARG";;
+ p) PASSWORD="$OPTARG";;
+ l) DBURL="$OPTARG";;
+ \?) #unknown flag
+ usage;;
+ esac
+done
+
+# check that the required arguments (-d dbtype, -u username, -l url) are given; -p password is optional:
+[ -z "$DBTYPE" ] && usage
+[ -z "$USERNAME" ] && usage
+[ -z "$DBURL" ] && usage
+
+DriverClassName=
+Url=
+
+if [ "$DBTYPE" == "oracle" ]; then
+ DriverClassName=oracle.jdbc.driver.OracleDriver
+ Url=jdbc:oracle:thin:@${DBURL}
+ DB_ISOLATION="read-committed"
+elif [ "$DBTYPE" == "mysql" ]; then
+ DriverClassName=com.mysql.jdbc.Driver
+ Url=jdbc:mysql://${DBURL}
+ DB_ISOLATION="repeatable-read"
+else
+ DriverClassName=org.hsqldb.jdbcDriver
+ Url="jdbc:hsqldb:${DBURL};create=true"
+ DB_ISOLATION="read-committed"
+fi
+
+CONNECTSTRING="DriverClassName=${DriverClassName},Url=${Url},Username=${USERNAME},Password=${PASSWORD},MaxActive=100"
+
+#create persistence.xml
+mkdir ${SCRIPT_DIR}/tmp
+cat << EOF-persistence.xml > ${SCRIPT_DIR}/tmp/persistence.xml
+exit
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.oozie.WorkflowActionBean
+ org.apache.oozie.WorkflowJobBean
+ org.apache.oozie.CoordinatorJobBean
+ org.apache.oozie.CoordinatorActionBean
+ org.apache.oozie.SLAEventBean
+ org.apache.oozie.client.rest.JsonWorkflowJob
+ org.apache.oozie.client.rest.JsonWorkflowAction
+ org.apache.oozie.client.rest.JsonCoordinatorJob
+ org.apache.oozie.client.rest.JsonCoordinatorAction
+ org.apache.oozie.client.rest.JsonSLAEvent
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+EOF-persistence.xml
+
+cd ${SCRIPT_DIR}
+cp tmp/persistence.xml ../webapp/src/main/resources/META-INF/
+
+#oracle
+if [ "$DBTYPE" == "oracle" ]; then
+ cp ../webapp/src/main/resources/META-INF/orm.xml.oracle ../webapp/src/main/resources/META-INF/orm.xml
+#mysql
+elif [ "$DBTYPE" == "mysql" ]; then
+ cp ../webapp/src/main/resources/META-INF/orm.xml.mysql ../webapp/src/main/resources/META-INF/orm.xml
+#hsql
+else
+ cp ../webapp/src/main/resources/META-INF/orm.xml.hsql ../webapp/src/main/resources/META-INF/orm.xml
+fi
+
+
+rm -fr tmp
diff --git a/bin/mkdistro.sh b/bin/mkdistro.sh
index b25a6a19f..d3827b3d9 100755
--- a/bin/mkdistro.sh
+++ b/bin/mkdistro.sh
@@ -1,20 +1,20 @@
-#!/bin/sh
+#!/bin/bash
#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
# resolve links - $0 may be a softlink
@@ -39,6 +39,17 @@ if [ "$1" == "-full" ]; then
shift
fi
+if [ "$1" == "-h" ]; then
+ echo
+ echo "**persistence.xml replacement**"
+ echo
+ echo "usage: $0 [-ddbtype] [-uusername] [-ppassword] [-lurl]"
+ echo
+ echo "**mvn help**"
+ mvn -h
+ exit 0
+fi
+
function checkExitStatus {
if [ "$?" != "0" ]; then
echo
@@ -51,33 +62,84 @@ function checkExitStatus {
function cleanUpLocalRepo {
rm -rf ~/.m2/repository/org/apache/oozie/*
+ rm -rf $PWD/core/mem
}
+#process createjpaconf.sh arguments - begin
+while getopts :d:u:p:l: opt
+do
+ case "$opt" in
+ d) DBTYPE="$OPTARG";;
+ u) USERNAME="$OPTARG";;
+ p) PASSWORD="$OPTARG";;
+ l) DBURL="$OPTARG";;
+ \?) #unknown flag
+ break;;
+ esac
+done
+
+if [ -z "$DBTYPE" ]; then
+ echo "[INFO] Use default persistence.xml!!"
+else
+ if [ -z "$USERNAME" ]; then
+ echo "[ERROR] DB UserName required!!"
+ exit 1
+ fi
+ if [ -z "$DBURL" ]; then
+ echo "[ERROR] DB URL required!!"
+ exit 1
+ fi
+ [[ "$DBTYPE" = [-]* ]] && { echo "[ERROR] Wrong DBTYPE!!" ; exit 1 ; }
+ [[ "$USERNAME" = [-]* ]] && { echo "[ERROR] Wrong USERNAME" ; exit 1 ; }
+ [[ "$PASSWORD" = [-]* ]] && { echo "[ERROR] Wrong PASSWORD" ; exit 1 ; }
+ [[ "$DBURL" = [-]* ]] && { echo "[ERROR] Wrong DBURL" ; exit 1 ; }
+ echo "[INFO] Use replaced persistence.xml!!"
+ shift $(( $OPTIND - 1 ))
+ SCRIPT_DIR=$(dirname $0)
+ if [ -z "$PASSWORD" ]; then
+ ${SCRIPT_DIR}/createjpaconf.sh -d${DBTYPE} -u${USERNAME} -l${DBURL}
+ else
+ ${SCRIPT_DIR}/createjpaconf.sh -d${DBTYPE} -u${USERNAME} -p${PASSWORD} -l${DBURL}
+ fi
+fi
+#process createjpaconf.sh arguments - end
+
export DATETIME=`date -u "+%Y.%m.%d-%H:%M:%SGMT"`
cd ${BASEDIR}
export SVNREV=`svn info | grep "Revision" | awk '{print $2}'`
export SVNURL=`svn info | grep "URL" | awk '{print $2}'`
#clean up local repo
+#ln -s $PWD/client/src $PWD/client_enhanced/src
+#ln -s $PWD/client_enhanced/pom.xml.enhance $PWD/client_enhanced/pom.xml
cleanUpLocalRepo
MVN_OPTS="-Dbuild.time=${DATETIME} -Dsvn.revision=${SVNREV} -Dsvn.url=${SVNURL}"
+cd client
+mvn clean package -Doozie.build.jpa.enhanced=false ${MVN_OPTS} $*
+mvn assembly:single -Doozie.build.jpa.enhanced=false ${MVN_OPTS} $*
+cd ..
+
#clean, compile, test, package, install
mvn clean install ${MVN_OPTS} $*
checkExitStatus "running: clean compile, test, package, install"
-if [ "$FULLDISTRO" == "true" ]; then
- #cobertura
- mvn cobertura:cobertura ${MVN_OPTS} $*
- checkExitStatus "running: cobertura"
+#if [ "$FULLDISTRO" == "true" ]; then
+
+ #clover
+ #mvn clover2:instrument clover2:aggregate clover2:clover ${MVN_OPTS} $*
+ #checkExitStatus "running: clover"
#dependencies report
- mvn project-info-reports:dependencies ${MVN_OPTS} $*
- checkExitStatus "running: dependencies"
+ #mvn project-info-reports:dependencies ${MVN_OPTS} $*
+ #checkExitStatus "running: dependencies"
- #TODO findbugs report
-fi
+ #findbugs report
+ #mvn findbugs:findbugs ${MVN_OPTS} $*
+ #checkExitStatus "running: findbugs"
+
+#fi
#javadocs
mvn javadoc:javadoc ${MVN_OPTS} $*
@@ -94,7 +156,10 @@ mvn assembly:single ${MVN_OPTS} $*
checkExitStatus "running: assembly"
cleanUpLocalRepo
+#unlink $PWD/client_enhanced/src
+#unlink $PWD/client_enhanced/pom.xml
echo
echo "Oozie distro created, DATE[${DATETIME}] SVN-REV[${SVNREV}], available at [${BASEDIR}/distro/target]"
-echo
\ No newline at end of file
+echo
+
diff --git a/bin/purgelocalrepo.sh b/bin/purgelocalrepo.sh
index 2fb086c2c..da619a2f2 100755
--- a/bin/purgelocalrepo.sh
+++ b/bin/purgelocalrepo.sh
@@ -1,20 +1,20 @@
#!/bin/sh
#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
function cleanUpLocalRepo {
diff --git a/build-setup/packages/hadoop-core-0.20.1.jar b/build-setup/packages/hadoop-core-0.20.1.jar
deleted file mode 100644
index c6326c39b..000000000
Binary files a/build-setup/packages/hadoop-core-0.20.1.jar and /dev/null differ
diff --git a/build-setup/packages/hadoop-core-0.20.1.pom b/build-setup/packages/hadoop-core-0.20.1.pom
deleted file mode 100644
index 87a935177..000000000
--- a/build-setup/packages/hadoop-core-0.20.1.pom
+++ /dev/null
@@ -1,122 +0,0 @@
-
- 4.0.0
- org.apache.hadoop
- hadoop-core
- 0.20.1
-
- Hadoop
- jar
-
-
-
- The Apache Software License, Version 2.0
- http://www.apache.org/licenses/LICENSE-2.0.txt
-
-
-
-
-
- org.apache.commons
- commons-cli
- 2.0-SNAPSHOT
- compile
-
-
- commons-codec
- commons-codec
- 1.3
- compile
-
-
- commons-httpclient
- commons-httpclient
- 3.0.1
- compile
-
-
- commons-logging
- commons-logging-api
- 1.0.4
- compile
-
-
- commons-logging
- commons-logging
- 1.0.4
- compile
-
-
- commons-net
- commons-net
- 1.4.1
- compile
-
-
- org.mortbay.jetty
- jetty
- 6.1.14
- compile
-
-
- commons-el
- commons-el
- 1.0
- compile
-
-
- tomcat
- jasper-compiler
- 5.5.12
- compile
-
-
- tomcat
- jasper-runtime
- 5.5.12
- compile
-
-
- org.mortbay.jetty
- jsp-api-2.1
- 6.1.14
- compile
-
-
- log4j
- log4j
- 1.2.15
- compile
-
-
- oro
- oro
- 2.0.8
- compile
-
-
- javax.servlet
- servlet-api
- 2.5
- compile
-
-
- xmlenc
- xmlenc
- 0.52
- compile
-
-
- org.slf4j
- slf4j-api
- 1.4.3
- compile
-
-
- org.slf4j
- slf4j-log4j12
- 1.4.3
- compile
-
-
-
-
diff --git a/build-setup/packages/hadoop-streaming-0.20.1.jar b/build-setup/packages/hadoop-streaming-0.20.1.jar
deleted file mode 100644
index 8de77b3aa..000000000
Binary files a/build-setup/packages/hadoop-streaming-0.20.1.jar and /dev/null differ
diff --git a/build-setup/packages/hadoop-streaming-0.20.1.pom b/build-setup/packages/hadoop-streaming-0.20.1.pom
deleted file mode 100644
index 399dce13d..000000000
--- a/build-setup/packages/hadoop-streaming-0.20.1.pom
+++ /dev/null
@@ -1,26 +0,0 @@
-
- 4.0.0
- org.apache.hadoop
- hadoop-streaming
- 0.20.1
-
- Hadoop Streaming
- jar
-
-
-
- The Apache Software License, Version 2.0
- http://www.apache.org/licenses/LICENSE-2.0.txt
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- 0.20.1
- compile
-
-
-
-
diff --git a/build-setup/packages/hadoop-test-0.20.1.jar b/build-setup/packages/hadoop-test-0.20.1.jar
deleted file mode 100644
index a39f3c87b..000000000
Binary files a/build-setup/packages/hadoop-test-0.20.1.jar and /dev/null differ
diff --git a/build-setup/packages/hadoop-test-0.20.1.pom b/build-setup/packages/hadoop-test-0.20.1.pom
deleted file mode 100644
index 02621b7a3..000000000
--- a/build-setup/packages/hadoop-test-0.20.1.pom
+++ /dev/null
@@ -1,136 +0,0 @@
-
- 4.0.0
- org.apache.hadoop
- hadoop-test
- 0.20.1
-
- Hadoop Test
- jar
-
-
-
- The Apache Software License, Version 2.0
- http://www.apache.org/licenses/LICENSE-2.0.txt
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- 0.20.1
- compile
-
-
- junit
- junit
- 3.8.1
- compile
-
-
-
- org.apache.commons
- commons-cli
- 2.0-SNAPSHOT
- compile
-
-
- commons-codec
- commons-codec
- 1.3
- compile
-
-
- commons-httpclient
- commons-httpclient
- 3.0.1
- compile
-
-
- commons-logging
- commons-logging-api
- 1.0.4
- compile
-
-
- commons-logging
- commons-logging
- 1.0.4
- compile
-
-
- commons-net
- commons-net
- 1.4.1
- compile
-
-
- org.mortbay.jetty
- jetty
- 6.1.14
- compile
-
-
- commons-el
- commons-el
- 1.0
- compile
-
-
- tomcat
- jasper-compiler
- 5.5.12
- compile
-
-
- tomcat
- jasper-runtime
- 5.5.12
- compile
-
-
- org.mortbay.jetty
- jsp-api-2.1
- 6.1.14
- compile
-
-
- log4j
- log4j
- 1.2.15
- compile
-
-
- oro
- oro
- 2.0.8
- compile
-
-
- javax.servlet
- servlet-api
- 2.5
- compile
-
-
- xmlenc
- xmlenc
- 0.52
- compile
-
-
- org.slf4j
- slf4j-api
- 1.4.3
- compile
-
-
- org.slf4j
- slf4j-log4j12
- 1.4.3
- compile
-
-
-
-
-
diff --git a/build-setup/packages/pig-0.2.0-H20-J660.jar b/build-setup/packages/pig-0.2.0-H20-J660.jar
deleted file mode 100644
index dd8272954..000000000
Binary files a/build-setup/packages/pig-0.2.0-H20-J660.jar and /dev/null differ
diff --git a/build-setup/packages/pig-0.2.0-H20-J660.pom b/build-setup/packages/pig-0.2.0-H20-J660.pom
deleted file mode 100644
index a76a2b232..000000000
--- a/build-setup/packages/pig-0.2.0-H20-J660.pom
+++ /dev/null
@@ -1,32 +0,0 @@
-
- 4.0.0
- org.apache.hadoop
- pig
- 0.2.0-H20-J660
-
- Pig
- jar
-
-
-
- The Apache Software License, Version 2.0
- http://www.apache.org/licenses/LICENSE-2.0.txt
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- 0.20.0
- compile
-
-
- jline
- jline
- 0.9.94
- compile
-
-
-
-
diff --git a/build-setup/readme.txt b/build-setup/readme.txt
deleted file mode 100644
index 3daf03a6c..000000000
--- a/build-setup/readme.txt
+++ /dev/null
@@ -1,5 +0,0 @@
------
-There is 1 version of Pig 0.2.0:
-
-* 0.2.0-H20-J660: this is 0.2.0 Apache source patched with Jira 660, for Hadoop 0.20.0
------
diff --git a/build-setup/setup-jars.sh b/build-setup/setup-jars.sh
deleted file mode 100755
index af471bad8..000000000
--- a/build-setup/setup-jars.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/sh
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# resolve links - $0 may be a softlink
-PRG="${0}"
-
-while [ -h "${PRG}" ]; do
- ls=`ls -ld "${PRG}"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "${PRG}"`/"$link"
- fi
-done
-
-BASEDIR=`dirname ${PRG}`
-BASEDIR=`cd ${BASEDIR};pwd`
-
-function checkExitStatus {
- if [ "$?" != "0" ]; then
- echo "ERROR, Oozie development environment could not be configured"
- exit -1
- fi
-}
-
-function installArtifact {
- jar="packages/${2}-${3}.jar"
- installJar ${1} ${2} ${3} ${jar}
-}
-
-function installJar {
- pom="packages/${2}-${3}.pom"
- mvn install:install-file -Dpackaging=jar -DgroupId=${1} -DartifactId=${2} -Dversion=${3} -Dfile=${4} -DpomFile=${pom}
- checkExitStatus
-}
-
-cd ${BASEDIR}
-checkExitStatus
-
-#Hadoop 0.20.1
-installArtifact org.apache.hadoop hadoop-core 0.20.1
-installArtifact org.apache.hadoop hadoop-streaming 0.20.1
-installArtifact org.apache.hadoop hadoop-test 0.20.1
-
-#Pig 0.2.0
-installArtifact org.apache.hadoop pig 0.2.0-H20-J660
-
-echo
-echo "JAR artifacts for Oozie development installed successfully"
-echo
-
diff --git a/build-setup/setup-maven.sh b/build-setup/setup-maven.sh
index 27d001be7..f7ea06246 100755
--- a/build-setup/setup-maven.sh
+++ b/build-setup/setup-maven.sh
@@ -1,22 +1,23 @@
#!/bin/sh
#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
+
# resolve links - $0 may be a softlink
PRG="${0}"
diff --git a/client/pom.xml b/client/pom.xml
index 7f83781c0..ab5478ec4 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -54,6 +54,12 @@
3.8.1
test
+
+ javax.persistence
+ persistence-api
+ 1.0
+ provided
+
@@ -63,42 +69,131 @@
true
-
-
- maven-assembly-plugin
-
-
- ../src/main/assemblies/client.xml
-
-
-
-
-
+
- kerberos-auth
+ jpaEnhancedOff
false
- oozie.test.hadoop.auth
- kerberos
+ oozie.build.jpa.enhanced
+ false
+
+
+
+ target-no-jpa
+
+
+ maven-assembly-plugin
+
+
+ ../src/main/assemblies/client.xml
+
+
+
+
+
+
+
+
+ jpaEnhancedOn
+
+ true
+
+ oozie.build.jpa.enhanced
+ true
+
+
+ javax.persistence
+ persistence-api
+ 1.0
+ provided
+
+
+ org.apache.openjpa
+ openjpa-persistence
+ 1.2.1
+
+
+ log4j
+ log4j
+ 1.2.15
+ compile
+
+
+ com.sun.jdmk
+ jmxtools
+
+
+ com.sun.jmx
+ jmxri
+
+
+ javax.mail
+ mail
+
+
+ javax.jms
+ jmx
+
+
+ javax.jms
+ jms
+
+
+
+
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
-
-
- **/*.java
-
-
-
-
-
+
+
+ maven-antrun-plugin
+
+
+ process-classes
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ run
+
+
+
+
+
+ maven-assembly-plugin
+
+
+ ../src/main/assemblies/empty.xml
+
+
+
+
+
diff --git a/client/src/main/bin/oozie b/client/src/main/bin/oozie
index cf77f3c97..699672fc0 100644
--- a/client/src/main/bin/oozie
+++ b/client/src/main/bin/oozie
@@ -1,21 +1,4 @@
#!/bin/sh
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
# resolve links - $0 may be a softlink
PRG="${0}"
diff --git a/client/src/main/java/org/apache/oozie/cli/CLIParser.java b/client/src/main/java/org/apache/oozie/cli/CLIParser.java
index c10df45b5..d2f38aa7b 100644
--- a/client/src/main/java/org/apache/oozie/cli/CLIParser.java
+++ b/client/src/main/java/org/apache/oozie/cli/CLIParser.java
@@ -1,20 +1,3 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -125,15 +108,16 @@ public Command parse(String[] args) throws ParseException {
if (args.length == 0) {
throw new ParseException("missing sub-command");
}
- else
- if (commands.containsKey(args[0])) {
- GnuParser parser = new GnuParser();
- String[] minusCommand = new String[args.length - 1];
- System.arraycopy(args, 1, minusCommand, 0, minusCommand.length);
- return new Command(args[0], parser.parse(commands.get(args[0]), minusCommand));
- }
else {
- throw new ParseException(MessageFormat.format("invalid sub-command [{0}]", args[0]));
+ if (commands.containsKey(args[0])) {
+ GnuParser parser = new GnuParser();
+ String[] minusCommand = new String[args.length - 1];
+ System.arraycopy(args, 1, minusCommand, 0, minusCommand.length);
+ return new Command(args[0], parser.parse(commands.get(args[0]), minusCommand));
+ }
+ else {
+ throw new ParseException(MessageFormat.format("invalid sub-command [{0}]", args[0]));
+ }
}
}
diff --git a/client/src/main/java/org/apache/oozie/cli/OozieCLI.java b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
index d61f5e3dd..e6d9ef067 100644
--- a/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
+++ b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
@@ -24,10 +24,13 @@
import org.apache.commons.cli.ParseException;
import org.apache.oozie.cli.CLIParser;
import org.apache.oozie.BuildInfo;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.client.OozieClient.SYSTEM_MODE;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
@@ -71,6 +74,7 @@ public class OozieCLI {
public static final String JOBS_CMD = "jobs";
public static final String ADMIN_CMD = "admin";
public static final String VALIDATE_CMD = "validate";
+ public static final String SLA_CMD = "sla";
public static final String OOZIE_OPTION = "oozie";
public static final String CONFIG_OPTION = "config";
@@ -78,25 +82,32 @@ public class OozieCLI {
public static final String OFFSET_OPTION = "offset";
public static final String START_OPTION = "start";
public static final String RUN_OPTION = "run";
+ public static final String DRYRUN_OPTION = "dryrun";
public static final String SUSPEND_OPTION = "suspend";
public static final String RESUME_OPTION = "resume";
public static final String KILL_OPTION = "kill";
public static final String RERUN_OPTION = "rerun";
public static final String INFO_OPTION = "info";
+ public static final String LOG_OPTION = "log";
+ public static final String DEFINITION_OPTION = "definition";
+
public static final String LEN_OPTION = "len";
public static final String FILTER_OPTION = "filter";
- public static final String SAFEMODE_OPTION = "safemode";
+ public static final String JOBTYPE_OPTION = "jobtype";
+ public static final String SYSTEM_MODE_OPTION = "systemmode";
public static final String VERSION_OPTION = "version";
public static final String STATUS_OPTION = "status";
public static final String LOCAL_TIME_OPTION = "localtime";
- private static final String[] OOZIE_HELP =
- {"the env variable '" + ENV_OOZIE_URL + "' is used as default value for the '-" + OOZIE_OPTION + "' option",
- "custom headers for Oozie web services can be specified using '-D" + WS_HEADER_PREFIX + "NAME=VALUE'"
- };
+ public static final String VERBOSE_OPTION = "verbose";
+ public static final String VERBOSE_DELIMITER = "\t";
+
+ private static final String[] OOZIE_HELP = {
+ "the env variable '" + ENV_OOZIE_URL + "' is used as default value for the '-" + OOZIE_OPTION + "' option",
+ "custom headers for Oozie web services can be specified using '-D" + WS_HEADER_PREFIX + "NAME=VALUE'"};
private static final String RULER;
- private static final int LINE_WIDTH = 184;
+ private static final int LINE_WIDTH = 132;
private boolean used;
@@ -109,9 +120,8 @@ public class OozieCLI {
}
/**
- * Entry point for the Oozie CLI when invoked from the command line.
- *
- * Upon completion this method exits the JVM with '0' (success) or '-1' (failure).
+ * Entry point for the Oozie CLI when invoked from the command line. Upon completion this method exits the JVM
+ * with '0' (success) or '-1' (failure).
*
* @param args options and arguments for the Oozie CLI.
*/
@@ -136,14 +146,15 @@ protected String[] getCLIHelp() {
}
private static Options createAdminOptions() {
- Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
- Option safe_mode = new Option(SAFEMODE_OPTION, true, "switch safemode on/off (true|false)");
- Option status = new Option(STATUS_OPTION, false, "show the current system status");
- Option version = new Option(VERSION_OPTION, false, "show Oozie server build version");
+ Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
+ Option system_mode = new Option(SYSTEM_MODE_OPTION, true,
+ "Supported in Oozie-2.0 or later versions ONLY. Change oozie system mode [NORMAL|NOWEBSERVICE|SAFEMODE]");
+ Option status = new Option(STATUS_OPTION, false, "show the current system status");
+ Option version = new Option(VERSION_OPTION, false, "show Oozie server build version");
Options adminOptions = new Options();
adminOptions.addOption(oozie);
OptionGroup group = new OptionGroup();
- group.addOption(safe_mode);
+ group.addOption(system_mode);
group.addOption(status);
group.addOption(version);
adminOptions.addOptionGroup(group);
@@ -151,41 +162,57 @@ private static Options createAdminOptions() {
}
private static Options createJobOptions() {
- Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
- Option config = new Option(CONFIG_OPTION, true, "job configuration file '.xml' or '.properties'");
- Option submit = new Option(SUBMIT_OPTION, false, "submit a job (requires -config)");
- Option run = new Option(RUN_OPTION, false, "run a job (requires -config)");
- Option rerun = new Option(RERUN_OPTION, true, "rerun a job (requires -config)");
- Option start = new Option(START_OPTION, true, "start a job");
- Option suspend = new Option(SUSPEND_OPTION, true, "suspend a job");
- Option resume = new Option(RESUME_OPTION, true, "resume a job");
- Option kill = new Option(KILL_OPTION, true, "kill a job");
- Option info = new Option(INFO_OPTION, true, "info of a job");
+ Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
+ Option config = new Option(CONFIG_OPTION, true, "job configuration file '.xml' or '.properties'");
+ Option submit = new Option(SUBMIT_OPTION, false, "submit a job (requires -config)");
+ Option run = new Option(RUN_OPTION, false, "run a job (requires -config)");
+ Option rerun = new Option(RERUN_OPTION, true, "rerun a job (requires -config)");
+ Option dryrun = new Option(DRYRUN_OPTION, false,
+ "Supported in Oozie-2.0 or later versions ONLY - dryrun or test run a coordinator job (requires -config) - job is not queued");
+ Option start = new Option(START_OPTION, true, "start a job");
+ Option suspend = new Option(SUSPEND_OPTION, true, "suspend a job");
+ Option resume = new Option(RESUME_OPTION, true, "resume a job");
+ Option kill = new Option(KILL_OPTION, true, "kill a job");
+ Option info = new Option(INFO_OPTION, true, "info of a job");
+ Option offset = new Option(OFFSET_OPTION, true, "job info offset of actions (default '1', requires -info)");
+ Option len = new Option(LEN_OPTION, true, "number of actions (default TOTAL ACTIONS, requires -info)");
Option localtime = new Option(LOCAL_TIME_OPTION, false, "use local time (default GMT)");
+ Option log = new Option(LOG_OPTION, true, "job log");
+ Option definition = new Option(DEFINITION_OPTION, true, "job definition");
+ Option verbose = new Option(VERBOSE_OPTION, false, "verbose mode");
+
OptionGroup actions = new OptionGroup();
actions.addOption(submit);
actions.addOption(start);
actions.addOption(run);
+ actions.addOption(dryrun);
actions.addOption(suspend);
actions.addOption(resume);
actions.addOption(kill);
actions.addOption(info);
actions.addOption(rerun);
+ actions.addOption(log);
+ actions.addOption(definition);
actions.setRequired(true);
Options jobOptions = new Options();
jobOptions.addOption(oozie);
jobOptions.addOption(config);
jobOptions.addOption(localtime);
+ jobOptions.addOption(verbose);
+ jobOptions.addOption(offset);
+ jobOptions.addOption(len);
jobOptions.addOptionGroup(actions);
return jobOptions;
}
private static Options createJobsOptions() {
- Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
- Option start = new Option(OFFSET_OPTION, true, "jobs offset (default '1')");
- Option len = new Option(LEN_OPTION, true, "number of jobs (default '100')");
- Option filter = new Option(FILTER_OPTION, true, "user=;name=;group=;status=;...");
+ Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
+ Option start = new Option(OFFSET_OPTION, true, "jobs offset (default '1')");
+ Option jobtype = new Option(JOBTYPE_OPTION, true, "job type ('Supported in Oozie-2.0 or later versions ONLY - coordinator' or 'wf' (default))");
+ Option len = new Option(LEN_OPTION, true, "number of jobs (default '100')");
+ Option filter = new Option(FILTER_OPTION, true, "user=;name=;group=;status=;...");
Option localtime = new Option(LOCAL_TIME_OPTION, false, "use local time (default GMT)");
+ Option verbose = new Option(VERBOSE_OPTION, false, "verbose mode");
start.setType(Integer.class);
len.setType(Integer.class);
Options jobsOptions = new Options();
@@ -195,15 +222,26 @@ private static Options createJobsOptions() {
jobsOptions.addOption(len);
jobsOptions.addOption(oozie);
jobsOptions.addOption(filter);
+ jobsOptions.addOption(jobtype);
+ jobsOptions.addOption(verbose);
return jobsOptions;
}
+ private static Options createSlaOptions() {
+ Option oozie = new Option(OOZIE_OPTION, true, "Oozie URL");
+ Option start = new Option(OFFSET_OPTION, true, "start offset (default '0')");
+ Option len = new Option(LEN_OPTION, true, "number of results (default '100')");
+ start.setType(Integer.class);
+ len.setType(Integer.class);
+ Options slaOptions = new Options();
+ slaOptions.addOption(start);
+ slaOptions.addOption(len);
+ slaOptions.addOption(oozie);
+ return slaOptions;
+ }
+
/**
- * Run a CLI programmatically.
- *
- * It does not exit the JVM.
- *
- * A CLI instance can be used only once.
+ * Run a CLI programmatically. It does not exit the JVM. A CLI instance can be used only once.
*
* @param args options and arguments for the Oozie CLI.
* @return '0' (success), '-1' (failure).
@@ -220,27 +258,44 @@ public synchronized int run(String[] args) {
parser.addCommand(JOBS_CMD, "", "jobs status", createJobsOptions(), false);
parser.addCommand(ADMIN_CMD, "", "admin operations", createAdminOptions(), false);
parser.addCommand(VALIDATE_CMD, "", "validate a workflow XML file", new Options(), true);
+ parser.addCommand(SLA_CMD, "", "sla operations (Supported in Oozie-2.0 or later)", createSlaOptions(), false);
try {
CLIParser.Command command = parser.parse(args);
if (command.getName().equals(HELP_CMD)) {
parser.showHelp();
}
- else if (command.getName().equals(JOB_CMD)) {
- jobCommand(command.getCommandLine());
- }
- else if (command.getName().equals(JOBS_CMD)) {
- jobsCommand(command.getCommandLine());
- }
- else if (command.getName().equals(ADMIN_CMD)) {
- adminCommand(command.getCommandLine());
- }
- else if (command.getName().equals(VERSION_CMD)) {
- versionCommand();
- }
- else if (command.getName().equals(VALIDATE_CMD)) {
- validateCommand(command.getCommandLine());
+ else {
+ if (command.getName().equals(JOB_CMD)) {
+ jobCommand(command.getCommandLine());
+ }
+ else {
+ if (command.getName().equals(JOBS_CMD)) {
+ jobsCommand(command.getCommandLine());
+ }
+ else {
+ if (command.getName().equals(ADMIN_CMD)) {
+ adminCommand(command.getCommandLine());
+ }
+ else {
+ if (command.getName().equals(VERSION_CMD)) {
+ versionCommand();
+ }
+ else {
+ if (command.getName().equals(VALIDATE_CMD)) {
+ validateCommand(command.getCommandLine());
+ }
+ else {
+ if (command.getName().equals(SLA_CMD)) {
+ slaCommand(command.getCommandLine());
+ }
+ }
+ }
+ }
+ }
+ }
}
+
return 0;
}
catch (OozieCLIException ex) {
@@ -254,6 +309,7 @@ else if (command.getName().equals(VALIDATE_CMD)) {
return -1;
}
catch (Exception ex) {
+ ex.printStackTrace();
System.err.println(ex.getMessage());
return -1;
}
@@ -264,7 +320,8 @@ private String getOozieUrl(CommandLine commandLine) {
if (url == null) {
url = System.getenv(ENV_OOZIE_URL);
if (url == null) {
- throw new IllegalArgumentException("Oozie URL is no available as option or in the environment");
+ throw new IllegalArgumentException(
+ "Oozie URL is not available neither in command option or in the environment");
}
}
return url;
@@ -315,7 +372,7 @@ private Properties parseDocument(Document doc, Properties conf) throws IOExcepti
}
Element field = (Element) fieldNode;
if ("name".equals(field.getTagName()) && field.hasChildNodes()) {
- attr = ((Text) field.getFirstChild()).getData().trim();
+ attr = ((Text) field.getFirstChild()).getData();
}
if ("value".equals(field.getTagName()) && field.hasChildNodes()) {
value = ((Text) field.getFirstChild()).getData();
@@ -348,20 +405,21 @@ private Properties getConfiguration(CommandLine commandLine) throws IOException
if (configFile.endsWith(".properties")) {
conf.load(new FileReader(file));
}
- else if (configFile.endsWith(".xml")) {
- parse(new FileInputStream(configFile), conf);
- }
else {
- throw new IllegalArgumentException("configuration must be a '.properties' or a '.xml' file");
+ if (configFile.endsWith(".xml")) {
+ parse(new FileInputStream(configFile), conf);
+ }
+ else {
+ throw new IllegalArgumentException("configuration must be a '.properties' or a '.xml' file");
+ }
}
}
return conf;
}
/**
- * Create a OozieClient.
- *
- * It injects any '-Dheader:' as header to the the {@link org.apache.oozie.client.OozieClient}.
+ * Create an OozieClient. It injects any '-Dheader:' as header to the {@link
+ * org.apache.oozie.client.OozieClient}.
*
* @param commandLine the parsed command line options.
* @return a pre configured workflow client.
@@ -393,77 +451,267 @@ private void jobCommand(CommandLine commandLine) throws IOException, OozieCLIExc
if (options.contains(SUBMIT_OPTION)) {
System.out.println(JOB_ID_PREFIX + wc.submit(getConfiguration(commandLine)));
}
- else if (options.contains(START_OPTION)) {
- wc.start(commandLine.getOptionValue(START_OPTION));
- }
- else if (options.contains(SUSPEND_OPTION)) {
- wc.suspend(commandLine.getOptionValue(SUSPEND_OPTION));
- }
- else if (options.contains(RESUME_OPTION)) {
- wc.resume(commandLine.getOptionValue(RESUME_OPTION));
- }
- else if (options.contains(KILL_OPTION)) {
- wc.kill(commandLine.getOptionValue(KILL_OPTION));
- }
- else if (options.contains(RUN_OPTION)) {
- System.out.println(JOB_ID_PREFIX + wc.run(getConfiguration(commandLine)));
- }
- else if (options.contains(RERUN_OPTION)) {
- wc.reRun(commandLine.getOptionValue(RERUN_OPTION), getConfiguration(commandLine));
- }
- else if (options.contains(INFO_OPTION)) {
- printJob(wc.getJobInfo(commandLine.getOptionValue(INFO_OPTION)), options.contains(LOCAL_TIME_OPTION));
+ else {
+ if (options.contains(START_OPTION)) {
+ wc.start(commandLine.getOptionValue(START_OPTION));
+ }
+ else {
+ if (options.contains(DRYRUN_OPTION)) {
+ String[] dryrunStr = wc.dryrun(getConfiguration(commandLine)).split("action for new instance");
+ int arraysize = dryrunStr.length;
+ System.out.println("***coordJob after parsing: ***");
+ System.out.println(dryrunStr[0]);
+ int aLen = dryrunStr.length - 1;
+ if (aLen < 0) {
+ aLen = 0;
+ }
+ System.out.println("***total coord actions is " + aLen + " ***");
+ for (int i = 1; i <= arraysize - 1; i++) {
+ System.out.println(RULER);
+ System.out.println("coordAction instance: " + i + ":");
+ System.out.println(dryrunStr[i]);
+ }
+ }
+ else {
+ if (options.contains(SUSPEND_OPTION)) {
+ wc.suspend(commandLine.getOptionValue(SUSPEND_OPTION));
+ }
+ else {
+ if (options.contains(RESUME_OPTION)) {
+ wc.resume(commandLine.getOptionValue(RESUME_OPTION));
+ }
+ else {
+ if (options.contains(KILL_OPTION)) {
+ wc.kill(commandLine.getOptionValue(KILL_OPTION));
+ }
+ else {
+ if (options.contains(RUN_OPTION)) {
+ System.out.println(JOB_ID_PREFIX + wc.run(getConfiguration(commandLine)));
+ }
+ else {
+ if (options.contains(RERUN_OPTION)) {
+ wc.reRun(commandLine.getOptionValue(RERUN_OPTION), getConfiguration(commandLine));
+ }
+ else {
+ if (options.contains(INFO_OPTION)) {
+ if (commandLine.getOptionValue(INFO_OPTION).endsWith("-C")) {
+ String s = commandLine.getOptionValue(OFFSET_OPTION);
+ int start = Integer.parseInt((s != null) ? s : "0");
+ s = commandLine.getOptionValue(LEN_OPTION);
+ int len = Integer.parseInt((s != null) ? s : "0");
+ printCoordJob(wc.getCoordJobInfo(commandLine.getOptionValue(INFO_OPTION), start, len), options
+ .contains(LOCAL_TIME_OPTION), options.contains(VERBOSE_OPTION));
+ }
+ else {
+ if (commandLine.getOptionValue(INFO_OPTION).contains("-C@")) {
+ printCoordAction(wc.getCoordActionInfo(commandLine.getOptionValue(INFO_OPTION)), options
+ .contains(LOCAL_TIME_OPTION));
+ }
+ else {
+ if (commandLine.getOptionValue(INFO_OPTION).contains("-W@")) {
+ printWorkflowAction(wc.getWorkflowActionInfo(commandLine.getOptionValue(INFO_OPTION)), options
+ .contains(LOCAL_TIME_OPTION));
+ }
+ else {
+ String s = commandLine.getOptionValue(OFFSET_OPTION);
+ int start = Integer.parseInt((s != null) ? s : "0");
+ s = commandLine.getOptionValue(LEN_OPTION);
+ String jobtype = commandLine.getOptionValue(JOBTYPE_OPTION);
+ jobtype = (jobtype != null) ? jobtype : "wf";
+ int len = Integer.parseInt((s != null) ? s : "0");
+ printJob(wc.getJobInfo(commandLine.getOptionValue(INFO_OPTION), start, len), options
+ .contains(LOCAL_TIME_OPTION), options.contains(VERBOSE_OPTION));
+ }
+ }
+ }
+ }
+ else {
+ if (options.contains(LOG_OPTION)) {
+ System.out.println(wc.getJobLog(commandLine.getOptionValue(LOG_OPTION)));
+ }
+ else {
+ if (options.contains(DEFINITION_OPTION)) {
+ System.out.println(wc.getJobDefinition(commandLine.getOptionValue(DEFINITION_OPTION)));
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
}
+
}
catch (OozieClientException ex) {
throw new OozieCLIException(ex.toString(), ex);
}
}
- private static final String JOBS_FORMATTER = "%-32s%-22s%-11s%-5s%-10s%-10s%-24s%-24s%-24s%-23s";
+ private void printCoordJob(CoordinatorJob coordJob, boolean localtime, boolean verbose) {
+ System.out.println("Job ID : " + coordJob.getId());
- private static final String JOB_FORMATTER = "%-13s : %-72s";
+ System.out.println(RULER);
+
+ List actions = coordJob.getActions();
+ System.out.println("Job Name : " + maskIfNull(coordJob.getAppName()));
+ System.out.println("App Path : " + maskIfNull(coordJob.getAppPath()));
+ System.out.println("Status : " + coordJob.getStatus());
+ System.out.println(RULER);
+
+ if (verbose) {
+ System.out.println("ID" + VERBOSE_DELIMITER + "Action Number" + VERBOSE_DELIMITER + "Console URL"
+ + VERBOSE_DELIMITER + "Error Code" + VERBOSE_DELIMITER + "Error Message" + VERBOSE_DELIMITER
+ + "External ID" + VERBOSE_DELIMITER + "External Status" + VERBOSE_DELIMITER + "Job ID"
+ + VERBOSE_DELIMITER + "Tracker URI" + VERBOSE_DELIMITER + "Created" + VERBOSE_DELIMITER + "Status"
+ + VERBOSE_DELIMITER + "Last Modified" + VERBOSE_DELIMITER + "Missing Dependencies");
+ System.out.println(RULER);
+
+ for (CoordinatorAction action : actions) {
+ System.out.println(maskIfNull(action.getId()) + VERBOSE_DELIMITER + action.getActionNumber()
+ + VERBOSE_DELIMITER + maskIfNull(action.getConsoleUrl()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getErrorCode()) + VERBOSE_DELIMITER + maskIfNull(action.getErrorMessage())
+ + VERBOSE_DELIMITER + maskIfNull(action.getExternalId()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getExternalStatus()) + VERBOSE_DELIMITER + maskIfNull(action.getJobId())
+ + VERBOSE_DELIMITER + maskIfNull(action.getTrackerUri()) + VERBOSE_DELIMITER
+ + maskDate(action.getCreatedTime(), localtime) + VERBOSE_DELIMITER + action.getStatus()
+ + VERBOSE_DELIMITER + maskDate(action.getLastModifiedTime(), localtime) + VERBOSE_DELIMITER
+ + maskIfNull(action.getMissingDependencies()));
+
+ System.out.println(RULER);
+ }
+ }
+ else {
+ System.out.println(String.format(COORD_ACTION_FORMATTER, "ID", "Status", "Ext ID", "Err Code", "Created",
+ "Last Mod"));
+
+ for (CoordinatorAction action : actions) {
+ System.out.println(String
+ .format(COORD_ACTION_FORMATTER, maskIfNull(action.getId()), action.getStatus(),
+ maskIfNull(action.getExternalId()), maskIfNull(action.getErrorCode()), maskDate(action
+ .getCreatedTime(), localtime),
+ maskDate(action.getLastModifiedTime(), localtime)));
- private static final String ACTION_FORMATTER = "%-24s%-12s%-11s%-13s%-22s%-16s%-14s%-24s%-23s";
+ System.out.println(RULER);
+ }
+ }
+ }
+
+ private void printCoordAction(CoordinatorAction coordAction, boolean contains) {
+ System.out.println("ID : " + maskIfNull(coordAction.getId()));
+
+ System.out.println(RULER);
+
+ System.out.println("Action Number : " + coordAction.getActionNumber());
+ System.out.println("Console URL : " + maskIfNull(coordAction.getConsoleUrl()));
+ System.out.println("Error Code : " + maskIfNull(coordAction.getErrorCode()));
+ System.out.println("Error Message : " + maskIfNull(coordAction.getErrorMessage()));
+ System.out.println("External ID : " + maskIfNull(coordAction.getExternalId()));
+ System.out.println("External Status : " + maskIfNull(coordAction.getExternalStatus()));
+ System.out.println("Job ID : " + maskIfNull(coordAction.getJobId()));
+ System.out.println("Tracker URI : " + maskIfNull(coordAction.getTrackerUri()));
+ System.out.println("Created : " + maskDate(coordAction.getCreatedTime(), contains));
+ System.out.println("Status : " + coordAction.getStatus());
+ System.out.println("Last Modified : " + maskDate(coordAction.getLastModifiedTime(), contains));
+ System.out.println("Missing Dependencies : " + maskIfNull(coordAction.getMissingDependencies()));
+
+ System.out.println(RULER);
+ }
+
+ private void printWorkflowAction(WorkflowAction action, boolean contains) {
+ System.out.println("ID : " + maskIfNull(action.getId()));
- private void printJob(WorkflowJob job, boolean localtime) throws IOException {
- System.out.println("Job Id: " + job.getId());
System.out.println(RULER);
- System.out.println(String.format(JOB_FORMATTER, "Workflow Name", job.getAppName()));
- System.out.println(String.format(JOB_FORMATTER, "App Path", job.getAppPath()));
- System.out.println(String.format(JOB_FORMATTER, "Status", job.getStatus()));
- System.out.println(String.format(JOB_FORMATTER, "Run", job.getRun()));
- System.out.println(String.format(JOB_FORMATTER, "User", job.getUser()));
- System.out.println(String.format(JOB_FORMATTER, "Group", job.getGroup()));
- System.out.println(String.format(JOB_FORMATTER, "Created", maskDate(job.getCreatedTime(), localtime)));
- System.out.println(String.format(JOB_FORMATTER, "Started", maskDate(job.getStartTime(), localtime)));
- System.out.println(String.format(JOB_FORMATTER, "Last Modified", maskDate(job.getLastModTime(), localtime)));
- System.out.println(String.format(JOB_FORMATTER, "Ended", maskDate(job.getEndTime(), localtime)));
+ System.out.println("Console URL : " + maskIfNull(action.getConsoleUrl()));
+ System.out.println("Error Code : " + maskIfNull(action.getErrorCode()));
+ System.out.println("Error Message : " + maskIfNull(action.getErrorMessage()));
+ System.out.println("External ID : " + maskIfNull(action.getExternalId()));
+ System.out.println("External Status : " + maskIfNull(action.getExternalStatus()));
+ System.out.println("Name : " + maskIfNull(action.getName()));
+ System.out.println("Retries : " + action.getRetries());
+ System.out.println("Tracker URI : " + maskIfNull(action.getTrackerUri()));
+ System.out.println("Type : " + maskIfNull(action.getType()));
+ System.out.println("Started : " + maskDate(action.getStartTime(), contains));
+ System.out.println("Status : " + action.getStatus());
+ System.out.println("Ended : " + maskDate(action.getEndTime(), contains));
+
+ System.out.println(RULER);
+ }
+
+ private static final String WORKFLOW_JOBS_FORMATTER = "%-41s%-13s%-10s%-10s%-10s%-24s%-24s";
+ private static final String COORD_JOBS_FORMATTER = "%-41s%-15s%-10s%-5s%-13s%-24s%-24s";
+
+ private static final String WORKFLOW_ACTION_FORMATTER = "%-78s%-10s%-23s%-11s%-10s";
+ private static final String COORD_ACTION_FORMATTER = "%-41s%-10s%-37s%-10s%-17s%-17s";
+
+ private void printJob(WorkflowJob job, boolean localtime, boolean verbose) throws IOException {
+ System.out.println("Job ID : " + maskIfNull(job.getId()));
+
+ System.out.println(RULER);
+
+ System.out.println("Workflow Name : " + maskIfNull(job.getAppName()));
+ System.out.println("App Path : " + maskIfNull(job.getAppPath()));
+ System.out.println("Status : " + job.getStatus());
+ System.out.println("Run : " + job.getRun());
+ System.out.println("User : " + maskIfNull(job.getUser()));
+ System.out.println("Group : " + maskIfNull(job.getGroup()));
+ System.out.println("Created : " + maskDate(job.getCreatedTime(), localtime));
+ System.out.println("Started : " + maskDate(job.getStartTime(), localtime));
+ System.out.println("Last Modified : " + maskDate(job.getLastModifiedTime(), localtime));
+ System.out.println("Ended : " + maskDate(job.getEndTime(), localtime));
List actions = job.getActions();
- if(actions!=null && actions.size()>0){
+
+ if (actions != null && actions.size() > 0) {
System.out.println();
System.out.println("Actions");
System.out.println(RULER);
- System.out.println(String.format(ACTION_FORMATTER,
- "Action Name", "Type", "Status", "Transition",
- "Ext. Id", "Ext. Status", "Error Code",
- "Started", "Ended"));
- System.out.println(RULER);
-
- for(WorkflowAction action:job.getActions()){
- System.out.println(String.format(ACTION_FORMATTER, action.getName(),
- action.getType(), action.getStatus(), maskIfNull(action.getTransition()),
- maskIfNull(action.getExternalId()), maskIfNull(action.getExternalStatus()),
- maskIfNull(action.getErrorCode()), maskDate(action.getStartTime(), localtime),
- maskDate(action.getEndTime(), localtime)));
+
+ if (verbose) {
+ System.out.println("ID" + VERBOSE_DELIMITER + "Console URL" + VERBOSE_DELIMITER + "Error Code"
+ + VERBOSE_DELIMITER + "Error Message" + VERBOSE_DELIMITER + "External ID" + VERBOSE_DELIMITER
+ + "External Status" + VERBOSE_DELIMITER + "Name" + VERBOSE_DELIMITER + "Retries"
+ + VERBOSE_DELIMITER + "Tracker URI" + VERBOSE_DELIMITER + "Type" + VERBOSE_DELIMITER
+ + "Started" + VERBOSE_DELIMITER + "Status" + VERBOSE_DELIMITER + "Ended");
+ System.out.println(RULER);
+
+ for (WorkflowAction action : job.getActions()) {
+ System.out.println(maskIfNull(action.getId()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getConsoleUrl()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getErrorCode()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getErrorMessage()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getExternalId()) + VERBOSE_DELIMITER
+ + maskIfNull(action.getExternalStatus()) + VERBOSE_DELIMITER + maskIfNull(action.getName())
+ + VERBOSE_DELIMITER + action.getRetries() + VERBOSE_DELIMITER
+ + maskIfNull(action.getTrackerUri()) + VERBOSE_DELIMITER + maskIfNull(action.getType())
+ + VERBOSE_DELIMITER + maskDate(action.getStartTime(), localtime) + VERBOSE_DELIMITER
+ + action.getStatus() + VERBOSE_DELIMITER + maskDate(action.getEndTime(), localtime));
+
+ System.out.println(RULER);
+ }
+ }
+ else {
+ System.out.println(String.format(WORKFLOW_ACTION_FORMATTER, "ID", "Status", "Ext ID", "Ext Status",
+ "Err Code"));
+
System.out.println(RULER);
+
+ for (WorkflowAction action : job.getActions()) {
+ System.out.println(String.format(WORKFLOW_ACTION_FORMATTER, maskIfNull(action.getId()), action
+ .getStatus(), maskIfNull(action.getExternalId()), maskIfNull(action.getExternalStatus()),
+ maskIfNull(action.getErrorCode())));
+
+ System.out.println(RULER);
+ }
}
}
else {
- System.out.println(RULER);
+ System.out.println(RULER);
}
+
System.out.println();
}
@@ -474,10 +722,78 @@ private void jobsCommand(CommandLine commandLine) throws IOException, OozieCLIEx
String s = commandLine.getOptionValue(OFFSET_OPTION);
int start = Integer.parseInt((s != null) ? s : "0");
s = commandLine.getOptionValue(LEN_OPTION);
+ String jobtype = commandLine.getOptionValue(JOBTYPE_OPTION);
+ jobtype = (jobtype != null) ? jobtype : "wf";
int len = Integer.parseInt((s != null) ? s : "0");
+ try {
+ if (jobtype.contains("wf")) {
+ printJobs(wc.getJobsInfo(filter, start, len), commandLine.hasOption(LOCAL_TIME_OPTION), commandLine
+ .hasOption(VERBOSE_OPTION));
+ }
+ else {
+ printCoordJobs(wc.getCoordJobsInfo(filter, start, len), commandLine.hasOption(LOCAL_TIME_OPTION),
+ commandLine.hasOption(VERBOSE_OPTION));
+ }
+
+ }
+ catch (OozieClientException ex) {
+ throw new OozieCLIException(ex.toString(), ex);
+ }
+ }
+
+ private void printCoordJobs(List jobs, boolean localtime, boolean verbose) throws IOException {
+ if (jobs != null && jobs.size() > 0) {
+ if (verbose) {
+ System.out.println("Job ID" + VERBOSE_DELIMITER + "App Name" + VERBOSE_DELIMITER + "App Path"
+ + VERBOSE_DELIMITER + "Console URL" + VERBOSE_DELIMITER + "User" + VERBOSE_DELIMITER + "Group"
+ + VERBOSE_DELIMITER + "Concurrency" + VERBOSE_DELIMITER + "Frequency" + VERBOSE_DELIMITER
+ + "Time Unit" + VERBOSE_DELIMITER + "Time Zone" + VERBOSE_DELIMITER + "Time Out"
+ + VERBOSE_DELIMITER + "Started" + VERBOSE_DELIMITER + "Next Materialize" + VERBOSE_DELIMITER
+ + "Status" + VERBOSE_DELIMITER + "Last Action" + VERBOSE_DELIMITER + "Ended");
+ System.out.println(RULER);
+
+ for (CoordinatorJob job : jobs) {
+ System.out.println(maskIfNull(job.getId()) + VERBOSE_DELIMITER + maskIfNull(job.getAppName())
+ + VERBOSE_DELIMITER + maskIfNull(job.getAppPath()) + VERBOSE_DELIMITER
+ + maskIfNull(job.getConsoleUrl()) + VERBOSE_DELIMITER + maskIfNull(job.getUser())
+ + VERBOSE_DELIMITER + maskIfNull(job.getGroup()) + VERBOSE_DELIMITER + job.getConcurrency()
+ + VERBOSE_DELIMITER + job.getFrequency() + VERBOSE_DELIMITER + job.getTimeUnit()
+ + VERBOSE_DELIMITER + maskIfNull(job.getTimeZone()) + VERBOSE_DELIMITER + job.getTimeout()
+ + VERBOSE_DELIMITER + maskDate(job.getStartTime(), localtime) + VERBOSE_DELIMITER
+ + maskDate(job.getNextMaterializedTime(), localtime) + VERBOSE_DELIMITER + job.getStatus()
+ + VERBOSE_DELIMITER + maskDate(job.getLastActionTime(), localtime) + VERBOSE_DELIMITER
+ + maskDate(job.getEndTime(), localtime));
+
+ System.out.println(RULER);
+ }
+ }
+ else {
+ System.out.println(String.format(COORD_JOBS_FORMATTER, "Job ID", "App Name", "Status", "Freq", "Unit",
+ "Started", "Next Materialized"));
+ System.out.println(RULER);
+
+ for (CoordinatorJob job : jobs) {
+ System.out.println(String.format(COORD_JOBS_FORMATTER, maskIfNull(job.getId()), maskIfNull(job
+ .getAppName()), job.getStatus(), job.getFrequency(), job.getTimeUnit(), maskDate(job
+ .getStartTime(), localtime), maskDate(job.getNextMaterializedTime(), localtime)));
+
+ System.out.println(RULER);
+ }
+ }
+ }
+ else {
+ System.out.println("No Jobs match your criteria!");
+ }
+ }
+ private void slaCommand(CommandLine commandLine) throws IOException, OozieCLIException {
+ OozieClient wc = createOozieClient(commandLine);
+ String s = commandLine.getOptionValue(OFFSET_OPTION);
+ int start = Integer.parseInt((s != null) ? s : "0");
+ s = commandLine.getOptionValue(LEN_OPTION);
+ int len = Integer.parseInt((s != null) ? s : "100");
try {
- printJobs(wc.getJobsInfo(filter, start, len), commandLine.hasOption(LOCAL_TIME_OPTION));
+ wc.getSlaInfo(start, len);
}
catch (OozieClientException ex) {
throw new OozieCLIException(ex.toString(), ex);
@@ -493,26 +809,33 @@ private void adminCommand(CommandLine commandLine) throws OozieCLIException {
}
try {
- boolean status = false;
+ SYSTEM_MODE status = SYSTEM_MODE.NORMAL;
if (options.contains(VERSION_OPTION)) {
System.out.println("Oozie server build version: " + wc.getServerBuildVersion());
}
else {
- if (options.contains(SAFEMODE_OPTION)) {
- String safeModeOption = commandLine.getOptionValue(SAFEMODE_OPTION);
+ if (options.contains(SYSTEM_MODE_OPTION)) {
+ String systemModeOption = commandLine.getOptionValue(SYSTEM_MODE_OPTION).toUpperCase();
try {
- status = safeModeOption.equalsIgnoreCase("ON");
+ // status = safeModeOption.equalsIgnoreCase("ON");
+ status = SYSTEM_MODE.valueOf(systemModeOption);
}
catch (Exception e) {
- throw new OozieCLIException("Invalid input provided for option: " + SAFEMODE_OPTION);
+ throw new OozieCLIException("Invalid input provided for option: " + SYSTEM_MODE_OPTION
+ + " value given :" + systemModeOption
+ + " Expected values are: NORMAL/NOWEBSERVICE/SAFEMODE ");
}
- wc.setSafeMode(status);
+ wc.setSystemMode(status);
}
- else if (options.contains(STATUS_OPTION)) {
- status = wc.isInSafeMode();
+ else {
+ if (options.contains(STATUS_OPTION)) {
+ // status = wc.isInSafeMode();
+ status = wc.getSystemMode();
+ }
}
- System.out.println("Safemode: " + (status ? "ON" : "OFF"));
+ // System.out.println("Safemode: " + (status ? "ON" : "OFF"));
+ System.out.println("System mode: " + status);
}
}
catch (OozieClientException ex) {
@@ -521,48 +844,69 @@ else if (options.contains(STATUS_OPTION)) {
}
private void versionCommand() throws OozieCLIException {
- System.out.println("Oozie client build version: " +
- BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VERSION));
+ System.out.println("Oozie client build version: "
+ + BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VERSION));
}
- private void printJobs(List jobs, boolean localtime) throws IOException {
- if(jobs!=null && jobs.size() > 0) {
- System.out.println(String.format(JOBS_FORMATTER, "Job Id", "Workflow Name", "Status", "Run", "User",
- "Group", "Created", "Started", "Last Modified", "Ended"));
- System.out.println(RULER);
+ private void printJobs(List jobs, boolean localtime, boolean verbose) throws IOException {
+ if (jobs != null && jobs.size() > 0) {
+ if (verbose) {
+ System.out.println("Job ID" + VERBOSE_DELIMITER + "App Name" + VERBOSE_DELIMITER + "App Path"
+ + VERBOSE_DELIMITER + "Console URL" + VERBOSE_DELIMITER + "User" + VERBOSE_DELIMITER + "Group"
+ + VERBOSE_DELIMITER + "Run" + VERBOSE_DELIMITER + "Created" + VERBOSE_DELIMITER + "Started"
+ + VERBOSE_DELIMITER + "Status" + VERBOSE_DELIMITER + "Last Modified" + VERBOSE_DELIMITER
+ + "Ended");
+ System.out.println(RULER);
+
+ for (WorkflowJob job : jobs) {
+ System.out.println(maskIfNull(job.getId()) + VERBOSE_DELIMITER + maskIfNull(job.getAppName())
+ + VERBOSE_DELIMITER + maskIfNull(job.getAppPath()) + VERBOSE_DELIMITER
+ + maskIfNull(job.getConsoleUrl()) + VERBOSE_DELIMITER + maskIfNull(job.getUser())
+ + VERBOSE_DELIMITER + maskIfNull(job.getGroup()) + VERBOSE_DELIMITER + job.getRun()
+ + VERBOSE_DELIMITER + maskDate(job.getCreatedTime(), localtime) + VERBOSE_DELIMITER
+ + maskDate(job.getStartTime(), localtime) + VERBOSE_DELIMITER + job.getStatus()
+ + VERBOSE_DELIMITER + maskDate(job.getLastModifiedTime(), localtime) + VERBOSE_DELIMITER
+ + maskDate(job.getEndTime(), localtime));
+
+ System.out.println(RULER);
+ }
+ }
+ else {
+ System.out.println(String.format(WORKFLOW_JOBS_FORMATTER, "Job ID", "App Name", "Status", "User",
+ "Group", "Started", "Ended"));
+ System.out.println(RULER);
- for (WorkflowJob job : jobs) {
+ for (WorkflowJob job : jobs) {
+ System.out.println(String.format(WORKFLOW_JOBS_FORMATTER, maskIfNull(job.getId()), maskIfNull(job
+ .getAppName()), job.getStatus(), maskIfNull(job.getUser()), maskIfNull(job.getGroup()),
+ maskDate(job.getStartTime(), localtime), maskDate(job.getEndTime(), localtime)));
- System.out.println(String.format(JOBS_FORMATTER,
- job.getId(), job.getAppName(), job.getStatus().toString(),
- job.getRun(), job.getUser(), job.getGroup(),
- maskDate(job.getCreatedTime(), localtime),
- maskDate(job.getStartTime(), localtime),
- maskDate(job.getLastModTime(), localtime),
- maskDate(job.getEndTime(), localtime)));
+ System.out.println(RULER);
+ }
}
- System.out.println(RULER);
- } else {
+ }
+ else {
System.out.println("No Jobs match your criteria!");
}
}
-
- private String maskIfNull(String value){
- if(value!=null && value.length()>0){
+
+ private String maskIfNull(String value) {
+ if (value != null && value.length() > 0) {
return value;
}
return "-";
}
-
- private String maskDate(Date date, boolean isLocalTimeZone){
- if(date==null){
+
+ private String maskDate(Date date, boolean isLocalTimeZone) {
+ if (date == null) {
return "-";
}
-
- SimpleDateFormat dateFormater = new SimpleDateFormat("yyyy-MM-dd HH:mm Z",
- Locale.US);
- if(!isLocalTimeZone){
- dateFormater.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+ // SimpleDateFormat dateFormater = new SimpleDateFormat("yyyy-MM-dd
+ // HH:mm Z", Locale.US);
+ SimpleDateFormat dateFormater = new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.US);
+ if (!isLocalTimeZone) {
+ dateFormater.setTimeZone(TimeZone.getTimeZone("GMT"));
}
return dateFormater.format(date);
}
@@ -576,8 +920,8 @@ private void validateCommand(CommandLine commandLine) throws OozieCLIException {
if (file.exists()) {
try {
List sources = new ArrayList();
- sources.add(new StreamSource(
- Thread.currentThread().getContextClassLoader().getResourceAsStream("oozie-workflow-0.1.xsd")));
+ sources.add(new StreamSource(Thread.currentThread().getContextClassLoader().getResourceAsStream(
+ "oozie-workflow-0.1.xsd")));
SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Schema schema = factory.newSchema(sources.toArray(new StreamSource[sources.size()]));
Validator validator = schema.newValidator();
@@ -592,5 +936,4 @@ private void validateCommand(CommandLine commandLine) throws OozieCLIException {
throw new OozieCLIException("File does not exists");
}
}
-
}
diff --git a/client/src/main/java/org/apache/oozie/client/CoordinatorAction.java b/client/src/main/java/org/apache/oozie/client/CoordinatorAction.java
new file mode 100644
index 000000000..2bdd6cf02
--- /dev/null
+++ b/client/src/main/java/org/apache/oozie/client/CoordinatorAction.java
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client;
+
+import java.util.List;
+import java.util.Date;
+
+/**
+ * Bean that represents an Oozie application instance.
+ */
+
+public interface CoordinatorAction {
+ /**
+ * Defines the possible statuses of an application instance.
+ */
+ public static enum Status {
+ WAITING,
+ READY,
+ SUBMITTED,
+ RUNNING,
+ TIMEDOUT,
+ SUCCEEDED,
+ KILLED,
+ FAILED,
+ DISCARDED
+ }
+
+ /**
+ * Return the coordinator job ID.
+ *
+ * @return the coordinator job ID.
+ */
+ String getJobId();
+
+ /**
+ * Return the application instance ID.
+ *
+ * @return the application instance ID.
+ */
+ String getId();
+
+ /**
+ * Return the creation time for the application instance
+ *
+ * @return the creation time for the application instance
+ */
+ Date getCreatedTime();
+
+ /**
+ * Return the application instance's created configuration.
+ *
+ * @return the application instance configuration.
+ */
+ String getCreatedConf();
+
+
+ /**
+ * Return the last modified time
+ *
+ * @return the last modified time
+ */
+ Date getLastModifiedTime();
+
+ /**
+ * Return the action number
+ *
+ * @return the action number
+ */
+ int getActionNumber();
+
+ /**
+ * Return the run-time configuration
+ *
+ * @return the run-time configuration
+ */
+ String getRunConf();
+
+ /**
+ * Return the current status of the application instance.
+ *
+ * @return the current status of the application instance.
+ */
+ Status getStatus();
+
+ /**
+ * Return the missing dependencies for the particular action
+ *
+ * @return the missing dependencies for the particular action
+ */
+ String getMissingDependencies();
+
+
+ /**
+ * Return the external status of the application instance.
+ *
+ * @return the external status of the application instance.
+ */
+ String getExternalStatus();
+
+ /**
+ * Return the URL to programmatically track the status of the application instance.
+ *
+ * @return the URL to programmatically track the status of the application instance.
+ */
+ String getTrackerUri();
+
+ /**
+ * Return the URL to the web console of the system executing the application instance.
+ *
+ * @return the URL to the web console of the system executing the application instance.
+ */
+ String getConsoleUrl();
+
+ /**
+ * Return the error code of the application instance, if it ended in ERROR.
+ *
+ * @return the error code of the application instance.
+ */
+ String getErrorCode();
+
+ /**
+ * Return the error message of the application instance, if it ended in ERROR.
+ *
+ * @return the error message of the application instance.
+ */
+ String getErrorMessage();
+
+ void setErrorCode(String errorCode);
+
+ void setErrorMessage(String errorMessage);
+
+ String getExternalId();
+
+}
diff --git a/client/src/main/java/org/apache/oozie/client/CoordinatorJob.java b/client/src/main/java/org/apache/oozie/client/CoordinatorJob.java
new file mode 100644
index 000000000..71d3640fd
--- /dev/null
+++ b/client/src/main/java/org/apache/oozie/client/CoordinatorJob.java
@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client;
+
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Bean that represents an Oozie coordinator application.
+ */
+public interface CoordinatorJob {
+
+ /**
+ * Defines the possible statuses of an Oozie application.
+ */
+ public static enum Status {
+ PREP, PREMATER, RUNNING, SUSPENDED, SUCCEEDED, KILLED, FAILED
+ }
+
+ /**
+ * Defines the possible execution order of an Oozie application.
+ */
+ public static enum Execution {
+ FIFO, LIFO, LAST_ONLY
+ }
+
+ /**
+ * Defines the possible frequency unit of an Oozie application.
+ */
+ public static enum Timeunit {
+ MINUTE, HOUR, DAY, WEEK, MONTH, END_OF_DAY, END_OF_MONTH, NONE
+ }
+
+ /**
+ * Return the path to the Oozie application.
+ *
+ * @return the path to the Oozie application.
+ */
+ String getAppPath();
+
+ /**
+ * Return the name of the Oozie application (from the application definition).
+ *
+ * @return the name of the Oozie application.
+ */
+ String getAppName();
+
+ /**
+ * Return the application ID.
+ *
+ * @return the application ID.
+ */
+ String getId();
+
+ /**
+ * Return the application configuration.
+ *
+ * @return the application configuration.
+ */
+ String getConf();
+
+ /**
+ * Return the application status.
+ *
+ * @return the application status.
+ */
+ Status getStatus();
+
+ /**
+ * Return the frequency for the coord job, in units of minutes.
+ *
+ * @return the frequency for the coord job, in units of minutes
+ */
+ int getFrequency();
+
+ /**
+ * Return the time unit for the coord job, one of the Timeunit enum values, e.g. MINUTE, HOUR, DAY, WEEK or MONTH
+ *
+ * @return the time unit for the coord job
+ */
+ Timeunit getTimeUnit();
+
+ /**
+ * Return the time zone information for the coord job
+ *
+ * @return the time zone information for the coord job
+ */
+ String getTimeZone();
+
+ /**
+ * Return the concurrency for the coord job
+ *
+ * @return the concurrency for the coord job
+ */
+ int getConcurrency();
+
+ /**
+ * Return the execution order policy for the coord job
+ *
+ * @return the execution order policy for the coord job
+ */
+ Execution getExecutionOrder();
+
+ /**
+ * Return the time out value for the coord job
+ *
+ * @return the time out value for the coord job
+ */
+ int getTimeout();
+
+ /**
+ * Return the date for the last action of the coord job
+ *
+ * @return the date for the last action of the coord job
+ */
+ Date getLastActionTime();
+
+ /**
+ * Return the application next materialized time.
+ *
+ * @return the application next materialized time.
+ */
+ Date getNextMaterializedTime();
+
+ /**
+ * Return the application start time.
+ *
+ * @return the application start time.
+ */
+ Date getStartTime();
+
+ /**
+ * Return the application end time.
+ *
+ * @return the application end time.
+ */
+ Date getEndTime();
+
+ /**
+ * Return the application user owner.
+ *
+ * @return the application user owner.
+ */
+ String getUser();
+
+ /**
+ * Return the application group.
+ *
+ * @return the application group.
+ */
+ String getGroup();
+
+ /**
+ * Return the BundleId.
+ *
+ * @return the BundleId.
+ */
+ String getBundleId();
+
+ /**
+ * Return the application console URL.
+ *
+ * @return the application console URL.
+ */
+ String getConsoleUrl();
+
+ /**
+ * Return list of coordinator actions.
+ *
+ * @return the list of coordinator actions.
+ */
+ List getActions();
+}
diff --git a/client/src/main/java/org/apache/oozie/client/OozieClient.java b/client/src/main/java/org/apache/oozie/client/OozieClient.java
index 550662a5f..7c6f8fa1f 100644
--- a/client/src/main/java/org/apache/oozie/client/OozieClient.java
+++ b/client/src/main/java/org/apache/oozie/client/OozieClient.java
@@ -17,7 +17,10 @@
*/
package org.apache.oozie.client;
+import org.apache.oozie.client.rest.JsonCoordinatorAction;
+import org.apache.oozie.client.rest.JsonCoordinatorJob;
import org.apache.oozie.client.rest.JsonWorkflowJob;
+import org.apache.oozie.client.rest.JsonWorkflowAction;
import org.apache.oozie.client.rest.RestConstants;
import org.apache.oozie.client.rest.JsonTags;
import org.apache.oozie.BuildInfo;
@@ -32,6 +35,8 @@
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.parsers.DocumentBuilderFactory;
+
+import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
@@ -39,6 +44,8 @@
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -49,30 +56,19 @@
import java.util.concurrent.Callable;
/**
- * Client API to submit and manage Oozie workflow jobs against an Oozie intance.
- *
- * This class is thread safe.
- *
- * Syntax for filter for the {@link #getJobsInfo(String)} {@link #getJobsInfo(String, int, int)} methods:
- * [NAME=VALUE][;NAME=VALUE]*
.
- *
- * Valid filter names are:
- *
- *
- * name: the workflow application name from the workflow definition.
- * user: the user that submitted the job.
- * group: the group for the job.
- * status: the status of the job.
- *
- *
- * The query will do an AND among all the filter names.
- * The query will do an OR among all the filter values for the same name. Multiple values must be specified as
- * different name value pairs.
+ * Client API to submit and manage Oozie workflow jobs against an Oozie instance. This class is thread safe.
+ * Syntax for filter for the {@link #getJobsInfo(String)} {@link #getJobsInfo(String, int, int)} methods:
+ * [NAME=VALUE][;NAME=VALUE]*
. Valid filter names are: name: the workflow application
+ * name from the workflow definition. user: the user that submitted the job. group: the group for the
+ * job. status: the status of the job. The query will do an AND among all the filter names. The
+ * query will do an OR among all the filter values for the same name. Multiple values must be specified as different
+ * name value pairs.
*/
public class OozieClient {
- public static final long WS_PROTOCOL_VERSION = 0;
+ public static final long WS_PROTOCOL_VERSION_0 = 0;
+ public static final long WS_PROTOCOL_VERSION = 1;
public static final String USER_NAME = "user.name";
@@ -80,12 +76,16 @@ public class OozieClient {
public static final String APP_PATH = "oozie.wf.application.path";
+ public static final String COORDINATOR_APP_PATH = "oozie.coord.application.path";
+
public static final String EXTERNAL_ID = "oozie.wf.external.id";
public static final String WORKFLOW_NOTIFICATION_URL = "oozie.wf.workflow.notification.url";
public static final String ACTION_NOTIFICATION_URL = "oozie.wf.action.notification.url";
+ public static final String COORD_ACTION_NOTIFICATION_URL = "oozie.coord.action.notification.url";
+
public static final String RERUN_SKIP_NODES = "oozie.wf.rerun.skip.nodes";
public static final String LOG_TOKEN = "oozie.wf.log.token";
@@ -94,7 +94,6 @@ public class OozieClient {
public static final String ACTION_RETRY_INTERVAL = "oozie.wf.action.retry.interval";
-
public static final String FILTER_USER = "user";
public static final String FILTER_GROUP = "group";
@@ -103,7 +102,11 @@ public class OozieClient {
public static final String FILTER_STATUS = "status";
+ public static enum SYSTEM_MODE {
+ NORMAL, NOWEBSERVICE, SAFEMODE
+ }
+ ;
private String baseUrl;
private String protocolUrl;
@@ -143,9 +146,8 @@ public OozieClient(String oozieUrl) {
}
/**
- * Return the Oozie URL of the workflow client instance.
- *
- * This URL is the base URL fo the Oozie system, with not protocol versioning.
+ * Return the Oozie URL of the workflow client instance. This URL is the base URL of the Oozie system, with no
+ * protocol versioning.
*
* @return the Oozie URL of the workflow client instance.
*/
@@ -154,9 +156,8 @@ public String getOozieUrl() {
}
/**
- * Return the Oozie URL used by the client and server for WS communications.
- *
- * This URL is the original URL plus the versioning element path.
+ * Return the Oozie URL used by the client and server for WS communications. This URL is the original URL plus
+ * the versioning element path.
*
* @return the Oozie URL used by the client and server for communication.
* @throws OozieClientException thrown in the client and the server are not protocol compatible.
@@ -178,10 +179,13 @@ public synchronized void validateWSVersion() throws OozieClientException {
HttpURLConnection conn = createConnection(url, "GET");
if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
JSONArray array = (JSONArray) JSONValue.parse(new InputStreamReader(conn.getInputStream()));
- if (!array.contains(WS_PROTOCOL_VERSION)) {
+ if (array == null) {
+ throw new OozieClientException("HTTP error", "no response message");
+ }
+ if (!array.contains(WS_PROTOCOL_VERSION) && !array.contains(WS_PROTOCOL_VERSION_0)) {
StringBuilder msg = new StringBuilder();
- msg.append("Unsupported version [").append(WS_PROTOCOL_VERSION)
- .append("], supported versions[");
+ msg.append("Supported version [").append(WS_PROTOCOL_VERSION).append(
+ "] or less, Unsupported versions[");
String separator = "";
for (Object version : array) {
msg.append(separator).append(version);
@@ -189,6 +193,14 @@ public synchronized void validateWSVersion() throws OozieClientException {
msg.append("]");
throw new OozieClientException(OozieClientException.UNSUPPORTED_VERSION, msg.toString());
}
+ if (array.contains(WS_PROTOCOL_VERSION)) {
+ protocolUrl = baseUrl + "v" + WS_PROTOCOL_VERSION + "/";
+ }
+ else {
+ if (array.contains(WS_PROTOCOL_VERSION_0)) {
+ protocolUrl = baseUrl + "v" + WS_PROTOCOL_VERSION_0 + "/";
+ }
+ }
}
else {
handleError(conn);
@@ -197,7 +209,6 @@ public synchronized void validateWSVersion() throws OozieClientException {
catch (IOException ex) {
throw new OozieClientException(OozieClientException.IO_ERROR, ex);
}
- protocolUrl = baseUrl + "v" + WS_PROTOCOL_VERSION + "/";
validatedVersion = true;
}
}
@@ -251,8 +262,8 @@ public Iterator getHeaderNames() {
return Collections.unmodifiableMap(headers).keySet().iterator();
}
- private URL createURL(String collection, String resource, Map parameters)
- throws IOException, OozieClientException {
+ private URL createURL(String collection, String resource, Map parameters) throws IOException,
+ OozieClientException {
validateWSVersion();
StringBuilder sb = new StringBuilder();
sb.append(protocolUrl).append(collection);
@@ -263,8 +274,8 @@ private URL createURL(String collection, String resource, Map pa
String separator = "?";
for (Map.Entry param : parameters.entrySet()) {
if (param.getValue() != null) {
- sb.append(separator).append(URLEncoder.encode(param.getKey(), "UTF-8")).append("=")
- .append(URLEncoder.encode(param.getValue(), "UTF-8"));
+ sb.append(separator).append(URLEncoder.encode(param.getKey(), "UTF-8")).append("=").append(
+ URLEncoder.encode(param.getValue(), "UTF-8"));
separator = "&";
}
}
@@ -272,6 +283,17 @@ private URL createURL(String collection, String resource, Map pa
return new URL(sb.toString());
}
+ private boolean validateCommand(String url) {
+ {
+ if (protocolUrl.contains(baseUrl + "v0")) {
+ if (url.contains("dryrun") || url.contains("jobtype=c") || url.contains("systemmode")) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
private HttpURLConnection createConnection(URL url, String method) throws IOException, OozieClientException {
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod(method);
@@ -300,8 +322,15 @@ public ClientCallable(String method, String collection, String resource, Map {
JobSubmit(Properties conf, boolean start) {
super("POST", RestConstants.JOBS, "", (start) ? prepareParams(RestConstants.ACTION_PARAM,
- RestConstants.JOB_ACTION_START)
- : prepareParams());
+ RestConstants.JOB_ACTION_START) : prepareParams());
this.conf = notNull(conf, "conf");
}
JobSubmit(String jobId, Properties conf) {
- super("PUT", RestConstants.JOB, notEmpty(jobId, "jobId"),
- prepareParams(RestConstants.ACTION_PARAM, RestConstants.JOB_ACTION_RERUN));
+ super("PUT", RestConstants.JOB, notEmpty(jobId, "jobId"), prepareParams(RestConstants.ACTION_PARAM,
+ RestConstants.JOB_ACTION_RERUN));
+ this.conf = notNull(conf, "conf");
+ }
+
+ public JobSubmit(Properties conf, String jobActionDryrun) {
+ super("POST", RestConstants.JOBS, "", prepareParams(RestConstants.ACTION_PARAM,
+ RestConstants.JOB_ACTION_DRYRUN));
this.conf = notNull(conf, "conf");
+ // TODO Auto-generated constructor stub
}
protected String call(HttpURLConnection conn) throws IOException, OozieClientException {
@@ -420,8 +455,7 @@ public String submit(Properties conf) throws OozieClientException {
private class JobAction extends ClientCallable {
JobAction(String jobId, String action) {
- super("PUT", RestConstants.JOB, notEmpty(jobId, "jobId"),
- prepareParams(RestConstants.ACTION_PARAM, action));
+ super("PUT", RestConstants.JOB, notEmpty(jobId, "jobId"), prepareParams(RestConstants.ACTION_PARAM, action));
}
protected Void call(HttpURLConnection conn) throws IOException, OozieClientException {
@@ -432,6 +466,15 @@ protected Void call(HttpURLConnection conn) throws IOException, OozieClientExcep
}
}
+ /**
+ * dryrun for a given job
+ *
+ * @param conf Job configuration.
+ */
+ public String dryrun(Properties conf) throws OozieClientException {
+ return new JobSubmit(conf, RestConstants.JOB_ACTION_DRYRUN).call();
+ }
+
/**
* Start a workflow job.
*
@@ -496,9 +539,10 @@ public void kill(String jobId) throws OozieClientException {
private class JobInfo extends ClientCallable {
- JobInfo(String jobId) {
- super("GET", RestConstants.JOB, notEmpty(jobId, "jobId"),
- prepareParams(RestConstants.JOB_SHOW_PARAM, RestConstants.JOB_SHOW_INFO));
+ JobInfo(String jobId, int start, int len) {
+ super("GET", RestConstants.JOB, notEmpty(jobId, "jobId"), prepareParams(RestConstants.JOB_SHOW_PARAM,
+ RestConstants.JOB_SHOW_INFO, RestConstants.OFFSET_PARAM, Integer.toString(start),
+ RestConstants.LEN_PARAM, Integer.toString(len)));
}
protected WorkflowJob call(HttpURLConnection conn) throws IOException, OozieClientException {
@@ -514,6 +558,25 @@ protected WorkflowJob call(HttpURLConnection conn) throws IOException, OozieClie
}
}
+ private class WorkflowActionInfo extends ClientCallable {
+ WorkflowActionInfo(String actionId) {
+ super("GET", RestConstants.JOB, notEmpty(actionId, "id"), prepareParams(RestConstants.JOB_SHOW_PARAM,
+ RestConstants.JOB_SHOW_INFO));
+ }
+
+ protected WorkflowAction call(HttpURLConnection conn) throws IOException, OozieClientException {
+ if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+ Reader reader = new InputStreamReader(conn.getInputStream());
+ JSONObject json = (JSONObject) JSONValue.parse(reader);
+ return new JsonWorkflowAction(json);
+ }
+ else {
+ handleError(conn);
+ }
+ return null;
+ }
+ }
+
/**
* Get the info of a workflow job.
*
@@ -522,19 +585,205 @@ protected WorkflowJob call(HttpURLConnection conn) throws IOException, OozieClie
* @throws OozieClientException thrown if the job info could not be retrieved.
*/
public WorkflowJob getJobInfo(String jobId) throws OozieClientException {
- return new JobInfo(jobId).call();
+ return getJobInfo(jobId, 0, 0);
+ }
+
+ /**
+ * Get the info of a workflow job and subset actions.
+ *
+ * @param jobId job Id.
+ * @param start starting index in the list of actions belonging to the job
+ * @param len number of actions to be returned
+ * @return the job info.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public WorkflowJob getJobInfo(String jobId, int start, int len) throws OozieClientException {
+ return new JobInfo(jobId, start, len).call();
+ }
+
+ /**
+ * Get the info of a workflow action.
+ *
+ * @param actionId Id.
+ * @return the workflow action info.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public WorkflowAction getWorkflowActionInfo(String actionId) throws OozieClientException {
+ return new WorkflowActionInfo(actionId).call();
+ }
+
+ /**
+ * Get the log of a workflow job.
+ *
+ * @param jobId job Id.
+ * @return the job log.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public String getJobLog(String jobId) throws OozieClientException {
+ return new JobLog(jobId).call();
+ }
+
+ private class JobLog extends JobMetadata {
+
+ JobLog(String jobId) {
+ super(jobId, RestConstants.JOB_SHOW_LOG);
+ }
+ }
+
+ /**
+ * Get the definition of a workflow job.
+ *
+ * @param jobId job Id.
+ * @return the job log.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public String getJobDefinition(String jobId) throws OozieClientException {
+ return new JobDefinition(jobId).call();
+ }
+
+ private class JobDefinition extends JobMetadata {
+
+ JobDefinition(String jobId) {
+ super(jobId, RestConstants.JOB_SHOW_DEFINITION);
+ }
+ }
+
+ private class JobMetadata extends ClientCallable {
+
+ JobMetadata(String jobId, String metaType) {
+ super("GET", RestConstants.JOB, notEmpty(jobId, "jobId"), prepareParams(RestConstants.JOB_SHOW_PARAM,
+ metaType));
+ }
+
+ protected String call(HttpURLConnection conn) throws IOException, OozieClientException {
+ if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+
+ String output = getReaderAsString(new InputStreamReader(conn.getInputStream()), -1);
+ return output;
+ }
+ else {
+ handleError(conn);
+ }
+ return null;
+ }
+
+ /**
+ * Return a reader as string.
+ *
+ * @param reader reader to read into a string.
+ * @param maxLen max content length allowed, if -1 there is no limit.
+ * @return the reader content.
+ * @throws IOException thrown if the resource could not be read.
+ */
+ private String getReaderAsString(Reader reader, int maxLen) throws IOException {
+ if (reader == null) {
+ throw new IllegalArgumentException("reader cannot be null");
+ }
+
+ StringBuffer sb = new StringBuffer();
+ char[] buffer = new char[2048];
+ int read;
+ int count = 0;
+ while ((read = reader.read(buffer)) > -1) {
+ count += read;
+
+ // read up to maxLen chars;
+ if ((maxLen > -1) && (count > maxLen)) {
+ break;
+ }
+ sb.append(buffer, 0, read);
+ }
+ reader.close();
+ return sb.toString();
+ }
+ }
+
+ private class CoordJobInfo extends ClientCallable {
+
+ CoordJobInfo(String jobId, int start, int len) {
+ super("GET", RestConstants.JOB, notEmpty(jobId, "jobId"), prepareParams(RestConstants.JOB_SHOW_PARAM,
+ RestConstants.JOB_SHOW_INFO, RestConstants.OFFSET_PARAM, Integer.toString(start),
+ RestConstants.LEN_PARAM, Integer.toString(len)));
+ }
+
+ protected CoordinatorJob call(HttpURLConnection conn) throws IOException, OozieClientException {
+ if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+ Reader reader = new InputStreamReader(conn.getInputStream());
+ JSONObject json = (JSONObject) JSONValue.parse(reader);
+ return new JsonCoordinatorJob(json);
+ }
+ else {
+ handleError(conn);
+ }
+ return null;
+ }
+ }
+
+ private class CoordActionInfo extends ClientCallable {
+ CoordActionInfo(String actionId) {
+ super("GET", RestConstants.JOB, notEmpty(actionId, "id"), prepareParams(RestConstants.JOB_SHOW_PARAM,
+ RestConstants.JOB_SHOW_INFO));
+ }
+
+ protected CoordinatorAction call(HttpURLConnection conn) throws IOException, OozieClientException {
+ if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+ Reader reader = new InputStreamReader(conn.getInputStream());
+ JSONObject json = (JSONObject) JSONValue.parse(reader);
+ return new JsonCoordinatorAction(json);
+ }
+ else {
+ handleError(conn);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * Get the info of a coordinator job.
+ *
+ * @param jobId job Id.
+ * @return the job info.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public CoordinatorJob getCoordJobInfo(String jobId) throws OozieClientException {
+ return new CoordJobInfo(jobId, 0, 0).call();
+ }
+
+ /**
+ * Get the info of a coordinator job and subset actions.
+ *
+ * @param jobId job Id.
+ * @param start starting index in the list of actions belonging to the job
+ * @param len number of actions to be returned
+ * @return the job info.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public CoordinatorJob getCoordJobInfo(String jobId, int start, int len) throws OozieClientException {
+ return new CoordJobInfo(jobId, start, len).call();
+ }
+
+ /**
+ * Get the info of a coordinator action.
+ *
+ * @param actionId Id.
+ * @return the coordinator action info.
+ * @throws OozieClientException thrown if the job info could not be retrieved.
+ */
+ public CoordinatorAction getCoordActionInfo(String actionId) throws OozieClientException {
+ return new CoordActionInfo(actionId).call();
}
private class JobsStatus extends ClientCallable> {
JobsStatus(String filter, int start, int len) {
super("GET", RestConstants.JOBS, "", prepareParams(RestConstants.JOBS_FILTER_PARAM, filter,
- RestConstants.OFFSET_PARAM, Integer.toString(start),
+ RestConstants.JOBTYPE_PARAM, "wf", RestConstants.OFFSET_PARAM, Integer.toString(start),
RestConstants.LEN_PARAM, Integer.toString(len)));
}
@SuppressWarnings("unchecked")
protected List call(HttpURLConnection conn) throws IOException, OozieClientException {
+ conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
Reader reader = new InputStreamReader(conn.getInputStream());
JSONObject json = (JSONObject) JSONValue.parse(reader);
@@ -548,6 +797,30 @@ protected List call(HttpURLConnection conn) throws IOException, Ooz
}
}
+ private class CoordJobsStatus extends ClientCallable> {
+
+ CoordJobsStatus(String filter, int start, int len) {
+ super("GET", RestConstants.JOBS, "", prepareParams(RestConstants.JOBS_FILTER_PARAM, filter,
+ RestConstants.JOBTYPE_PARAM, "coord", RestConstants.OFFSET_PARAM, Integer.toString(start),
+ RestConstants.LEN_PARAM, Integer.toString(len)));
+ }
+
+ @SuppressWarnings("unchecked")
+ protected List call(HttpURLConnection conn) throws IOException, OozieClientException {
+ conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
+ if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+ Reader reader = new InputStreamReader(conn.getInputStream());
+ JSONObject json = (JSONObject) JSONValue.parse(reader);
+ JSONArray jobs = (JSONArray) json.get(JsonTags.COORDINATOR_JOBS);
+ return JsonCoordinatorJob.fromJSONArray(jobs);
+ }
+ else {
+ handleError(conn);
+ }
+ return null;
+ }
+ }
+
/**
* Return the info of the workflow jobs that match the filter.
*
@@ -562,9 +835,8 @@ public List getJobsInfo(String filter, int start, int len) throws O
}
/**
- * Return the info of the workflow jobs that match the filter.
- *
- * It returns the first 100 jobs that match the filter.
+ * Return the info of the workflow jobs that match the filter. It returns the first 100 jobs that match the
+ * filter.
*
* @param filter job filter. Refer to the {@link OozieClient} for the filter syntax.
* @return a list with the workflow jobs info, without node details.
@@ -574,6 +846,42 @@ public List getJobsInfo(String filter) throws OozieClientException
return getJobsInfo(filter, 1, 50);
}
+ /**
+ * Print sla info about coordinator and workflow jobs and actions.
+ *
+ * @param start starting offset
+ * @param len number of results
+ *
+ * @throws OozieClientException thrown if the SLA info could not be retrieved
+ */
+ public void getSlaInfo(int start, int len) throws OozieClientException {
+ new SlaInfo(start, len).call();
+ }
+
+ private class SlaInfo extends ClientCallable {
+
+ SlaInfo(int start, int len) {
+ super("GET", RestConstants.SLA, "", prepareParams(RestConstants.SLA_GT_SEQUENCE_ID,
+ Integer.toString(start), RestConstants.MAX_EVENTS, Integer.toString(len)));
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Void call(HttpURLConnection conn) throws IOException, OozieClientException {
+ conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
+ if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+ BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+ String line = null;
+ while ((line = br.readLine()) != null) {
+ System.out.println(line);
+ }
+ }
+ else {
+ handleError(conn);
+ }
+ return null;
+ }
+ }
+
private class JobIdAction extends ClientCallable {
JobIdAction(String externalId) {
@@ -595,9 +903,7 @@ protected String call(HttpURLConnection conn) throws IOException, OozieClientExc
}
/**
- * Return the workflow job Id for an external Id.
- *
- * The external Id must have provided at job creation time.
+ * Return the workflow job Id for an external Id. The external Id must have been provided at job creation time.
*
* @param externalId external Id given at job creation time.
* @return the workflow job Id for an external Id, null
if none.
@@ -607,13 +913,13 @@ public String getJobId(String externalId) throws OozieClientException {
return new JobIdAction(externalId).call();
}
- private class SetSafeMode extends ClientCallable{
+ private class SetSystemMode extends ClientCallable {
- public SetSafeMode(boolean status) {
- super("PUT", RestConstants.ADMIN, RestConstants.ADMIN_STATUS_RESOURCE,
- prepareParams(RestConstants.ADMIN_SAFE_MODE_PARAM, status+""));
+ public SetSystemMode(SYSTEM_MODE status) {
+ super("PUT", RestConstants.ADMIN, RestConstants.ADMIN_STATUS_RESOURCE, prepareParams(
+ RestConstants.ADMIN_SYSTEM_MODE_PARAM, status + ""));
}
-
+
public Void call(HttpURLConnection conn) throws IOException, OozieClientException {
if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
handleError(conn);
@@ -621,47 +927,49 @@ public Void call(HttpURLConnection conn) throws IOException, OozieClientExceptio
return null;
}
}
-
+
/**
- * Enable or disable safe mode. Used by OozieCLI.
- *
- * In safe mode, Oozie would not accept any commands except status command to
- * change and view the safe mode status.
- *
+ * Enable or disable safe mode. Used by OozieCLI. In safe mode, Oozie would not accept any commands except status
+ * command to change and view the safe mode status.
+ *
* @param status true to enable safe mode, false to disable safe mode.
* @throws OozieClientException if it fails to set the safe mode status.
*/
- public void setSafeMode(boolean status) throws OozieClientException {
- new SetSafeMode(status).call();
+ public void setSystemMode(SYSTEM_MODE status) throws OozieClientException {
+ new SetSystemMode(status).call();
}
- private class GetSafeMode extends ClientCallable {
+ private class GetSystemMode extends ClientCallable {
- GetSafeMode() {
+ GetSystemMode() {
super("GET", RestConstants.ADMIN, RestConstants.ADMIN_STATUS_RESOURCE, prepareParams());
}
- protected Boolean call(HttpURLConnection conn) throws IOException, OozieClientException {
+ protected SYSTEM_MODE call(HttpURLConnection conn) throws IOException, OozieClientException {
if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
Reader reader = new InputStreamReader(conn.getInputStream());
JSONObject json = (JSONObject) JSONValue.parse(reader);
- return (Boolean)json.get(JsonTags.SYSTEM_SAFE_MODE);
+ return SYSTEM_MODE.valueOf((String) json.get(JsonTags.OOZIE_SYSTEM_MODE));
}
else {
handleError(conn);
}
- return true;
+ return SYSTEM_MODE.NORMAL;
}
}
/**
* Returns if Oozie is in safe mode or not.
- *
- * @return true if safe mode is ON
false if safe mode is OFF
+ *
+ * @return true if safe mode is ON
false if safe mode is OFF
* @throws OozieClientException throw if it could not obtain the safe mode status.
*/
- public boolean isInSafeMode() throws OozieClientException {
- return new GetSafeMode().call();
+ /*
+ * public boolean isInSafeMode() throws OozieClientException { return new
+ * GetSafeMode().call(); }
+ */
+ public SYSTEM_MODE getSystemMode() throws OozieClientException {
+ return new GetSystemMode().call();
}
private class GetBuildVersion extends ClientCallable {
@@ -674,7 +982,7 @@ protected String call(HttpURLConnection conn) throws IOException, OozieClientExc
if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
Reader reader = new InputStreamReader(conn.getInputStream());
JSONObject json = (JSONObject) JSONValue.parse(reader);
- return (String)json.get(JsonTags.BUILD_VERSION);
+ return (String) json.get(JsonTags.BUILD_VERSION);
}
else {
handleError(conn);
@@ -702,4 +1010,16 @@ public String getClientBuildVersion() {
return BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VERSION);
}
+ /**
+ * Return the info of the coordinator jobs that match the filter.
+ *
+ * @param filter job filter. Refer to the {@link OozieClient} for the filter syntax.
+ * @param start jobs offset, base 1.
+ * @param len number of jobs to return.
+ * @return a list with the coordinator jobs info
+ * @throws OozieClientException thrown if the jobs info could not be retrieved.
+ */
+ public List getCoordJobsInfo(String filter, int start, int len) throws OozieClientException {
+ return new CoordJobsStatus(filter, start, len).call();
+ }
}
diff --git a/client/src/main/java/org/apache/oozie/client/OozieClientException.java b/client/src/main/java/org/apache/oozie/client/OozieClientException.java
index 9a551d757..6e40e0bb7 100644
--- a/client/src/main/java/org/apache/oozie/client/OozieClientException.java
+++ b/client/src/main/java/org/apache/oozie/client/OozieClientException.java
@@ -67,7 +67,7 @@ public OozieClientException(String errorCode, String message, Throwable cause) {
/**
* Return the exception error code.
*
- * @return the exception error code.
+ * @return the exception error code.
*/
public String getErrorCode() {
return errorCode;
diff --git a/client/src/main/java/org/apache/oozie/client/SLAEvent.java b/client/src/main/java/org/apache/oozie/client/SLAEvent.java
new file mode 100644
index 000000000..e955dde45
--- /dev/null
+++ b/client/src/main/java/org/apache/oozie/client/SLAEvent.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client;
+
+import java.util.Date;
+
+/**
+ * Bean that represents a SLA event
+ */
+public interface SLAEvent {
+
+ /**
+ * Defines the possible status of an SLA events or Job status for SLA events.
+ */
+ public static enum Status {
+ CREATED, STARTED, SUCCEEDED, KILLED, FAILED
+ }
+
+ /**
+ * Defines the possible status of an SLA events.
+ */
+ public static enum SlaAppType {
+ COORDINATOR_ACTION, COORDINATOR_JOB, WORKFLOW_JOB, WORKFLOW_ACTION
+ }
+
+ public long getEvent_id();
+
+ public String getSlaId();
+
+ public SlaAppType getAppType();
+
+ public String getAppName();
+
+ public String getUser();
+
+ public String getGroupName();
+
+ public String getParentClientId();
+
+ public String getParentSlaId();
+
+ public Date getExpectedStart();
+
+ public Date getExpectedEnd();
+
+ public Date getStatusTimestamp();
+
+ public String getNotificationMsg();
+
+ public String getAlertContact();
+
+ public String getDevContact();
+
+ public String getQaContact();
+
+ public String getSeContact();
+
+ public String getAlertFrequency();
+
+ public String getAlertPercentage();
+
+ public String getUpstreamApps();
+
+ public Status getJobStatus();
+
+ public String getJobData();
+
+}
diff --git a/client/src/main/java/org/apache/oozie/client/WorkflowAction.java b/client/src/main/java/org/apache/oozie/client/WorkflowAction.java
index 89f9fdfbf..921536604 100644
--- a/client/src/main/java/org/apache/oozie/client/WorkflowAction.java
+++ b/client/src/main/java/org/apache/oozie/client/WorkflowAction.java
@@ -38,7 +38,7 @@ public static enum Status {
END_RETRY,
END_MANUAL,
KILLED,
- FAILED,}
+ FAILED, }
/**
* Return the action action ID.
diff --git a/client/src/main/java/org/apache/oozie/client/WorkflowJob.java b/client/src/main/java/org/apache/oozie/client/WorkflowJob.java
index aba7f71b8..5778c8c3a 100644
--- a/client/src/main/java/org/apache/oozie/client/WorkflowJob.java
+++ b/client/src/main/java/org/apache/oozie/client/WorkflowJob.java
@@ -33,7 +33,7 @@ public static enum Status {
}
//add NAME
-
+
/**
* Return the path to the workflow application for the workflow job.
*
@@ -57,8 +57,8 @@ public static enum Status {
/**
* Return the job configuration.
- *
- * @return the job configuration.
+ *
+ * @return the job configuration.
*/
String getConf();
@@ -74,7 +74,7 @@ public static enum Status {
*
* @return the workflow job last modified time.
*/
- Date getLastModTime();
+ Date getLastModifiedTime();
/**
* Return the workflow job creation time.
@@ -112,9 +112,7 @@ public static enum Status {
String getGroup();
/**
- * Return the workflow job run number.
- *
- * Except for reruns, this property is always 1.
+ * Return the workflow job run number. Except for reruns, this property is always 1.
*
* @return the workflow job run number.
*/
@@ -130,7 +128,7 @@ public static enum Status {
/**
* Return the workflow nodes that already executed and are executing.
*
- * @return the workflow nodes that already executed and are executing.
+ * @return the workflow nodes that already executed and are executing.
*/
List getActions();
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonBean.java b/client/src/main/java/org/apache/oozie/client/rest/JsonBean.java
index b324e9dfb..9dab35b7d 100644
--- a/client/src/main/java/org/apache/oozie/client/rest/JsonBean.java
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonBean.java
@@ -26,6 +26,7 @@ public interface JsonBean {
/**
* Return the JSONObject for the bean.
+ *
* @return the JSONObject for the bean.
*/
public JSONObject toJSONObject();
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonCoordinatorAction.java b/client/src/main/java/org/apache/oozie/client/rest/JsonCoordinatorAction.java
new file mode 100644
index 000000000..55f7618c4
--- /dev/null
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonCoordinatorAction.java
@@ -0,0 +1,374 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client.rest;
+
+import java.util.List;
+
+import java.util.Date;
+
+import org.apache.oozie.client.CoordinatorAction;
+
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+import java.text.MessageFormat;
+import java.util.ArrayList;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "COORD_ACTIONS")
+@DiscriminatorColumn(name = "bean_type", discriminatorType = DiscriminatorType.STRING)
+public class JsonCoordinatorAction implements CoordinatorAction, JsonBean {
+
+ @Id
+ private String id;
+
+ @Transient
+ private String jobId;
+
+ @Basic
+ @Column(name = "job_type")
+ private String type;
+
+ @Transient
+ private Status status = CoordinatorAction.Status.WAITING;
+
+ @Basic
+ @Column(name = "action_number")
+ private int actionNumber;
+
+ @Transient
+ private Date createdTime;
+
+ @Column(name = "created_conf")
+ @Lob
+ private String createdConf;
+
+ @Transient
+ private String externalId;
+
+ @Basic
+ @Column(name = "time_out")
+ private int timeOut = 0;
+
+ @Transient
+ private Date lastModifiedTime;
+
+ @Transient
+ private Date nominalTime;
+
+ @Column(name = "run_conf")
+ @Lob
+ private String runConf;
+
+ @Column(name = "action_xml")
+ @Lob
+ private String actionXml;
+
+ @Column(name = "missing_dependencies")
+ @Lob
+ private String missingDependencies;
+
+ @Basic
+ @Column(name = "external_status")
+ private String externalStatus;
+
+ @Basic
+ @Column(name = "tracker_uri")
+ private String trackerUri;
+
+ @Basic
+ @Column(name = "console_url")
+ private String consoleUrl;
+
+ @Basic
+ @Column(name = "error_code")
+ private String errorCode;
+
+ @Basic
+ @Column(name = "error_message")
+ private String errorMessage;
+
+ public JsonCoordinatorAction() {
+
+ }
+
+ public JsonCoordinatorAction(JSONObject jsonObject) {
+ id = (String) jsonObject.get(JsonTags.COORDINATOR_ACTION_ID);
+ jobId = (String) jsonObject.get(JsonTags.COORDINATOR_JOB_ID);
+
+ type = (String) jsonObject.get(JsonTags.COORDINATOR_ACTION_TYPE);
+ actionNumber = (int) JsonUtils.getLongValue(jsonObject,
+ JsonTags.COORDINATOR_ACTION_NUMBER);
+ createdConf = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_CREATED_CONF);
+ createdTime = JsonUtils.parseDateRfc822((String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_CREATED_TIME));
+ externalId = (String) jsonObject.get(JsonTags.COORDINATOR_ACTION_EXTERNALID);
+ status = Status.valueOf((String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_STATUS));
+ lastModifiedTime = JsonUtils.parseDateRfc822((String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_LAST_MODIFIED_TIME));
+ /*
+ * startTime = JsonUtils.parseDateRfc822((String) jsonObject
+ * .get(JsonTags.COORDINATOR_ACTION_START_TIME)); endTime =
+ * JsonUtils.parseDateRfc822((String) jsonObject
+ * .get(JsonTags.COORDINATOR_ACTION_END_TIME));
+ */
+ runConf = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_RUNTIME_CONF);
+ missingDependencies = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_MISSING_DEPS);
+ externalStatus = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_EXTERNAL_STATUS);
+ trackerUri = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_TRACKER_URI);
+ consoleUrl = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_CONSOLE_URL);
+ errorCode = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_ERROR_CODE);
+ errorMessage = (String) jsonObject
+ .get(JsonTags.COORDINATOR_ACTION_ERROR_MESSAGE);
+ }
+
+ @SuppressWarnings("unchecked")
+ public JSONObject toJSONObject() {
+ JSONObject json = new JSONObject();
+ json.put(JsonTags.COORDINATOR_ACTION_ID, id);
+ json.put(JsonTags.COORDINATOR_JOB_ID, jobId);
+ json.put(JsonTags.COORDINATOR_ACTION_TYPE, type);
+ json.put(JsonTags.COORDINATOR_ACTION_NUMBER, actionNumber);
+ json.put(JsonTags.COORDINATOR_ACTION_CREATED_CONF, createdConf);
+ json.put(JsonTags.COORDINATOR_ACTION_CREATED_TIME, JsonUtils
+ .formatDateRfc822(createdTime));
+ json.put(JsonTags.COORDINATOR_ACTION_EXTERNALID, externalId);
+ // json.put(JsonTags.COORDINATOR_ACTION_START_TIME, JsonUtils
+ // .formatDateRfc822(startTime));
+ json.put(JsonTags.COORDINATOR_ACTION_STATUS, status.toString());
+ json.put(JsonTags.COORDINATOR_ACTION_RUNTIME_CONF, runConf);
+ json.put(JsonTags.COORDINATOR_ACTION_LAST_MODIFIED_TIME, JsonUtils
+ .formatDateRfc822(lastModifiedTime));
+ // json.put(JsonTags.COORDINATOR_ACTION_START_TIME, JsonUtils
+ // .formatDateRfc822(startTime));
+ // json.put(JsonTags.COORDINATOR_ACTION_END_TIME, JsonUtils
+ // .formatDateRfc822(endTime));
+ json.put(JsonTags.COORDINATOR_ACTION_MISSING_DEPS, missingDependencies);
+ json.put(JsonTags.COORDINATOR_ACTION_EXTERNAL_STATUS, externalStatus);
+ json.put(JsonTags.COORDINATOR_ACTION_TRACKER_URI, trackerUri);
+ json.put(JsonTags.COORDINATOR_ACTION_CONSOLE_URL, consoleUrl);
+ json.put(JsonTags.COORDINATOR_ACTION_ERROR_CODE, errorCode);
+ json.put(JsonTags.COORDINATOR_ACTION_ERROR_MESSAGE, errorMessage);
+ return json;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(String id) {
+ this.jobId = id;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getExternalId() {
+ return externalId;
+ }
+
+ public void setExternalId(String extId) {
+ this.externalId = extId;
+ }
+
+
+ public void setActionNumber(int actionNumber) {
+ this.actionNumber = actionNumber;
+ }
+
+ public int getActionNumber() {
+ return actionNumber;
+ }
+
+ public String getCreatedConf() {
+ return createdConf;
+ }
+
+ public void setCreatedConf(String createdConf) {
+ this.createdConf = createdConf;
+ }
+
+ public void setCreatedTime(Date createdTime) {
+ this.createdTime = createdTime;
+ }
+
+ public Date getCreatedTime() {
+ return createdTime;
+ }
+
+ public Status getStatus() {
+ return status;
+ }
+
+ public void setStatus(Status status) {
+ this.status = status;
+ }
+
+ public void setLastModifiedTime(Date lastModifiedTime) {
+ this.lastModifiedTime = lastModifiedTime;
+ }
+
+ public Date getLastModifiedTime() {
+ return lastModifiedTime;
+ }
+
+ public void setRunConf(String runConf) {
+ this.runConf = runConf;
+ }
+
+ public String getRunConf() {
+ return runConf;
+ }
+
+ public void setMissingDependencies(String missingDependencies) {
+ this.missingDependencies = missingDependencies;
+ }
+
+ public String getMissingDependencies() {
+ return missingDependencies;
+ }
+
+ public String getExternalStatus() {
+ return externalStatus;
+ }
+
+ public void setExternalStatus(String externalStatus) {
+ this.externalStatus = externalStatus;
+ }
+
+ public String getTrackerUri() {
+ return trackerUri;
+ }
+
+ public void setTrackerUri(String trackerUri) {
+ this.trackerUri = trackerUri;
+ }
+
+ public String getConsoleUrl() {
+ return consoleUrl;
+ }
+
+ public void setConsoleUrl(String consoleUrl) {
+ this.consoleUrl = consoleUrl;
+ }
+
+ public String getErrorCode() {
+ return errorCode;
+ }
+
+ public String getErrorMessage() {
+ return errorMessage;
+ }
+
+ public void setErrorInfo(String errorCode, String errorMessage) {
+ this.errorCode = errorCode;
+ this.errorMessage = errorMessage;
+ }
+
+ public String getActionXml() {
+ return actionXml;
+ }
+
+ public void setActionXml(String actionXml) {
+ this.actionXml = actionXml;
+ }
+
+ public String toString() {
+ return MessageFormat.format("WorkflowAction name[{0}] status[{1}]",
+ getId(), getStatus());
+ }
+
+ public Date getNominalTime() {
+ return nominalTime;
+ }
+
+ public void setNominalTime(Date nominalTime) {
+ this.nominalTime = nominalTime;
+ }
+
+ public int getTimeOut() {
+ return timeOut;
+ }
+
+ public void setTimeOut(int timeOut) {
+ this.timeOut = timeOut;
+ }
+
+
+ public void setErrorCode(String errorCode) {
+ this.errorCode = errorCode;
+ }
+
+ public void setErrorMessage(String errorMessage) {
+ this.errorMessage = errorMessage;
+ }
+
+ /**
+ * Convert a nodes list into a JSONArray.
+ *
+ * @param nodes nodes list.
+ * @return the corresponding JSON array.
+ */
+ @SuppressWarnings("unchecked")
+ public static JSONArray toJSONArray(
+ List<? extends JsonCoordinatorAction> actions) {
+ JSONArray array = new JSONArray();
+ for (JsonCoordinatorAction action : actions) {
+ array.add(action.toJSONObject());
+ }
+ return array;
+ }
+
+ /**
+ * Convert a JSONArray into a nodes list.
+ *
+ * @param array JSON array.
+ * @return the corresponding nodes list.
+ */
+ @SuppressWarnings("unchecked")
+ public static List<CoordinatorAction> fromJSONArray(JSONArray array) {
+ List<CoordinatorAction> list = new ArrayList<CoordinatorAction>();
+ for (Object obj : array) {
+ list.add(new JsonCoordinatorAction((JSONObject) obj));
+ }
+ return list;
+ }
+}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonCoordinatorJob.java b/client/src/main/java/org/apache/oozie/client/rest/JsonCoordinatorJob.java
new file mode 100644
index 000000000..962d0db84
--- /dev/null
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonCoordinatorJob.java
@@ -0,0 +1,392 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client.rest;
+
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.CoordinatorAction;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+import java.text.MessageFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.ArrayList;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "COORD_JOBS")
+@DiscriminatorColumn(name = "bean_type", discriminatorType = DiscriminatorType.STRING)
+public class JsonCoordinatorJob implements CoordinatorJob, JsonBean {
+
+ @Id
+ private String id;
+
+ @Basic
+ @Column(name = "app_path")
+ private String appPath = null;
+
+ @Basic
+ @Column(name = "app_name")
+ private String appName = null;
+
+ @Basic
+ @Column(name = "external_id")
+ private String externalId = null;
+
+ @Column(name = "conf")
+ @Lob
+ private String conf = null;
+
+ @Transient
+ private Status status = CoordinatorJob.Status.PREP;
+
+ @Transient
+ private Execution executionOrder = CoordinatorJob.Execution.LIFO;
+
+ @Transient
+ private Date startTime;
+
+ @Transient
+ private Date endTime;
+
+ @Basic
+ @Column(name = "frequency")
+ private int frequency = 0;
+
+ @Basic
+ @Column(name = "time_zone")
+ private String timeZone = null;
+
+ @Basic
+ @Column(name = "concurrency")
+ private int concurrency = 0;
+
+ @Transient
+ private Timeunit timeUnit = CoordinatorJob.Timeunit.MINUTE;
+
+ @Basic
+ @Column(name = "time_out")
+ private int timeOut = 0;
+
+ @Transient
+ private Date lastAction;
+
+ @Basic
+ @Column(name = "last_action_number")
+ private int lastActionNumber;
+
+ @Transient
+ private Date nextMaterializedTime;
+
+ @Basic
+ @Column(name = "user_name")
+ private String user = null;
+
+ @Basic
+ @Column(name = "group_name")
+ private String group = null;
+
+ @Basic
+ @Column(name = "bundle_id")
+ private String bundleId = null;
+
+ @Transient
+ private String consoleUrl;
+
+ @Transient
+ private List<? extends JsonCoordinatorAction> actions;
+
+ public JsonCoordinatorJob() {
+ actions = new ArrayList<JsonCoordinatorAction>();
+ }
+
+ public JsonCoordinatorJob(JSONObject json) {
+ appPath = (String) json.get(JsonTags.COORDINATOR_JOB_PATH);
+ appName = (String) json.get(JsonTags.COORDINATOR_JOB_NAME);
+ id = (String) json.get(JsonTags.COORDINATOR_JOB_ID);
+ externalId = (String) json.get(JsonTags.COORDINATOR_JOB_EXTERNAL_ID);
+ conf = (String) json.get(JsonTags.COORDINATOR_JOB_CONF);
+ status = Status.valueOf((String) json.get(JsonTags.COORDINATOR_JOB_STATUS));
+ executionOrder = Execution.valueOf((String) json.get(JsonTags.COORDINATOR_JOB_EXECUTIONPOLICY));
+ startTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.COORDINATOR_JOB_START_TIME));
+ endTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.COORDINATOR_JOB_END_TIME));
+ frequency = (int) JsonUtils.getLongValue(json, JsonTags.COORDINATOR_JOB_FREQUENCY);
+ timeUnit = Timeunit.valueOf((String) json.get(JsonTags.COORDINATOR_JOB_TIMEUNIT));
+ timeZone = (String) json.get(JsonTags.COORDINATOR_JOB_TIMEZONE);
+ concurrency = (int) JsonUtils.getLongValue(json, JsonTags.COORDINATOR_JOB_CONCURRENCY);
+ timeOut = (int) JsonUtils.getLongValue(json, JsonTags.COORDINATOR_JOB_TIMEOUT);
+ lastAction = JsonUtils.parseDateRfc822((String) json.get(JsonTags.COORDINATOR_JOB_LAST_ACTION_TIME));
+ nextMaterializedTime = JsonUtils.parseDateRfc822((String) json
+ .get(JsonTags.COORDINATOR_JOB_NEXT_MATERIALIZED_TIME));
+ user = (String) json.get(JsonTags.COORDINATOR_JOB_USER);
+ group = (String) json.get(JsonTags.COORDINATOR_JOB_GROUP);
+ consoleUrl = (String) json.get(JsonTags.COORDINATOR_JOB_CONSOLE_URL);
+ actions = JsonCoordinatorAction.fromJSONArray((JSONArray) json.get(JsonTags.COORDINATOR_ACTIONS));
+ }
+
+ @SuppressWarnings("unchecked")
+ public JSONObject toJSONObject() {
+ JSONObject json = new JSONObject();
+ json.put(JsonTags.COORDINATOR_JOB_PATH, appPath);
+ json.put(JsonTags.COORDINATOR_JOB_NAME, appName);
+ json.put(JsonTags.COORDINATOR_JOB_ID, id);
+ json.put(JsonTags.COORDINATOR_JOB_EXTERNAL_ID, externalId);
+ json.put(JsonTags.COORDINATOR_JOB_CONF, conf);
+ json.put(JsonTags.COORDINATOR_JOB_STATUS, status.toString());
+ json.put(JsonTags.COORDINATOR_JOB_EXECUTIONPOLICY, executionOrder.toString());
+ json.put(JsonTags.COORDINATOR_JOB_FREQUENCY, frequency);
+ json.put(JsonTags.COORDINATOR_JOB_TIMEUNIT, timeUnit.toString());
+ json.put(JsonTags.COORDINATOR_JOB_TIMEZONE, timeZone);
+ json.put(JsonTags.COORDINATOR_JOB_CONCURRENCY, concurrency);
+ json.put(JsonTags.COORDINATOR_JOB_TIMEOUT, timeOut);
+ json.put(JsonTags.COORDINATOR_JOB_LAST_ACTION_TIME, JsonUtils.formatDateRfc822(lastAction));
+ json.put(JsonTags.COORDINATOR_JOB_NEXT_MATERIALIZED_TIME, JsonUtils.formatDateRfc822(nextMaterializedTime));
+ json.put(JsonTags.COORDINATOR_JOB_START_TIME, JsonUtils.formatDateRfc822(startTime));
+ json.put(JsonTags.COORDINATOR_JOB_END_TIME, JsonUtils.formatDateRfc822(endTime));
+ json.put(JsonTags.COORDINATOR_JOB_USER, user);
+ json.put(JsonTags.COORDINATOR_JOB_GROUP, group);
+ json.put(JsonTags.COORDINATOR_JOB_CONSOLE_URL, consoleUrl);
+ json.put(JsonTags.COORDINATOR_ACTIONS, JsonCoordinatorAction.toJSONArray(actions));
+
+ return json;
+ }
+
+ public String getAppPath() {
+ return appPath;
+ }
+
+ public void setAppPath(String appPath) {
+ this.appPath = appPath;
+ }
+
+ public String getAppName() {
+ return appName;
+ }
+
+ public void setAppName(String appName) {
+ this.appName = appName;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public void setExternalId(String externalId) {
+ this.externalId = externalId;
+ }
+
+ public String getExternalId() {
+ return externalId;
+ }
+
+ public String getConf() {
+ return conf;
+ }
+
+ public void setConf(String conf) {
+ this.conf = conf;
+ }
+
+ public Status getStatus() {
+ return status;
+ }
+
+ public void setStatus(Status status) {
+ this.status = status;
+ }
+
+ public void setFrequency(int frequency) {
+ this.frequency = frequency;
+ }
+
+ public int getFrequency() {
+ return frequency;
+ }
+
+ public void setTimeUnit(Timeunit timeUnit) {
+ this.timeUnit = timeUnit;
+ }
+
+ public Timeunit getTimeUnit() {
+ return timeUnit;
+ }
+
+ public void setTimeZone(String timeZone) {
+ this.timeZone = timeZone;
+ }
+
+ public String getTimeZone() {
+ return timeZone;
+ }
+
+ public void setConcurrency(int concurrency) {
+ this.concurrency = concurrency;
+ }
+
+ public int getConcurrency() {
+ return concurrency;
+ }
+
+ public void setExecutionOrder(Execution order) {
+ this.executionOrder = order;
+ }
+
+ public Execution getExecutionOrder() {
+ return executionOrder;
+ }
+
+ public void setTimeout(int timeOut) {
+ this.timeOut = timeOut;
+ }
+
+ public int getTimeout() {
+ return timeOut;
+ }
+
+ public void setLastActionTime(Date lastAction) {
+ this.lastAction = lastAction;
+ }
+
+ public Date getLastActionTime() {
+ return lastAction;
+ }
+
+ public Date getNextMaterializedTime() {
+ return nextMaterializedTime;
+ }
+
+ public void setNextMaterializedTime(Date nextMaterializedTime) {
+ this.nextMaterializedTime = nextMaterializedTime;
+ }
+
+ public Date getStartTime() {
+ return startTime;
+ }
+
+ public void setStartTime(Date startTime) {
+ this.startTime = startTime;
+ }
+
+ public Date getEndTime() {
+ return endTime;
+ }
+
+ public void setEndTime(Date endTime) {
+ this.endTime = endTime;
+ }
+
+ public String getUser() {
+ return user;
+ }
+
+ public void setUser(String user) {
+ this.user = user;
+ }
+
+ public String getGroup() {
+ return group;
+ }
+
+ public void setGroup(String group) {
+ this.group = group;
+ }
+
+ public String getBundleId() {
+ return bundleId;
+ }
+
+ public void setBundleId(String bundleId) {
+ this.bundleId = bundleId;
+ }
+
+ /**
+ * Return the coordinate application console URL.
+ *
+ * @return the coordinate application console URL.
+ */
+ public String getConsoleUrl() {
+ return consoleUrl;
+ }
+
+ /**
+ * Set the coordinate application console URL.
+ *
+ * @param consoleUrl the coordinate application console URL.
+ */
+ public void setConsoleUrl(String consoleUrl) {
+ this.consoleUrl = consoleUrl;
+ }
+
+ public String toString() {
+ return MessageFormat.format("Coornidator application id[{0}] status[{1}]", getId(), getStatus());
+ }
+
+ public void setActions(List<? extends JsonCoordinatorAction> nodes) {
+ this.actions = (nodes != null) ? nodes : new ArrayList<JsonCoordinatorAction>();
+ }
+
+ @SuppressWarnings("unchecked")
+ public List<CoordinatorAction> getActions() {
+ return (List) actions;
+ }
+
+ /**
+ * Convert a coordinator application list into a JSONArray.
+ *
+ * @param applications application list.
+ * @return the corresponding JSON array.
+ */
+ @SuppressWarnings("unchecked")
+ public static JSONArray toJSONArray(List<? extends JsonCoordinatorJob> applications) {
+ JSONArray array = new JSONArray();
+ if (applications != null) {
+ for (JsonCoordinatorJob application : applications) {
+ array.add(application.toJSONObject());
+ }
+ }
+ return array;
+ }
+
+ /**
+ * Convert a JSONArray into a application list.
+ *
+ * @param array JSON array.
+ * @return the corresponding application list.
+ */
+ @SuppressWarnings("unchecked")
+ public static List<CoordinatorJob> fromJSONArray(JSONArray applications) {
+ List<CoordinatorJob> list = new ArrayList<CoordinatorJob>();
+ for (Object obj : applications) {
+ list.add(new JsonCoordinatorJob((JSONObject) obj));
+ }
+ return list;
+ }
+
+ public int getLastActionNumber() {
+ return lastActionNumber;
+ }
+
+ public void setLastActionNumber(int lastActionNumber) {
+ this.lastActionNumber = lastActionNumber;
+ }
+}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonSLAEvent.java b/client/src/main/java/org/apache/oozie/client/rest/JsonSLAEvent.java
new file mode 100644
index 000000000..fbee9a21b
--- /dev/null
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonSLAEvent.java
@@ -0,0 +1,311 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client.rest;
+
+import java.util.Date;
+
+import javax.persistence.Basic;
+import javax.persistence.Column;
+import javax.persistence.DiscriminatorColumn;
+import javax.persistence.DiscriminatorType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+import javax.persistence.Transient;
+
+import org.apache.oozie.client.SLAEvent;
+import org.json.simple.JSONObject;
+
+@Entity
+@Table(name = "SLA_EVENTS")
+@DiscriminatorColumn(name = "bean_type", discriminatorType = DiscriminatorType.STRING)
+public class JsonSLAEvent implements SLAEvent, JsonBean {
+ // Primary key
+ @Id
+ @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "EVENT_SEQ")
+ @SequenceGenerator(name = "EVENT_SEQ", sequenceName = "EVENT_SEQ", allocationSize = 50)
+ private long event_id;
+
+ @Basic
+ @Column(name = "sla_id")
+ private String slaId;
+
+ @Transient
+ private SlaAppType appType = null;
+
+ @Basic
+ @Column(name = "app_name")
+ private String appName = null;
+
+ @Basic
+ @Column(name = "user_name")
+ private String user = null;
+
+ @Basic
+ @Column(name = "group_name")
+ private String groupName = null;
+
+ @Basic
+ @Column(name = "parent_client_id")
+ private String parentClientId = null;
+
+ @Basic
+ @Column(name = "parent_sla_id")
+ private String parentSlaId = null;
+
+ @Transient
+ private Date expectedStart = null;
+
+ @Transient
+ private Date expectedEnd = null;
+
+ @Transient
+ private Date statusTimestamp = null;
+
+ @Column(name = "notification_msg")
+ @Lob
+ private String notificationMsg = null;
+
+ @Basic
+ @Column(name = "alert_contact")
+ private String alertContact = null;
+
+ @Basic
+ @Column(name = "dev_contact")
+ private String devContact = null;
+
+ @Basic
+ @Column(name = "qa_contact")
+ private String qaContact = null;
+
+ @Basic
+ @Column(name = "se_contact")
+ private String seContact = null;
+
+ @Basic
+ @Column(name = "alert_frequency")
+ private String alertFrequency = null;
+
+ @Basic
+ @Column(name = "alert_percentage")
+ private String alertPercentage = null;
+
+ @Column(name = "upstream_apps")
+ @Lob
+ private String upstreamApps = null;
+
+ @Transient
+ private Status jobStatus = null;
+
+ @Column(name = "job_data")
+ @Lob
+ private String jobData = null;
+
+ public long getEvent_id() {
+ return event_id;
+ }
+
+ public void setEvent_id(long id) {
+ this.event_id = id;
+ }
+
+ public String getSlaId() {
+ return slaId;
+ }
+
+ public void setSlaId(String slaId) {
+ this.slaId = slaId;
+ }
+
+ /*
+ * public String getClientId() { return clientId; }
+ *
+ * public void setClientId(String clientId) { this.clientId = clientId; }
+ */
+ public SlaAppType getAppType() {
+ return appType;
+ }
+
+ public void setAppType(SlaAppType appType) {
+ this.appType = appType;
+ }
+
+ public String getAppName() {
+ return appName;
+ }
+
+ public void setAppName(String appName) {
+ this.appName = appName;
+ }
+
+ public String getUser() {
+ return user;
+ }
+
+ public void setUser(String user) {
+ this.user = user;
+ }
+
+ public String getGroupName() {
+ return groupName;
+ }
+
+ public void setGroupName(String groupName) {
+ this.groupName = groupName;
+ }
+
+ public String getParentClientId() {
+ return parentClientId;
+ }
+
+ public void setParentClientId(String parentClientId) {
+ this.parentClientId = parentClientId;
+ }
+
+ public String getParentSlaId() {
+ return parentSlaId;
+ }
+
+ public void setParentSlaId(String parentSlaId) {
+ this.parentSlaId = parentSlaId;
+ }
+
+ public Date getExpectedStart() {
+ return expectedStart;
+ }
+
+ public void setExpectedStart(Date expectedStart) {
+ this.expectedStart = expectedStart;
+ }
+
+ public Date getExpectedEnd() {
+ return expectedEnd;
+ }
+
+ public void setExpectedEnd(Date expectedEnd) {
+ this.expectedEnd = expectedEnd;
+ }
+
+ public Date getStatusTimestamp() {
+ return statusTimestamp;
+ }
+
+ public void setStatusTimestamp(Date statusTimestamp) {
+ this.statusTimestamp = statusTimestamp;
+ }
+
+ public String getNotificationMsg() {
+ return notificationMsg;
+ }
+
+ public void setNotificationMsg(String notificationMsg) {
+ this.notificationMsg = notificationMsg;
+ }
+
+ public String getAlertContact() {
+ return alertContact;
+ }
+
+ public void setAlertContact(String alertContact) {
+ this.alertContact = alertContact;
+ }
+
+ public String getDevContact() {
+ return devContact;
+ }
+
+ public void setDevContact(String devContact) {
+ this.devContact = devContact;
+ }
+
+ public String getQaContact() {
+ return qaContact;
+ }
+
+ public void setQaContact(String qaContact) {
+ this.qaContact = qaContact;
+ }
+
+ public String getSeContact() {
+ return seContact;
+ }
+
+ public void setSeContact(String seContact) {
+ this.seContact = seContact;
+ }
+
+ public String getAlertFrequency() {
+ return alertFrequency;
+ }
+
+ public void setAlertFrequency(String alertFrequency) {
+ this.alertFrequency = alertFrequency;
+ }
+
+ public String getAlertPercentage() {
+ return alertPercentage;
+ }
+
+ public void setAlertPercentage(String alertPercentage) {
+ this.alertPercentage = alertPercentage;
+ }
+
+ public String getUpstreamApps() {
+ return upstreamApps;
+ }
+
+ public void setUpstreamApps(String upstreamApps) {
+ this.upstreamApps = upstreamApps;
+ }
+
+ public Status getJobStatus() {
+ return jobStatus;
+ }
+
+ public void setJobStatus(Status jobStatus) {
+ this.jobStatus = jobStatus;
+ }
+
+ public String getJobData() {
+ return jobData;
+ }
+
+ public void setJobData(String jobData) {
+ this.jobData = jobData;
+ }
+
+ @Override
+ public JSONObject toJSONObject() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public JsonSLAEvent() {
+
+ }
+
+ @SuppressWarnings("unchecked")
+ public JsonSLAEvent(JSONObject json) {
+
+ }
+
+}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonTags.java b/client/src/main/java/org/apache/oozie/client/rest/JsonTags.java
index 7465b9411..be53934af 100644
--- a/client/src/main/java/org/apache/oozie/client/rest/JsonTags.java
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonTags.java
@@ -22,7 +22,8 @@
*/
public interface JsonTags {
- public static final String SYSTEM_SAFE_MODE = "safeMode";
+ public static final String OOZIE_SAFE_MODE = "safeMode"; //Applicable for V0 only
+ public static final String OOZIE_SYSTEM_MODE = "systemMode";
public static final String BUILD_VERSION = "buildVersion";
public static final String JOB_ID = "id";
@@ -48,22 +49,66 @@ public interface JsonTags {
public static final String WORKFLOWS_OFFSET = "offset";
public static final String WORKFLOWS_LEN = "len";
- public static final String ACTION_ID = "id";
- public static final String ACTION_NAME = "name";
- public static final String ACTION_TYPE = "type";
- public static final String ACTION_CONF = "conf";
- public static final String ACTION_RETRIES = "retries";
- public static final String ACTION_START_TIME = "startTime";
- public static final String ACTION_END_TIME = "endTime";
- public static final String ACTION_STATUS = "status";
- public static final String ACTION_TRANSITION = "transition";
- public static final String ACTION_DATA = "data";
- public static final String ACTION_EXTERNAL_ID = "externalId";
- public static final String ACTION_EXTERNAL_STATUS = "externalStatus";
- public static final String ACTION_TRACKER_URI = "trackerUri";
- public static final String ACTION_CONSOLE_URL = "consoleUrl";
- public static final String ACTION_ERROR_CODE = "errorCode";
- public static final String ACTION_ERROR_MESSAGE = "errorMessage";
+ public static final String WORKFLOW_ACTION_ID = "id";
+ public static final String WORKFLOW_ACTION_NAME = "name";
+ public static final String WORKFLOW_ACTION_TYPE = "type";
+ public static final String WORKFLOW_ACTION_CONF = "conf";
+ public static final String WORKFLOW_ACTION_RETRIES = "retries";
+ public static final String WORKFLOW_ACTION_START_TIME = "startTime";
+ public static final String WORKFLOW_ACTION_END_TIME = "endTime";
+ public static final String WORKFLOW_ACTION_STATUS = "status";
+ public static final String WORKFLOW_ACTION_TRANSITION = "transition";
+ public static final String WORKFLOW_ACTION_DATA = "data";
+ public static final String WORKFLOW_ACTION_EXTERNAL_ID = "externalId";
+ public static final String WORKFLOW_ACTION_EXTERNAL_STATUS = "externalStatus";
+ public static final String WORKFLOW_ACTION_TRACKER_URI = "trackerUri";
+ public static final String WORKFLOW_ACTION_CONSOLE_URL = "consoleUrl";
+ public static final String WORKFLOW_ACTION_ERROR_CODE = "errorCode";
+ public static final String WORKFLOW_ACTION_ERROR_MESSAGE = "errorMessage";
+
+
+ public static final String COORDINATOR_JOB_ID = "coordJobId";
+ public static final String COORDINATOR_JOB_NAME = "coordJobName";
+ public static final String COORDINATOR_JOB_PATH = "coordJobPath";
+ public static final String COORDINATOR_JOB_FREQUENCY = "frequency";
+ public static final String COORDINATOR_JOB_TIMEUNIT = "timeUnit";
+ public static final String COORDINATOR_JOB_TIMEZONE = "timeZone";
+ public static final String COORDINATOR_JOB_CONCURRENCY = "concurrency";
+ public static final String COORDINATOR_JOB_EXECUTION = "execution";
+ public static final String COORDINATOR_JOB_TIMEOUT = "timeOut";
+ public static final String COORDINATOR_JOB_LAST_ACTION_TIME = "lastAction";
+ public static final String COORDINATOR_JOB_NEXT_MATERIALIZED_TIME = "nextMaterializedTime";
+ public static final String COORDINATOR_JOB_CONF = "conf";
+ public static final String COORDINATOR_JOB_STATUS = "status";
+ public static final String COORDINATOR_JOB_EXECUTIONPOLICY = "executionPolicy";
+ public static final String COORDINATOR_JOB_START_TIME = "startTime";
+ public static final String COORDINATOR_JOB_END_TIME = "endTime";
+ public static final String COORDINATOR_JOB_CONSOLE_URL = "consoleUrl";
+ public static final String COORDINATOR_JOB_ACTIONS = "actions";
+ public static final String COORDINATOR_JOB_USER = "user";
+ public static final String COORDINATOR_JOB_GROUP = "group";
+ public static final String COORDINATOR_JOB_EXTERNAL_ID = "coordExternalId";
+
+ public static final String COORDINATOR_ACTION_ID = "id";
+ public static final String COORDINATOR_ACTION_NAME = "name";
+ public static final String COORDINATOR_ACTION_TYPE = "type";
+ public static final String COORDINATOR_ACTION_CREATED_CONF = "createdConf";
+ public static final String COORDINATOR_ACTION_RUNTIME_CONF = "runConf";
+ public static final String COORDINATOR_ACTION_NUMBER = "actionNumber";
+ public static final String COORDINATOR_ACTION_CREATED_TIME = "createdTime";
+ public static final String COORDINATOR_ACTION_EXTERNALID = "externalId";
+ public static final String COORDINATOR_ACTION_LAST_MODIFIED_TIME = "lastModifiedTime";
+ public static final String COORDINATOR_ACTION_NOMINAL_TIME = "nominalTime";
+ public static final String COORDINATOR_ACTION_STATUS = "status";
+ public static final String COORDINATOR_ACTION_MISSING_DEPS = "missingDependencies";
+ public static final String COORDINATOR_ACTION_EXTERNAL_STATUS = "externalStatus";
+ public static final String COORDINATOR_ACTION_TRACKER_URI = "trackerUri";
+ public static final String COORDINATOR_ACTION_CONSOLE_URL = "consoleUrl";
+ public static final String COORDINATOR_ACTION_ERROR_CODE = "errorCode";
+ public static final String COORDINATOR_ACTION_ERROR_MESSAGE = "errorMessage";
+ public static final String COORDINATOR_ACTIONS = "actions";
+ public static final String COORDINATOR_ACTION_DATA = "data";
+ public static final String COORDINATOR_JOB_DATA = "data";
public static final String ERROR = "error";
public static final String ERROR_CODE = "code";
@@ -91,5 +136,9 @@ public interface JsonTags {
public static final String INSTR_VARIABLE_VALUE = "value";
public static final String INSTR_SAMPLER_VALUE = "value";
+ public static final String COORDINATOR_JOBS = "coordinatorjobs";
+ public static final String COORD_JOB_TOTAL = "total";
+ public static final String COORD_JOB_OFFSET = "offset";
+ public static final String COORD_JOB_LEN = "len";
-}
\ No newline at end of file
+}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonUtils.java b/client/src/main/java/org/apache/oozie/client/rest/JsonUtils.java
index a6202c240..184c07c97 100644
--- a/client/src/main/java/org/apache/oozie/client/rest/JsonUtils.java
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonUtils.java
@@ -18,12 +18,16 @@
package org.apache.oozie.client.rest;
import org.json.simple.JSONObject;
+import org.json.simple.JSONArray;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
+import java.util.List;
+import java.util.ArrayList;
+
/**
* Json utils methods.
@@ -77,4 +81,23 @@ public static long getLongValue(JSONObject map, String name) {
return (l != null) ? l : 0;
}
+ /**
+ * Return a List value from a JSONObject.
+ *
+ * @param json JSON object.
+ * @param name name of the property.
+ * @return the List value associated with it, or null if not defined.
+ */
+ public static List<String> getListString(JSONObject json, String name) {
+ List<String> values = new ArrayList<String>();
+ JSONArray array = (JSONArray) json.get(name);
+ if (array == null) {
+ return null;
+ }
+
+ for (Object o : array) {
+ values.add((String) o);
+ }
+ return values;
+ }
}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowAction.java b/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowAction.java
index efcdd9ede..b43dbbcb4 100644
--- a/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowAction.java
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowAction.java
@@ -26,69 +26,119 @@
import java.util.Date;
import java.util.List;
+import javax.persistence.*;
+
/**
* Json Bean that represents an Oozie workflow node.
*/
+@Entity
+@Table(name = "WF_ACTIONS")
+@DiscriminatorColumn(name = "bean_type", discriminatorType = DiscriminatorType.STRING)
+
public class JsonWorkflowAction implements WorkflowAction, JsonBean {
+ @Id
private String id;
- private String name;
- private String type;
- private String conf;
+
+ @Basic
+ @Column(name = "name")
+ private String name = null;
+
+ @Basic
+ @Column(name = "type")
+ private String type = null;
+
+ @Basic
+ @Column(name = "conf")
+ @Lob
+ private String conf = null;
+
+ @Transient
private Status status = WorkflowAction.Status.PREP;
+
+ @Basic
+ @Column(name = "retries")
private int retries;
+
+ @Transient
private Date startTime;
+
+ @Transient
private Date endTime;
- private String transition;
- private String data;
- private String externalId;
- private String externalStatus;
- private String trackerUri;
- private String consoleUrl;
- private String errorCode;
- private String errorMessage;
+
+ @Basic
+ @Column(name = "transition")
+ private String transition = null;
+
+ @Column(name = "data")
+ @Lob
+ private String data = null;
+
+ @Basic
+ @Column(name = "external_id")
+ private String externalId = null;
+
+ @Basic
+ @Column(name = "external_status")
+ private String externalStatus = null;
+
+ @Basic
+ @Column(name = "tracker_uri")
+ private String trackerUri = null;
+
+ @Basic
+ @Column(name = "console_url")
+ private String consoleUrl = null;
+
+ @Basic
+ @Column(name = "error_code")
+ private String errorCode = null;
+
+ @Column(name = "error_message")
+ @Lob
+ private String errorMessage = null;
public JsonWorkflowAction() {
}
public JsonWorkflowAction(JSONObject jsonObject) {
- id = (String) jsonObject.get(JsonTags.ACTION_ID);
- name = (String) jsonObject.get(JsonTags.ACTION_NAME);
- type = (String) jsonObject.get(JsonTags.ACTION_TYPE);
- conf = (String) jsonObject.get(JsonTags.ACTION_CONF);
- status = Status.valueOf((String) jsonObject.get(JsonTags.ACTION_STATUS));
- retries = (int) JsonUtils.getLongValue(jsonObject, JsonTags.ACTION_RETRIES);
- startTime = JsonUtils.parseDateRfc822((String) jsonObject.get(JsonTags.ACTION_START_TIME));
- endTime = JsonUtils.parseDateRfc822((String) jsonObject.get(JsonTags.ACTION_END_TIME));
- transition = (String) jsonObject.get(JsonTags.ACTION_TRANSITION);
- data = (String) jsonObject.get(JsonTags.ACTION_DATA);
- externalId = (String) jsonObject.get(JsonTags.ACTION_EXTERNAL_ID);
- externalStatus = (String) jsonObject.get(JsonTags.ACTION_EXTERNAL_STATUS);
- trackerUri = (String) jsonObject.get(JsonTags.ACTION_TRACKER_URI);
- consoleUrl = (String) jsonObject.get(JsonTags.ACTION_CONSOLE_URL);
- errorCode = (String) jsonObject.get(JsonTags.ACTION_ERROR_CODE);
- errorMessage = (String) jsonObject.get(JsonTags.ACTION_ERROR_MESSAGE);
+ id = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_ID);
+ name = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_NAME);
+ type = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_TYPE);
+ conf = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_CONF);
+ status = Status.valueOf((String) jsonObject.get(JsonTags.WORKFLOW_ACTION_STATUS));
+ retries = (int) JsonUtils.getLongValue(jsonObject, JsonTags.WORKFLOW_ACTION_RETRIES);
+ startTime = JsonUtils.parseDateRfc822((String) jsonObject.get(JsonTags.WORKFLOW_ACTION_START_TIME));
+ endTime = JsonUtils.parseDateRfc822((String) jsonObject.get(JsonTags.WORKFLOW_ACTION_END_TIME));
+ transition = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_TRANSITION);
+ data = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_DATA);
+ externalId = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_EXTERNAL_ID);
+ externalStatus = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_EXTERNAL_STATUS);
+ trackerUri = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_TRACKER_URI);
+ consoleUrl = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_CONSOLE_URL);
+ errorCode = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_ERROR_CODE);
+ errorMessage = (String) jsonObject.get(JsonTags.WORKFLOW_ACTION_ERROR_MESSAGE);
}
@SuppressWarnings("unchecked")
public JSONObject toJSONObject() {
JSONObject json = new JSONObject();
- json.put(JsonTags.ACTION_ID, id);
- json.put(JsonTags.ACTION_NAME, name);
- json.put(JsonTags.ACTION_TYPE, type);
- json.put(JsonTags.ACTION_CONF, conf);
- json.put(JsonTags.ACTION_START_TIME, JsonUtils.formatDateRfc822(startTime));
- json.put(JsonTags.ACTION_STATUS, status.toString());
- json.put(JsonTags.ACTION_RETRIES, (long) retries);
- json.put(JsonTags.ACTION_START_TIME, JsonUtils.formatDateRfc822(startTime));
- json.put(JsonTags.ACTION_END_TIME, JsonUtils.formatDateRfc822(endTime));
- json.put(JsonTags.ACTION_TRANSITION, transition);
- json.put(JsonTags.ACTION_DATA, data);
- json.put(JsonTags.ACTION_EXTERNAL_ID, externalId);
- json.put(JsonTags.ACTION_EXTERNAL_STATUS, externalStatus);
- json.put(JsonTags.ACTION_TRACKER_URI, trackerUri);
- json.put(JsonTags.ACTION_CONSOLE_URL, consoleUrl);
- json.put(JsonTags.ACTION_ERROR_CODE, errorCode);
- json.put(JsonTags.ACTION_ERROR_MESSAGE, errorMessage);
+ json.put(JsonTags.WORKFLOW_ACTION_ID, id);
+ json.put(JsonTags.WORKFLOW_ACTION_NAME, name);
+ json.put(JsonTags.WORKFLOW_ACTION_TYPE, type);
+ json.put(JsonTags.WORKFLOW_ACTION_CONF, conf);
+ json.put(JsonTags.WORKFLOW_ACTION_START_TIME, JsonUtils.formatDateRfc822(startTime));
+ json.put(JsonTags.WORKFLOW_ACTION_STATUS, status.toString());
+ json.put(JsonTags.WORKFLOW_ACTION_RETRIES, (long) retries);
+ json.put(JsonTags.WORKFLOW_ACTION_START_TIME, JsonUtils.formatDateRfc822(startTime));
+ json.put(JsonTags.WORKFLOW_ACTION_END_TIME, JsonUtils.formatDateRfc822(endTime));
+ json.put(JsonTags.WORKFLOW_ACTION_TRANSITION, transition);
+ json.put(JsonTags.WORKFLOW_ACTION_DATA, data);
+ json.put(JsonTags.WORKFLOW_ACTION_EXTERNAL_ID, externalId);
+ json.put(JsonTags.WORKFLOW_ACTION_EXTERNAL_STATUS, externalStatus);
+ json.put(JsonTags.WORKFLOW_ACTION_TRACKER_URI, trackerUri);
+ json.put(JsonTags.WORKFLOW_ACTION_CONSOLE_URL, consoleUrl);
+ json.put(JsonTags.WORKFLOW_ACTION_ERROR_CODE, errorCode);
+ json.put(JsonTags.WORKFLOW_ACTION_ERROR_MESSAGE, errorMessage);
return json;
}
@@ -216,7 +266,7 @@ public void setErrorInfo(String errorCode, String errorMessage) {
this.errorCode = errorCode;
this.errorMessage = errorMessage;
}
-
+
public String toString() {
return MessageFormat.format("Action name[{0}] status[{1}]", getName(), getStatus());
}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowJob.java b/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowJob.java
index 6158d47d4..29c8169bc 100644
--- a/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowJob.java
+++ b/client/src/main/java/org/apache/oozie/client/rest/JsonWorkflowJob.java
@@ -27,24 +27,67 @@
import java.util.Date;
import java.util.List;
+import javax.persistence.*;
+
/**
* Json Bean that represents an Oozie workflow job.
*/
+
+@Entity
+@Table(name = "WF_JOBS")
+@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
+@DiscriminatorColumn(name = "bean_type", discriminatorType = DiscriminatorType.STRING)
public class JsonWorkflowJob implements WorkflowJob, JsonBean {
- private String appPath;
- private String appName;
+
+ @Id
private String id;
- private String externalId;
- private String conf;
+
+ @Basic
+ @Column(name = "app_name")
+ private String appName = null;
+
+ @Basic
+ @Column(name = "app_path")
+ private String appPath = null;
+
+ @Transient
+ private String externalId = null;
+
+ @Column(name = "conf")
+ @Lob
+ private String conf = null;
+
+ @Transient
private Status status = WorkflowJob.Status.PREP;
+
+ @Transient
private Date createdTime;
+
+ @Transient
private Date startTime;
+
+ @Transient
private Date endTime;
- private Date lastModTime;
- private String user;
+
+ @Transient
+ private Date lastModifiedTime;
+
+ @Basic
+ @Column(name = "user_name")
+ private String user = null;
+
+ @Basic
+ @Column(name = "group_name")
private String group;
+
+ @Basic
+ @Column(name = "run")
private int run = 1;
+
+ @Transient
private String consoleUrl;
+
+ @Transient
private List extends JsonWorkflowAction> actions;
public JsonWorkflowJob() {
@@ -59,7 +102,7 @@ public JsonWorkflowJob(JSONObject json) {
externalId = (String) json.get(JsonTags.WORKFLOW_EXTERNAL_ID);
conf = (String) json.get(JsonTags.WORKFLOW_CONF);
status = Status.valueOf((String) json.get(JsonTags.WORKFLOW_STATUS));
- lastModTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.WORKFLOW_LAST_MOD_TIME));
+ lastModifiedTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.WORKFLOW_LAST_MOD_TIME));
createdTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.WORKFLOW_CREATED_TIME));
startTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.WORKFLOW_START_TIME));
endTime = JsonUtils.parseDateRfc822((String) json.get(JsonTags.WORKFLOW_END_TIME));
@@ -79,7 +122,7 @@ public JSONObject toJSONObject() {
json.put(JsonTags.WORKFLOW_EXTERNAL_ID, externalId);
json.put(JsonTags.WORKFLOW_CONF, conf);
json.put(JsonTags.WORKFLOW_STATUS, status.toString());
- json.put(JsonTags.WORKFLOW_LAST_MOD_TIME, JsonUtils.formatDateRfc822(lastModTime));
+ json.put(JsonTags.WORKFLOW_LAST_MOD_TIME, JsonUtils.formatDateRfc822(lastModifiedTime));
json.put(JsonTags.WORKFLOW_CREATED_TIME, JsonUtils.formatDateRfc822(createdTime));
json.put(JsonTags.WORKFLOW_START_TIME, JsonUtils.formatDateRfc822(startTime));
json.put(JsonTags.WORKFLOW_END_TIME, JsonUtils.formatDateRfc822(endTime));
@@ -139,12 +182,12 @@ public void setStatus(Status status) {
this.status = status;
}
- public Date getLastModTime() {
- return lastModTime;
+ public Date getLastModifiedTime() {
+ return lastModifiedTime;
}
- public void setLastModTime(Date lastModTime) {
- this.lastModTime = lastModTime;
+ public void setLastModifiedTime(Date lastModifiedTime) {
+ this.lastModifiedTime = lastModifiedTime;
}
public Date getCreatedTime() {
@@ -235,10 +278,10 @@ public String toString() {
@SuppressWarnings("unchecked")
public static JSONArray toJSONArray(List extends JsonWorkflowJob> workflows) {
JSONArray array = new JSONArray();
- if(workflows!=null){
- for (JsonWorkflowJob node : workflows) {
- array.add(node.toJSONObject());
- }
+ if (workflows != null) {
+ for (JsonWorkflowJob node : workflows) {
+ array.add(node.toJSONObject());
+ }
}
return array;
}
@@ -258,4 +301,4 @@ public static List fromJSONArray(JSONArray array) {
return list;
}
-}
\ No newline at end of file
+}
diff --git a/client/src/main/java/org/apache/oozie/client/rest/RestConstants.java b/client/src/main/java/org/apache/oozie/client/rest/RestConstants.java
index f73e96efa..7bbf38926 100644
--- a/client/src/main/java/org/apache/oozie/client/rest/RestConstants.java
+++ b/client/src/main/java/org/apache/oozie/client/rest/RestConstants.java
@@ -35,7 +35,7 @@ public interface RestConstants {
public static final String XML_CONTENT_TYPE = "application/xml";
public static final String FORM_CONTENT_TYPE = "application/x-www-form-urlencoded";
-
+
public static final String TEXT_CONTENT_TYPE = "text/plain";
public static final String ACTION_PARAM = "action";
@@ -48,6 +48,8 @@ public interface RestConstants {
public static final String JOB_ACTION_START = "start";
+ public static final String JOB_ACTION_DRYRUN = "dryrun";
+
public static final String JOB_ACTION_SUSPEND = "suspend";
public static final String JOB_ACTION_RESUME = "resume";
@@ -58,7 +60,6 @@ public interface RestConstants {
public static final String JOB_SHOW_PARAM = "show";
-
public static final String JOB_SHOW_CONFIG = "config";
public static final String JOB_SHOW_INFO = "info";
@@ -67,7 +68,6 @@ public interface RestConstants {
public static final String JOB_SHOW_DEFINITION = "definition";
-
public static final String JOBS_FILTER_PARAM = "filter";
public static final String JOBS_EXTERNAL_ID_PARAM = "external-id";
@@ -76,6 +76,8 @@ public interface RestConstants {
public static final String ADMIN_SAFE_MODE_PARAM = "safemode";
+ public static final String ADMIN_SYSTEM_MODE_PARAM = "systemmode";
+
public static final String ADMIN_LOG_RESOURCE = "log";
public static final String ADMIN_OS_ENV_RESOURCE = "os-env";
@@ -89,6 +91,14 @@ public interface RestConstants {
public static final String ADMIN_BUILD_VERSION_RESOURCE = "build-version";
public static final String OOZIE_ERROR_CODE = "oozie-error-code";
-
+
public static final String OOZIE_ERROR_MESSAGE = "oozie-error-message";
+
+ public static final String JOBTYPE_PARAM = "jobtype";
+
+ public static final String SLA_GT_SEQUENCE_ID = "gt-sequence-id";
+
+ public static final String MAX_EVENTS = "max-events";
+
+ public static final String SLA = "sla";
}
diff --git a/client/src/main/resources/META-INF/persistence.xml b/client/src/main/resources/META-INF/persistence.xml
new file mode 100644
index 000000000..bc4c73ee3
--- /dev/null
+++ b/client/src/main/resources/META-INF/persistence.xml
@@ -0,0 +1,123 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.oozie.WorkflowActionBean
+ org.apache.oozie.WorkflowJobBean
+ org.apache.oozie.CoordinatorJobBean
+ org.apache.oozie.CoordinatorActionBean
+ org.apache.oozie.SLAEventBean
+ org.apache.oozie.client.rest.JsonWorkflowJob
+ org.apache.oozie.client.rest.JsonWorkflowAction
+ org.apache.oozie.client.rest.JsonCoordinatorJob
+ org.apache.oozie.client.rest.JsonCoordinatorAction
+ org.apache.oozie.client.rest.JsonSLAEvent
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/main/resources/gms-oozie-sla-0.1.xsd b/client/src/main/resources/gms-oozie-sla-0.1.xsd
new file mode 100644
index 000000000..5e637e941
--- /dev/null
+++ b/client/src/main/resources/gms-oozie-sla-0.1.xsd
@@ -0,0 +1,61 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/main/resources/oozie-coordinator-0.1.xsd b/client/src/main/resources/oozie-coordinator-0.1.xsd
new file mode 100644
index 000000000..c85d7491e
--- /dev/null
+++ b/client/src/main/resources/oozie-coordinator-0.1.xsd
@@ -0,0 +1,115 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/main/resources/oozie-sla-0.1.xsd b/client/src/main/resources/oozie-sla-0.1.xsd
new file mode 100644
index 000000000..85c194ad6
--- /dev/null
+++ b/client/src/main/resources/oozie-sla-0.1.xsd
@@ -0,0 +1,46 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/main/resources/oozie-workflow-0.1.xsd b/client/src/main/resources/oozie-workflow-0.1.xsd
index 572112aa5..a802f6124 100644
--- a/client/src/main/resources/oozie-workflow-0.1.xsd
+++ b/client/src/main/resources/oozie-workflow-0.1.xsd
@@ -1,23 +1,6 @@
-
+ elementFormDefault="qualified" targetNamespace="uri:oozie:workflow:0.1">
@@ -293,9 +276,9 @@
-
-
-
+
+
+
\ No newline at end of file
diff --git a/client/src/main/resources/oozie-workflow-0.2.xsd b/client/src/main/resources/oozie-workflow-0.2.xsd
new file mode 100644
index 000000000..f906e9e64
--- /dev/null
+++ b/client/src/main/resources/oozie-workflow-0.2.xsd
@@ -0,0 +1,246 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/client/src/test/java/org/apache/oozie/cli/TestCLIParser.java b/client/src/test/java/org/apache/oozie/cli/TestCLIParser.java
index 5d0d4888d..88a14c562 100644
--- a/client/src/test/java/org/apache/oozie/cli/TestCLIParser.java
+++ b/client/src/test/java/org/apache/oozie/cli/TestCLIParser.java
@@ -25,7 +25,7 @@ public class TestCLIParser extends TestCase {
public void testEmptyParser() throws Exception {
try {
- CLIParser parser = new CLIParser("oozie", new String[] {});
+ CLIParser parser = new CLIParser("oozie", new String[]{});
CLIParser.Command c = parser.parse(new String[]{"a"});
fail();
}
@@ -36,7 +36,7 @@ public void testEmptyParser() throws Exception {
public void testCommandParser() throws Exception {
try {
- CLIParser parser = new CLIParser("oozie", new String[] {});
+ CLIParser parser = new CLIParser("oozie", new String[]{});
parser.addCommand("a", "", "AAAAA", new Options(), false);
CLIParser.Command c = parser.parse(new String[]{"a", "b"});
assertEquals("a", c.getName());
diff --git a/client/src/test/java/org/apache/oozie/cli/TestValidation.java b/client/src/test/java/org/apache/oozie/cli/TestValidation.java
index e9e6f60e0..0beb3fcfa 100644
--- a/client/src/test/java/org/apache/oozie/cli/TestValidation.java
+++ b/client/src/test/java/org/apache/oozie/cli/TestValidation.java
@@ -31,6 +31,7 @@ private String getPath(String resource) throws Exception {
File file = new File(uri.getPath());
return file.getAbsolutePath();
}
+
public void testValid() throws Exception {
String[] args = new String[]{"validate", getPath("valid.xml")};
assertEquals(0, new OozieCLI().run(args));
diff --git a/client/src/test/java/org/apache/oozie/client/rest/TestJsonCoordinatorAction.java b/client/src/test/java/org/apache/oozie/client/rest/TestJsonCoordinatorAction.java
new file mode 100644
index 000000000..c026befae
--- /dev/null
+++ b/client/src/test/java/org/apache/oozie/client/rest/TestJsonCoordinatorAction.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client.rest;
+
+import junit.framework.TestCase;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.rest.JsonCoordinatorAction;
+import org.apache.oozie.client.rest.JsonUtils;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+public class TestJsonCoordinatorAction extends TestCase {
+
+
+ static String START_TIME = "Fri, 04 Sep 2009 00:00:00 GMT";
+ static String END_TIME = "Sat, 05 Sep 2009 00:00:00 GMT";
+ static String CREATE_TIME = "Sat, 05 Sep 2009 00:00:00 GMT";
+ static String LAST_MODIFIED_TIME = "Sat, 05 Sep 2009 00:00:00 GMT";
+ //static List missingDependencies = Arrays.asList("a:a", "a/a", "a//a");
+ static String missingDependencies = "a:a, a/a, a//a";
+
+ static JsonCoordinatorAction createAppAction() {
+ JsonCoordinatorAction app = new JsonCoordinatorAction();
+ app.setJobId("a");
+ app.setId("c");
+ app.setActionNumber(1);
+ app.setRunConf("cc");
+ app.setCreatedConf("cc");
+ app.setExternalId("c_e");
+ app.setCreatedTime(JsonUtils.parseDateRfc822(CREATE_TIME));
+ app.setLastModifiedTime(JsonUtils.parseDateRfc822(LAST_MODIFIED_TIME));
+ app.setStatus(CoordinatorAction.Status.WAITING);
+ //app.setStartTime(JsonUtils.parseDateRfc822(START_TIME));
+ //app.setEndTime(JsonUtils.parseDateRfc822(END_TIME));
+ app.setConsoleUrl("http://consoleurl:8080");
+ app.setMissingDependencies(missingDependencies);
+ return app;
+ }
+
+ public void testProperties() {
+ JsonCoordinatorAction app = createAppAction();
+ assertEquals("a", app.getJobId());
+ assertEquals("c", app.getId());
+ assertEquals(1, app.getActionNumber());
+ assertEquals("cc", app.getRunConf());
+ assertEquals("cc", app.getCreatedConf());
+ assertEquals("c_e", app.getExternalId());
+ assertEquals(JsonUtils.parseDateRfc822(CREATE_TIME), app.getCreatedTime());
+ assertEquals(JsonUtils.parseDateRfc822(LAST_MODIFIED_TIME), app.getLastModifiedTime());
+ assertEquals(CoordinatorAction.Status.WAITING, app.getStatus());
+ //assertEquals(JsonUtils.parseDateRfc822(START_TIME), app.getStartTime());
+ //assertEquals(JsonUtils.parseDateRfc822(END_TIME), app.getEndTime());
+ assertEquals("http://consoleurl:8080", app.getConsoleUrl());
+ assertEquals(missingDependencies, app.getMissingDependencies());
+ //assertEquals(3, app.getMissingDependencies().size());
+
+ }
+
+ public void testJsonAndBack() throws Exception {
+ JsonCoordinatorAction app = createAppAction();
+ StringWriter sw = new StringWriter();
+ app.toJSONObject().writeJSONString(sw);
+ sw.close();
+ JSONObject json = (JSONObject) JSONValue.parse(new StringReader(sw.toString()));
+ app = new JsonCoordinatorAction(json);
+
+ assertEquals("a", app.getJobId());
+ assertEquals("c", app.getId());
+ assertEquals(1, app.getActionNumber());
+ assertEquals("cc", app.getRunConf());
+ assertEquals("cc", app.getCreatedConf());
+ assertEquals("c_e", app.getExternalId());
+ assertEquals(JsonUtils.parseDateRfc822(CREATE_TIME), app.getCreatedTime());
+ assertEquals(JsonUtils.parseDateRfc822(LAST_MODIFIED_TIME), app.getLastModifiedTime());
+ assertEquals(CoordinatorAction.Status.WAITING, app.getStatus());
+ // assertEquals(JsonUtils.parseDateRfc822(START_TIME), app.getStartTime());
+ //assertEquals(JsonUtils.parseDateRfc822(END_TIME), app.getEndTime());
+ assertEquals("http://consoleurl:8080", app.getConsoleUrl());
+ assertEquals(missingDependencies, app.getMissingDependencies());
+ //assertEquals(3, app.getMissingDependencies().size());
+
+ sw = new StringWriter();
+ app.toJSONObject().writeJSONString(sw);
+ sw.close();
+ json = (JSONObject) JSONValue.parse(new StringReader(sw.toString()));
+ app = new JsonCoordinatorAction(json);
+
+ assertEquals("a", app.getJobId());
+ assertEquals("c", app.getId());
+ assertEquals(1, app.getActionNumber());
+ assertEquals("cc", app.getRunConf());
+ assertEquals("cc", app.getCreatedConf());
+ assertEquals("c_e", app.getExternalId());
+ assertEquals(JsonUtils.parseDateRfc822(CREATE_TIME), app.getCreatedTime());
+ assertEquals(JsonUtils.parseDateRfc822(LAST_MODIFIED_TIME), app.getLastModifiedTime());
+ assertEquals(CoordinatorAction.Status.WAITING, app.getStatus());
+ //assertEquals(JsonUtils.parseDateRfc822(START_TIME), app.getStartTime());
+ //assertEquals(JsonUtils.parseDateRfc822(END_TIME), app.getEndTime());
+ assertEquals("http://consoleurl:8080", app.getConsoleUrl());
+ assertEquals(missingDependencies, app.getMissingDependencies());
+ //assertEquals(3, app.getMissingDependencies().size());
+ }
+
+ public void testList() throws Exception {
+ List actions = Arrays.asList(createAppAction(), createAppAction());
+ JSONArray array = JsonCoordinatorAction.toJSONArray(actions);
+ StringWriter sw = new StringWriter();
+ array.writeJSONString(sw);
+ sw.close();
+ array = (JSONArray) JSONValue.parse(new StringReader(sw.toString()));
+ List readActions = JsonCoordinatorAction.fromJSONArray(array);
+ assertEquals(2, readActions.size());
+ }
+}
diff --git a/client/src/test/java/org/apache/oozie/client/rest/TestJsonCoordinatorJob.java b/client/src/test/java/org/apache/oozie/client/rest/TestJsonCoordinatorJob.java
new file mode 100644
index 000000000..284b4ff80
--- /dev/null
+++ b/client/src/test/java/org/apache/oozie/client/rest/TestJsonCoordinatorJob.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.client.rest;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.rest.JsonCoordinatorAction;
+import org.apache.oozie.client.rest.JsonCoordinatorJob;
+import org.apache.oozie.client.rest.JsonUtils;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+import junit.framework.TestCase;
+
+public class TestJsonCoordinatorJob extends TestCase {
+
+ static String LAST_ACTION_TIME = "Wed, 02 Sep 2009 00:00:00 GMT";
+ static String NEXT_MATERIALIZED_TIME = "Thu, 03 Sep 2009 00:00:00 GMT";
+ static String START_TIME = "Fri, 04 Sep 2009 00:00:00 GMT";
+ static String END_TIME = "Sat, 05 Sep 2009 00:00:00 GMT";
+
+
+ static JsonCoordinatorJob createApplication() {
+ JsonCoordinatorJob app = new JsonCoordinatorJob();
+ app.setAppPath("a");
+ app.setAppName("b");
+ app.setId("c");
+ app.setConf("cc");
+ app.setStatus(CoordinatorJob.Status.PREP);
+ app.setFrequency(100);
+ app.setTimeUnit(CoordinatorJob.Timeunit.WEEK);
+ app.setTimeZone("timeZone");
+ app.setConcurrency(10);
+ app.setExecutionOrder(CoordinatorJob.Execution.FIFO);
+ app.setTimeout(100);
+ app.setLastActionTime(JsonUtils.parseDateRfc822(LAST_ACTION_TIME));
+ app.setNextMaterializedTime(JsonUtils.parseDateRfc822(NEXT_MATERIALIZED_TIME));
+ app.setStartTime(JsonUtils.parseDateRfc822(START_TIME));
+ app.setEndTime(JsonUtils.parseDateRfc822(END_TIME));
+ app.setUser("d");
+ app.setGroup("e");
+ app.setConsoleUrl("cu");
+ return app;
+ }
+
+ public void testProperties() {
+ JsonCoordinatorJob app = createApplication();
+ assertEquals("a", app.getAppPath());
+ assertEquals("b", app.getAppName());
+ assertEquals("c", app.getId());
+ assertEquals("cc", app.getConf());
+ assertEquals(CoordinatorJob.Status.PREP, app.getStatus());
+ assertEquals(100, app.getFrequency());
+ assertEquals(CoordinatorJob.Timeunit.WEEK, app.getTimeUnit());
+ assertEquals("timeZone", app.getTimeZone());
+ assertEquals(10, app.getConcurrency());
+ assertEquals(CoordinatorJob.Execution.FIFO, app.getExecutionOrder());
+ assertEquals(100, app.getTimeout());
+ assertEquals(JsonUtils.parseDateRfc822(LAST_ACTION_TIME), app.getLastActionTime());
+ assertEquals(JsonUtils.parseDateRfc822(NEXT_MATERIALIZED_TIME), app.getNextMaterializedTime());
+ assertEquals(JsonUtils.parseDateRfc822(START_TIME), app.getStartTime());
+ assertEquals(JsonUtils.parseDateRfc822(END_TIME), app.getEndTime());
+ assertEquals("d", app.getUser());
+ assertEquals("e", app.getGroup());
+ assertEquals("cu", app.getConsoleUrl());
+
+ }
+
+ public void testJsonAndBack() throws Exception {
+ JsonCoordinatorJob app = createApplication();
+ StringWriter sw = new StringWriter();
+ app.toJSONObject().writeJSONString(sw);
+ sw.close();
+ JSONObject json = (JSONObject) JSONValue.parse(new StringReader(sw.toString()));
+ app = new JsonCoordinatorJob(json);
+
+ assertEquals("a", app.getAppPath());
+ assertEquals("b", app.getAppName());
+ assertEquals("c", app.getId());
+ assertEquals("cc", app.getConf());
+ assertEquals(CoordinatorJob.Status.PREP, app.getStatus());
+ assertEquals(100, app.getFrequency());
+ assertEquals(CoordinatorJob.Timeunit.WEEK, app.getTimeUnit());
+ assertEquals("timeZone", app.getTimeZone());
+ assertEquals(10, app.getConcurrency());
+ assertEquals(CoordinatorJob.Execution.FIFO, app.getExecutionOrder());
+ assertEquals(100, app.getTimeout());
+ assertEquals(JsonUtils.parseDateRfc822(LAST_ACTION_TIME), app.getLastActionTime());
+ assertEquals(JsonUtils.parseDateRfc822(NEXT_MATERIALIZED_TIME), app.getNextMaterializedTime());
+ assertEquals(JsonUtils.parseDateRfc822(START_TIME), app.getStartTime());
+ assertEquals(JsonUtils.parseDateRfc822(END_TIME), app.getEndTime());
+ assertEquals("d", app.getUser());
+ assertEquals("e", app.getGroup());
+ assertEquals("cu", app.getConsoleUrl());
+
+ sw = new StringWriter();
+ app.toJSONObject().writeJSONString(sw);
+ sw.close();
+ json = (JSONObject) JSONValue.parse(new StringReader(sw.toString()));
+ app = new JsonCoordinatorJob(json);
+
+ assertEquals("a", app.getAppPath());
+ assertEquals("b", app.getAppName());
+ assertEquals("c", app.getId());
+ assertEquals("cc", app.getConf());
+ assertEquals(CoordinatorJob.Status.PREP, app.getStatus());
+ assertEquals(100, app.getFrequency());
+ assertEquals(CoordinatorJob.Timeunit.WEEK, app.getTimeUnit());
+ assertEquals("timeZone", app.getTimeZone());
+ assertEquals(10, app.getConcurrency());
+ assertEquals(CoordinatorJob.Execution.FIFO, app.getExecutionOrder());
+ assertEquals(100, app.getTimeout());
+ assertEquals(JsonUtils.parseDateRfc822(LAST_ACTION_TIME), app.getLastActionTime());
+ assertEquals(JsonUtils.parseDateRfc822(NEXT_MATERIALIZED_TIME), app.getNextMaterializedTime());
+ assertEquals(JsonUtils.parseDateRfc822(START_TIME), app.getStartTime());
+ assertEquals(JsonUtils.parseDateRfc822(END_TIME), app.getEndTime());
+ assertEquals("d", app.getUser());
+ assertEquals("e", app.getGroup());
+ assertEquals("cu", app.getConsoleUrl());
+ }
+
+ public void testList() throws Exception {
+ List nodes = Arrays.asList(createApplication(), createApplication());
+ JSONArray array = JsonCoordinatorJob.toJSONArray(nodes);
+ StringWriter sw = new StringWriter();
+ array.writeJSONString(sw);
+ sw.close();
+ array = (JSONArray) JSONValue.parse(new StringReader(sw.toString()));
+ List readApplications = JsonCoordinatorJob.fromJSONArray(array);
+ assertEquals(2, readApplications.size());
+ }
+
+}
diff --git a/client/src/test/java/org/apache/oozie/client/rest/TestJsonUtils.java b/client/src/test/java/org/apache/oozie/client/rest/TestJsonUtils.java
index 7468cd31a..4e694bf85 100644
--- a/client/src/test/java/org/apache/oozie/client/rest/TestJsonUtils.java
+++ b/client/src/test/java/org/apache/oozie/client/rest/TestJsonUtils.java
@@ -19,8 +19,11 @@
import junit.framework.TestCase;
import org.json.simple.JSONObject;
+import org.json.simple.JSONArray;
import org.apache.oozie.client.rest.JsonUtils;
+import java.util.Arrays;
+import java.util.List;
import java.util.Date;
public class TestJsonUtils extends TestCase {
@@ -53,4 +56,28 @@ public void testGetLong() {
assertEquals(0l, JsonUtils.getLongValue(json, "ll"));
}
+ public void testGetListString() {
+ JSONObject json = new JSONObject();
+ JSONArray array = new JSONArray();
+ List sList = Arrays.asList("hello", "world");
+ array.add("hello");
+ array.add("world");
+ json.put("list", array);
+ assertEquals(array, json.get("list"));
+ assertEquals(sList, JsonUtils.getListString(json, "list"));
+ assertEquals(sList.size(), JsonUtils.getListString(json, "list").size());
+ }
+
+ public void testGetListStringWithNull() {
+ JSONObject json = new JSONObject();
+ JSONArray array = new JSONArray();
+ List sList = Arrays.asList("hello", null, "world");
+ array.add("hello");
+ array.add(null);
+ array.add("world");
+ json.put("list", array);
+ assertEquals(array, json.get("list"));
+ assertEquals(sList, JsonUtils.getListString(json, "list"));
+ assertEquals(sList.size(), JsonUtils.getListString(json, "list").size());
+ }
}
diff --git a/core/pom.xml b/core/pom.xml
index 6bd0ed10d..63a915c67 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -36,6 +36,42 @@
+
+ org.apache.openjpa
+ openjpa-persistence
+ 1.2.1
+
+
+
+ org.apache.openjpa
+ openjpa-jdbc
+ 1.2.1
+
+
+
+ org.apache.openjpa
+ openjpa-persistence-jdbc
+ 1.2.1
+
+
+
+ javax.persistence
+ persistence-api
+ 1.0
+
+
+
+ mysql
+ mysql-connector-java
+ 5.1.6
+
+
+
+ com.oracle
+ ojdbc6
+ 11.1.0.7.0
+
+
junit
junit
@@ -71,75 +107,13 @@
- org.apache.hadoop
+ ${hadoopGroupId}
hadoop-core
${hadoopVersion}
compile
-
-
- commons-cli
- commons-cli
-
-
- log4j
- log4j
-
-
- commons-httpclient
- commons-httpclient
-
-
- tomcat
- jasper-compiler
-
-
- tomcat
- jasper-runtime
-
-
- javax.servlet
- servlet-api
-
-
- javax.servlet
- jsp-api
-
-
- org.slf4j
- slf4j-api
-
-
- org.slf4j
- slf4j-log4j12
-
-
- commons-logging
- commons-logging-api
-
-
- jetty
- org.mortbay.jetty
-
-
- org.mortbay.jetty
- jetty
-
-
- org.mortbay.jetty
- jetty-util
-
-
- org.mortbay.jetty
- jsp-api-2.1
-
-
- org.mortbay.jetty
- servlet-api-2.5
-
-
- org.apache.hadoop
+ ${hadoopGroupId}
hadoop-test
${hadoopVersion}
test
@@ -152,16 +126,33 @@
- org.apache.hadoop
+ ${hadoopGroupId}
hadoop-streaming
${hadoopVersion}
test
- org.apache.hadoop
+ ${pigGroupId}
pig
${pigVersion}
provided
+
+
+ ${hadoopGroupId}
+ hadoop-core
+
+
+ org.apache.hadoop
+ hadoop-core
+
+
+
+
+
+ org.slf4j
+ slf4j-log4j12
+ 1.4.3
+ test
@@ -275,7 +266,7 @@
commons-dbcp
commons-dbcp
- 1.2.2
+ 1.4
compile
@@ -303,10 +294,44 @@
+
+ maven-antrun-plugin
+
+
+ process-classes
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ run
+
+
+
+
-
@@ -319,7 +344,7 @@
- preHadoopSecurity
+ hadoopSecurityPre
true
@@ -384,7 +409,7 @@
-
+
hadoopSecurityKerberos
diff --git a/core/src/main/java/org/apache/oozie/BaseEngine.java b/core/src/main/java/org/apache/oozie/BaseEngine.java
new file mode 100644
index 000000000..dfb7c605b
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/BaseEngine.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+import java.io.IOException;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.command.wf.CompletedActionCommand;
+import org.apache.oozie.command.wf.DefinitionCommand;
+import org.apache.oozie.command.wf.ExternalIdCommand;
+import org.apache.oozie.command.wf.JobCommand;
+import org.apache.oozie.command.wf.JobsCommand;
+import org.apache.oozie.command.wf.KillCommand;
+import org.apache.oozie.command.wf.ReRunCommand;
+import org.apache.oozie.command.wf.ResumeCommand;
+import org.apache.oozie.command.wf.StartCommand;
+import org.apache.oozie.command.wf.SubmitCommand;
+import org.apache.oozie.command.wf.SuspendCommand;
+import org.apache.oozie.service.DagXLogInfoService;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.XLogService;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XLogStreamer;
+
+public abstract class BaseEngine {
+
+ protected String user;
+ protected String authToken;
+
+ /**
+ * Return the user name.
+ *
+ * @return the user name.
+ */
+ public String getUser() {
+ return user;
+ }
+
+ /**
+ * Return the authentication token.
+ *
+ * @return the authentication token.
+ */
+ protected String getAuthToken() {
+ return authToken;
+ }
+
+ /**
+ * Submit a job. It validates configuration properties.
+ *
+ * @param conf job configuration.
+ * @param startJob indicates if the job should be started or not.
+ * @return the job Id.
+ * @throws BaseEngineException thrown if the job could not be created.
+ */
+ public abstract String submitJob(Configuration conf, boolean startJob) throws BaseEngineException;
+
+ /**
+ * Start a job.
+ *
+ * @param jobId job Id.
+ * @throws BaseEngineException thrown if the job could not be started.
+ */
+ public abstract void start(String jobId) throws BaseEngineException;
+
+ /**
+ * Resume a job.
+ *
+ * @param jobId job Id.
+ * @throws BaseEngineException thrown if the job could not be resumed.
+ */
+ public abstract void resume(String jobId) throws BaseEngineException;
+
+ /**
+ * Suspend a job.
+ *
+ * @param jobId job Id.
+ * @throws BaseEngineException thrown if the job could not be suspended.
+ */
+ public abstract void suspend(String jobId) throws BaseEngineException;
+
+ /**
+ * Kill a job.
+ *
+ * @param jobId job Id.
+ * @throws BaseEngineException thrown if the job could not be killed.
+ */
+ public abstract void kill(String jobId) throws BaseEngineException;
+
+ /**
+ * Rerun a job.
+ *
+ * @param jobId job Id to rerun.
+ * @param conf configuration information for the rerun.
+ * @throws BaseEngineException thrown if the job could not be rerun.
+ */
+ public abstract void reRun(String jobId, Configuration conf) throws BaseEngineException;
+
+
+ /**
+ * Return the info about a wf job.
+ *
+ * @param jobId job Id.
+ * @return the workflow job info.
+ * @throws BaseEngineException thrown if the job info could not be obtained.
+ */
+ public abstract WorkflowJob getJob(String jobId) throws BaseEngineException;
+
+ /**
+ * Return the info about a wf job with actions subset.
+ *
+ * @param jobId job Id
+ * @param start starting from this index in the list of actions belonging to the job
+ * @param length number of actions to be returned
+ * @return the workflow job info.
+ * @throws BaseEngineException thrown if the job info could not be obtained.
+ */
+ public abstract WorkflowJob getJob(String jobId, int start, int length) throws BaseEngineException;
+
+ /**
+ * Return the info about a coord job.
+ *
+ * @param jobId job Id.
+ * @return the coord job info.
+ * @throws BaseEngineException thrown if the job info could not be obtained.
+ */
+ public abstract CoordinatorJob getCoordJob(String jobId) throws BaseEngineException;
+
+ /**
+ * Return the info about a coord job with actions subset.
+ *
+ * @param jobId job Id.
+ * @param start starting from this index in the list of actions belonging to the job
+ * @param length number of actions to be returned
+ * @return the coord job info.
+ * @throws BaseEngineException thrown if the job info could not be obtained.
+ */
+ public abstract CoordinatorJob getCoordJob(String jobId, int start, int length) throws BaseEngineException;
+
+ /**
+ * Return a job definition.
+ *
+ * @param jobId job Id.
+ * @return the job definition.
+ * @throws BaseEngineException thrown if the job definition could not be obtained.
+ */
+ public abstract String getDefinition(String jobId) throws BaseEngineException;
+
+ /**
+ * Stream the log of a job.
+ *
+ * @param jobId job Id.
+ * @param writer writer to stream the log to.
+ * @throws IOException thrown if the log cannot be streamed.
+ * @throws BaseEngineException thrown if there is error in getting the Workflow/Coordinator Job Information for
+ * jobId.
+ */
+ public abstract void streamLog(String jobId, Writer writer) throws IOException, BaseEngineException;
+
+ /**
+ * Return the workflow Job ID for an external ID. This is reverse lookup for recovery purposes.
+ *
+ * @param externalId external ID provided at job submission time.
+ * @return the associated workflow job ID if any, null
+ * if none.
+ * @throws BaseEngineException thrown if the lookup could not be done.
+ */
+ public abstract String getJobIdForExternalId(String externalId) throws BaseEngineException;
+
+ public abstract String dryrunSubmit(Configuration conf, boolean startJob)
+ throws BaseEngineException;
+
+}
diff --git a/core/src/main/java/org/apache/oozie/BaseEngineException.java b/core/src/main/java/org/apache/oozie/BaseEngineException.java
new file mode 100644
index 000000000..28c964d30
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/BaseEngineException.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+/**
+ * Exception thrown by the {@link BaseEngine}.
+ */
+public class BaseEngineException extends XException {
+
+ /**
+ * Create an engine exception from a XException.
+ *
+ * @param cause the XException cause.
+ */
+ public BaseEngineException(XException cause) {
+ super(cause);
+ }
+
+ /**
+ * Create an engine exception.
+ *
+ * @param errorCode error code.
+ * @param params parameters for the error code message template.
+ */
+ public BaseEngineException(ErrorCode errorCode, Object... params) {
+ super(errorCode, params);
+ }
+
+
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/oozie/CoordinatorActionBean.java b/core/src/main/java/org/apache/oozie/CoordinatorActionBean.java
new file mode 100644
index 000000000..b3a4935cd
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/CoordinatorActionBean.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.rest.JsonCoordinatorAction;
+
+import java.util.Date;
+
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.WritableUtils;
+import org.apache.openjpa.persistence.jdbc.Index;
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.DataInput;
+
+import javax.persistence.Entity;
+import javax.persistence.Column;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.NamedNativeQuery;
+import javax.persistence.NamedNativeQueries;
+import javax.persistence.SqlResultSetMapping;
+import javax.persistence.ColumnResult;
+import javax.persistence.Basic;
+import javax.persistence.Lob;
+
+import java.sql.Timestamp;
+
+@SqlResultSetMapping(
+ name = "CoordActionJobIdLmt",
+ columns = {@ColumnResult(name = "job_id"),
+ @ColumnResult(name = "min_lmt")})
+
+@Entity
+@NamedQueries({
+
+ @NamedQuery(name = "UPDATE_COORD_ACTION", query = "update CoordinatorActionBean w set w.actionNumber = :actionNumber, w.actionXml = :actionXml, w.consoleUrl = :consoleUrl, w.createdConf = :createdConf, w.errorCode = :errorCode, w.errorMessage = :errorMessage, w.externalStatus = :externalStatus, w.missingDependencies = :missingDependencies, w.runConf = :runConf, w.timeOut = :timeOut, w.trackerUri = :trackerUri, w.type = :type, w.createdTimestamp = :createdTime, w.externalId = :externalId, w.jobId = :jobId, w.lastModifiedTimestamp = :lastModifiedTime, w.nominalTimestamp = :nominalTime, w.slaXml = :slaXml, w.status = :status where w.id = :id"),
+
+ @NamedQuery(name = "DELETE_COMPLETED_COORD_ACTIONS", query = "delete from CoordinatorActionBean a where a.id = :id and (a.status = 'SUCCEEDED' OR a.status = 'FAILED' OR a.status = 'KILLED')"),
+
+ @NamedQuery(name = "GET_COORD_ACTIONS", query = "select OBJECT(w) from CoordinatorActionBean w"),
+
+ @NamedQuery(name = "GET_COMPLETED_ACTIONS_OLDER_THAN", query = "select OBJECT(a) from CoordinatorActionBean a where a.createdTimestamp < :createdTime and (a.status = 'SUCCEEDED' OR a.status = 'FAILED' OR a.status = 'KILLED')"),
+
+ @NamedQuery(name = "GET_COORD_ACTION", query = "select OBJECT(a) from CoordinatorActionBean a where a.id = :id"),
+
+ @NamedQuery(name = "GET_COORD_ACTION_FOR_EXTERNALID", query = "select OBJECT(a) from CoordinatorActionBean a where a.externalId = :externalId"),
+
+ @NamedQuery(name = "GET_COORD_ACTIONS_FOR_JOB_FIFO", query = "select OBJECT(a) from CoordinatorActionBean a where a.jobId = :jobId AND a.status = 'READY' order by a.nominalTimestamp"),
+
+ @NamedQuery(name = "GET_COORD_ACTIONS_FOR_JOB_LIFO", query = "select OBJECT(a) from CoordinatorActionBean a where a.jobId = :jobId AND a.status = 'READY' order by a.nominalTimestamp desc"),
+
+ @NamedQuery(name = "GET_COORD_RUNNING_ACTIONS_COUNT", query = "select count(a) from CoordinatorActionBean a where a.jobId = :jobId AND (a.status = 'RUNNING' OR a.status='SUBMITTED')"),
+
+ @NamedQuery(name = "GET_COORD_ACTIONS_COUNT_BY_JOBID", query = "select count(a) from CoordinatorActionBean a where a.jobId = :jobId"),
+
+ @NamedQuery(name = "GET_ACTIONS_FOR_COORD_JOB", query = "select OBJECT(a) from CoordinatorActionBean a where a.jobId = :jobId"),
+
+ @NamedQuery(name = "GET_RUNNING_ACTIONS_FOR_COORD_JOB", query = "select OBJECT(a) from CoordinatorActionBean a where a.jobId = :jobId AND a.status = 'RUNNING'"),
+
+ @NamedQuery(name = "GET_RUNNING_ACTIONS_OLDER_THAN", query = "select OBJECT(a) from CoordinatorActionBean a where a.status = 'RUNNING' AND a.lastModifiedTimestamp <= :lastModifiedTime"),
+
+ @NamedQuery(name = "GET_WAITING_SUBMITTED_ACTIONS_OLDER_THAN", query = "select OBJECT(a) from CoordinatorActionBean a where (a.status = 'WAITING' OR a.status = 'SUBMITTED') AND a.lastModifiedTimestamp <= :lastModifiedTime"),
+
+ @NamedQuery(name = "GET_COORD_ACTIONS_COUNT", query = "select count(w) from CoordinatorActionBean w")})
+
+@NamedNativeQueries({
+
+ @NamedNativeQuery(name = "GET_READY_ACTIONS_GROUP_BY_JOBID", query = "select a.job_id as job_id, MIN(a.last_modified_time) as min_lmt from COORD_ACTIONS a where a.status = 'READY' GROUP BY a.job_id HAVING MIN(a.last_modified_time) < ?", resultSetMapping = "CoordActionJobIdLmt")
+ })
+public class CoordinatorActionBean extends JsonCoordinatorAction implements
+ Writable {
+
+ @Basic
+ @Index
+ @Column(name = "job_id")
+ private String jobId;
+
+ @Basic
+ @Index
+ @Column(name = "status")
+ private String status = null;
+
+ @Basic
+ @Column(name = "nominal_time")
+ private java.sql.Timestamp nominalTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "last_modified_time")
+ private java.sql.Timestamp lastModifiedTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "created_time")
+ private java.sql.Timestamp createdTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "external_id")
+ private String externalId;
+
+ @Column(name = "sla_xml")
+ @Lob
+ private String slaXml = null;
+
+ public CoordinatorActionBean() {
+ }
+
+ /**
+ * Serialize the coordinator bean to a data output.
+ *
+ * @param dataOutput data output.
+ * @throws IOException thrown if the coordinator bean could not be serialized.
+ */
+ public void write(DataOutput dataOutput) throws IOException {
+ WritableUtils.writeStr(dataOutput, getJobId());
+ WritableUtils.writeStr(dataOutput, getType());
+ WritableUtils.writeStr(dataOutput, getId());
+ WritableUtils.writeStr(dataOutput, getCreatedConf());
+ WritableUtils.writeStr(dataOutput, getStatus().toString());
+ dataOutput.writeInt(getActionNumber());
+ WritableUtils.writeStr(dataOutput, getRunConf());
+ WritableUtils.writeStr(dataOutput, getExternalStatus());
+ WritableUtils.writeStr(dataOutput, getTrackerUri());
+ WritableUtils.writeStr(dataOutput, getErrorCode());
+ WritableUtils.writeStr(dataOutput, getErrorMessage());
+ }
+
+ /**
+ * Deserialize a coordinator bean from a data input.
+ *
+ * @param dataInput data input.
+ * @throws IOException thrown if the workflow bean could not be deserialized.
+ */
+ public void readFields(DataInput dataInput) throws IOException {
+ setJobId(WritableUtils.readStr(dataInput));
+ setType(WritableUtils.readStr(dataInput));
+ setId(WritableUtils.readStr(dataInput));
+ setCreatedConf(WritableUtils.readStr(dataInput));
+ setStatus(CoordinatorAction.Status.valueOf(WritableUtils
+ .readStr(dataInput)));
+ setRunConf(WritableUtils.readStr(dataInput));
+ setExternalStatus(WritableUtils.readStr(dataInput));
+ setTrackerUri(WritableUtils.readStr(dataInput));
+ setConsoleUrl(WritableUtils.readStr(dataInput));
+ long d = dataInput.readLong();
+ if (d != -1) {
+ setCreatedTime(new Date(d));
+ }
+ d = dataInput.readLong();
+ if (d != -1) {
+ setLastModifiedTime(new Date(d));
+ }
+ d = dataInput.readLong();
+ d = dataInput.readLong();
+ }
+
+ @Override
+ public String getJobId() {
+ return this.jobId;
+ }
+
+ @Override
+ public void setJobId(String id) {
+ super.setJobId(id);
+ this.jobId = id;
+ }
+
+ @Override
+ public Status getStatus() {
+ return Status.valueOf(status);
+ }
+
+ @Override
+ public void setStatus(Status status) {
+ super.setStatus(status);
+ this.status = status.toString();
+ }
+
+ @Override
+ public void setCreatedTime(Date createdTime) {
+ this.createdTimestamp = DateUtils.convertDateToTimestamp(createdTime);
+ super.setCreatedTime(createdTime);
+ }
+
+ @Override
+ public void setNominalTime(Date nominalTime) {
+ this.nominalTimestamp = DateUtils.convertDateToTimestamp(nominalTime);
+ super.setNominalTime(nominalTime);
+ }
+
+ @Override
+ public void setLastModifiedTime(Date lastModifiedTime) {
+ this.lastModifiedTimestamp = DateUtils.convertDateToTimestamp(lastModifiedTime);
+ super.setLastModifiedTime(lastModifiedTime);
+ }
+
+ @Override
+ public Date getCreatedTime() {
+ return DateUtils.toDate(createdTimestamp);
+ }
+
+ public Timestamp getCreatedTimestamp() {
+ return createdTimestamp;
+ }
+
+ @Override
+ public Date getLastModifiedTime() {
+ return DateUtils.toDate(lastModifiedTimestamp);
+ }
+
+ public Timestamp getLastModifiedTimestamp() {
+ return lastModifiedTimestamp;
+ }
+
+ @Override
+ public Date getNominalTime() {
+ return DateUtils.toDate(nominalTimestamp);
+ }
+
+ public Timestamp getNominalTimestamp() {
+ return nominalTimestamp;
+ }
+
+ @Override
+ public String getExternalId() {
+ return externalId;
+ }
+
+ @Override
+ public void setExternalId(String externalId) {
+ super.setExternalId(externalId);
+ this.externalId = externalId;
+ }
+
+ public String getSlaXml() {
+ return slaXml;
+ }
+
+ public void setSlaXml(String slaXml) {
+ this.slaXml = slaXml;
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/CoordinatorEngine.java b/core/src/main/java/org/apache/oozie/CoordinatorEngine.java
new file mode 100644
index 000000000..5f917a582
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/CoordinatorEngine.java
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+import java.io.IOException;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.command.coord.CoordJobsCommand;
+import org.apache.oozie.command.coord.CoordKillCommand;
+import org.apache.oozie.command.coord.CoordResumeCommand;
+import org.apache.oozie.command.coord.CoordSuspendCommand;
+import org.apache.oozie.command.coord.CoordSubmitCommand;
+import org.apache.oozie.command.coord.CoordActionInfoCommand;
+import org.apache.oozie.command.coord.CoordJobCommand;
+import org.apache.oozie.service.DagXLogInfoService;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.XLogService;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XLogStreamer;
+
+public class CoordinatorEngine extends BaseEngine {
+
+ /**
+ * Create a system Coordinator engine, with no user and no group.
+ */
+ public CoordinatorEngine() {
+ }
+
+ /**
+ * Create a Coordinator engine to perform operations on behave of a user.
+ *
+ * @param user user name.
+ * @param authToken the authentication token.
+ */
+ public CoordinatorEngine(String user, String authToken) {
+ this.user = ParamChecker.notEmpty(user, "user");
+ this.authToken = ParamChecker.notEmpty(authToken, "authToken");
+ }
+
+ @Override
+ public String getDefinition(String jobId) throws BaseEngineException {
+ CoordinatorJobBean job = getCoordJobWithNoActionInfo(jobId);
+ return job.getOrigJobXml();
+ }
+
+ private CoordinatorJobBean getCoordJobWithNoActionInfo(String jobId) throws BaseEngineException {
+ try {
+ return new CoordJobCommand(jobId, false).call();
+ }
+ catch (CommandException ex) {
+ throw new BaseEngineException(ex);
+ }
+ }
+
+ public CoordinatorActionBean getCoordAction(String actionId) throws BaseEngineException {
+ try {
+ return new CoordActionInfoCommand(actionId).call();
+ }
+ catch (CommandException ex) {
+ throw new BaseEngineException(ex);
+ }
+ }
+
+ @Override
+ public CoordinatorJobBean getCoordJob(String jobId) throws BaseEngineException {
+ try {
+ return new CoordJobCommand(jobId).call();
+ }
+ catch (CommandException ex) {
+ throw new BaseEngineException(ex);
+ }
+ }
+
+ @Override
+ public CoordinatorJobBean getCoordJob(String jobId, int start, int length) throws BaseEngineException {
+ try {
+ return new CoordJobCommand(jobId, start, length).call();
+ }
+ catch (CommandException ex) {
+ throw new BaseEngineException(ex);
+ }
+ }
+
+ @Override
+ public String getJobIdForExternalId(String externalId) throws CoordinatorEngineException {
+ return null;
+ }
+
+ @Override
+ public void kill(String jobId) throws CoordinatorEngineException {
+ try {
+ new CoordKillCommand(jobId).call();
+ XLog.getLog(getClass()).info("User " + user + " killed the Coordinator job " + jobId);
+ }
+ catch (CommandException e) {
+ throw new CoordinatorEngineException(e);
+ }
+ }
+
+ @Override
+ public void reRun(String jobId, Configuration conf) throws CoordinatorEngineException {
+ }
+
+ @Override
+ public void resume(String jobId) throws CoordinatorEngineException {
+ try {
+ new CoordResumeCommand(jobId).call();
+ }
+ catch (CommandException e) {
+ throw new CoordinatorEngineException(e);
+ }
+ }
+
+ @Override
+ public void start(String jobId) throws CoordinatorEngineException {
+
+ }
+
+ @Override
+ public void streamLog(String jobId, Writer writer) throws IOException, BaseEngineException {
+ XLogStreamer.Filter filter = new XLogStreamer.Filter();
+ filter.setParameter(DagXLogInfoService.JOB, jobId);
+
+ CoordinatorJobBean job = getCoordJobWithNoActionInfo(jobId);
+ Services.get().get(XLogService.class).streamLog(filter, job.getCreatedTime(), new Date(), writer);
+
+ }
+
+ @Override
+ public String submitJob(Configuration conf, boolean startJob) throws CoordinatorEngineException {
+ CoordSubmitCommand submit = new CoordSubmitCommand(conf, getAuthToken());
+ try {
+ String jobId = submit.call();
+ return jobId;
+ }
+ catch (CommandException ex) {
+ throw new CoordinatorEngineException(ex);
+ }
+ }
+
+ @Override
+ public String dryrunSubmit(Configuration conf, boolean startJob) throws CoordinatorEngineException {
+ CoordSubmitCommand submit = new CoordSubmitCommand(true, conf, getAuthToken());
+ try {
+ String jobId = submit.call();
+ return jobId;
+ }
+ catch (CommandException ex) {
+ throw new CoordinatorEngineException(ex);
+ }
+ }
+
+ @Override
+ public void suspend(String jobId) throws CoordinatorEngineException {
+ try {
+ new CoordSuspendCommand(jobId).call();
+ }
+ catch (CommandException e) {
+ throw new CoordinatorEngineException(e);
+ }
+
+ }
+
+ @Override
+ public WorkflowJob getJob(String jobId) throws BaseEngineException {
+ throw new BaseEngineException(new XException(ErrorCode.E0301));
+ }
+
+ @Override
+ public WorkflowJob getJob(String jobId, int start, int length) throws BaseEngineException {
+ throw new BaseEngineException(new XException(ErrorCode.E0301));
+ }
+
+ private static final Set FILTER_NAMES = new HashSet();
+
+ static {
+ FILTER_NAMES.add(OozieClient.FILTER_USER);
+ FILTER_NAMES.add(OozieClient.FILTER_NAME);
+ FILTER_NAMES.add(OozieClient.FILTER_GROUP);
+ FILTER_NAMES.add(OozieClient.FILTER_STATUS);
+ }
+
+ public CoordinatorJobInfo getCoordJobs(String filterStr, int start, int len) throws CoordinatorEngineException {
+ Map> filter = parseFilter(filterStr);
+
+ try {
+ return new CoordJobsCommand(filter, start, len).call();
+ }
+ catch (CommandException ex) {
+ throw new CoordinatorEngineException(ex);
+ }
+ }
+
+ protected Map> parseFilter(String filter) throws CoordinatorEngineException {
+ Map> map = new HashMap>();
+ if (filter != null) {
+ StringTokenizer st = new StringTokenizer(filter, ";");
+ while (st.hasMoreTokens()) {
+ String token = st.nextToken();
+ if (token.contains("=")) {
+ String[] pair = token.split("=");
+ if (pair.length != 2) {
+ throw new CoordinatorEngineException(ErrorCode.E0420, filter,
+ "elements must be name=value pairs");
+ }
+ if (!FILTER_NAMES.contains(pair[0])) {
+ throw new CoordinatorEngineException(ErrorCode.E0420, filter, XLog.format("invalid name [{0}]",
+ pair[0]));
+ }
+ if (pair[0].equals("status")) {
+ try {
+ CoordinatorJob.Status.valueOf(pair[1]);
+ }
+ catch (IllegalArgumentException ex) {
+ throw new CoordinatorEngineException(ErrorCode.E0420, filter, XLog.format(
+ "invalid status [{0}]", pair[1]));
+ }
+ }
+ List list = map.get(pair[0]);
+ if (list == null) {
+ list = new ArrayList();
+ map.put(pair[0], list);
+ }
+ list.add(pair[1]);
+ }
+ else {
+ throw new CoordinatorEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs");
+ }
+ }
+ }
+ return map;
+ }
+}
diff --git a/core/src/main/java/org/apache/oozie/CoordinatorEngineException.java b/core/src/main/java/org/apache/oozie/CoordinatorEngineException.java
new file mode 100644
index 000000000..f15b69dd3
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/CoordinatorEngineException.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+/**
+ * Exception thrown by the {@link CoordinatorEngine}.
+ */
+public class CoordinatorEngineException extends BaseEngineException {
+
+ /**
+ * Create a coordinator engine exception from a XException.
+ *
+ * @param cause the XException cause.
+ */
+ public CoordinatorEngineException(XException cause) {
+ super(cause);
+ }
+
+ /**
+ * Create a coordinator engine exception.
+ *
+ * @param errorCode error code.
+ * @param params parameters for the error code message template.
+ */
+ public CoordinatorEngineException(ErrorCode errorCode, Object... params) {
+ super(errorCode, params);
+ }
+
+
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/oozie/CoordinatorJobBean.java b/core/src/main/java/org/apache/oozie/CoordinatorJobBean.java
new file mode 100644
index 000000000..76a4343be
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/CoordinatorJobBean.java
@@ -0,0 +1,378 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.rest.JsonCoordinatorJob;
+
+import java.util.Date;
+
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.WritableUtils;
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.DataInput;
+
+import javax.persistence.Entity;
+import javax.persistence.Column;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.Basic;
+import javax.persistence.Lob;
+
+import org.apache.openjpa.persistence.jdbc.Index;
+
+import java.sql.Timestamp;
+
+@Entity
+@NamedQueries({
+ @NamedQuery(name = "UPDATE_COORD_JOB", query = "update CoordinatorJobBean w set w.appName = :appName, w.appPath = :appPath, w.concurrency = :concurrency, w.conf = :conf, w.externalId = :externalId, w.frequency = :frequency, w.lastActionNumber = :lastActionNumber, w.timeOut = :timeOut, w.timeZone = :timeZone, w.authToken = :authToken, w.createdTimestamp = :createdTime, w.endTimestamp = :endTime, w.execution = :execution, w.jobXml = :jobXml, w.lastActionTimestamp = :lastAction, w.lastModifiedTimestamp = :lastModifiedTime, w.nextMaterializedTimestamp = :nextMaterializedTime, w.origJobXml = :origJobXml, w.slaXml=:slaXml, w.startTimestamp = :startTime, w.status = :status, w.timeUnitStr = :timeUnit where w.id = :id"),
+
+ @NamedQuery(name = "UPDATE_COORD_JOB_STATUS", query = "update CoordinatorJobBean w set w.status = :status, w.lastModifiedTimestamp = :lastModifiedTime where w.id = :id"),
+
+ @NamedQuery(name = "DELETE_COORD_JOB", query = "delete from CoordinatorJobBean w where w.id = :id"),
+
+ @NamedQuery(name = "GET_COORD_JOBS", query = "select OBJECT(w) from CoordinatorJobBean w"),
+
+ @NamedQuery(name = "GET_COORD_JOB", query = "select OBJECT(w) from CoordinatorJobBean w where w.id = :id"),
+
+ @NamedQuery(name = "GET_COORD_JOBS_COUNT", query = "select count(w) from CoordinatorJobBean w"),
+
+ @NamedQuery(name = "GET_COORD_JOBS_COLUMNS", query = "select w.id, w.appName, w.status, w.user, w.group, w.startTimestamp, w.endTimestamp, w.appPath, w.concurrency, w.frequency, w.lastActionTimestamp, w.nextMaterializedTimestamp, w.createdTimestamp, w.timeUnitStr, w.timeZone, w.timeOut from CoordinatorJobBean w order by w.createdTimestamp desc"),
+
+ @NamedQuery(name = "GET_COORD_JOBS_OLDER_THAN", query = "select OBJECT(w) from CoordinatorJobBean w where w.startTimestamp <= :matTime AND (w.status = 'PREP' OR w.status = 'RUNNING') AND (w.nextMaterializedTimestamp IS NULL OR w.endTimestamp > w.nextMaterializedTimestamp) AND (w.nextMaterializedTimestamp < :matTime OR w.nextMaterializedTimestamp IS NULL) order by w.lastModifiedTimestamp"),
+
+ @NamedQuery(name = "GET_COORD_JOBS_OLDER_THAN_STATUS", query = "select OBJECT(w) from CoordinatorJobBean w where w.status = :status AND w.lastModifiedTimestamp <= :lastModTime order by w.lastModifiedTimestamp"),
+
+ @NamedQuery(name = "GET_COMPLETED_COORD_JOBS_OLDER_THAN_STATUS", query = "select OBJECT(w) from CoordinatorJobBean w where ( w.status = 'SUCCEEDED' OR w.status = 'FAILED' or w.status = 'KILLED') AND w.lastModifiedTimestamp <= :lastModTime order by w.lastModifiedTimestamp")})
+public class CoordinatorJobBean extends JsonCoordinatorJob implements Writable {
+
+ @Basic
+ @Index
+ @Column(name = "status")
+ private String status = CoordinatorJob.Status.PREP.toString();
+
+ @Basic
+ @Column(name = "auth_token")
+ @Lob
+ private String authToken = null;
+
+ @Basic
+ @Column(name = "start_time")
+ private java.sql.Timestamp startTimestamp = null;
+
+ @Basic
+ @Column(name = "end_time")
+ private java.sql.Timestamp endTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "created_time")
+ private java.sql.Timestamp createdTimestamp = null;
+
+ @Basic
+ @Column(name = "time_unit")
+ private String timeUnitStr = CoordinatorJob.Timeunit.NONE.toString();
+
+ @Basic
+ @Column(name = "execution")
+ private String execution = null;
+
+ @Basic
+ @Column(name = "last_action")
+ private java.sql.Timestamp lastActionTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "next_matd_time")
+ private java.sql.Timestamp nextMaterializedTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "last_modified_time")
+ private java.sql.Timestamp lastModifiedTimestamp = null;
+
+ @Column(name = "job_xml")
+ @Lob
+ private String jobXml = null;
+
+ @Column(name = "orig_job_xml")
+ @Lob
+ private String origJobXml = null;
+
+ @Column(name = "sla_xml")
+ @Lob
+ private String slaXml = null;
+
+ public java.sql.Timestamp getStartTimestamp() {
+ return startTimestamp;
+ }
+
+ public void setStartTimestamp(java.sql.Timestamp startTimestamp) {
+ this.startTimestamp = startTimestamp;
+ }
+
+ public java.sql.Timestamp getEndTimestamp() {
+ return endTimestamp;
+ }
+
+ public void setEndTimestamp(java.sql.Timestamp endTimestamp) {
+ this.endTimestamp = endTimestamp;
+ }
+
+ public Timestamp getNextMaterializedTimestamp() {
+ return nextMaterializedTimestamp;
+ }
+
+ public void setNextMaterializedTimestamp(java.sql.Timestamp nextMaterializedTimestamp) {
+ this.nextMaterializedTimestamp = nextMaterializedTimestamp;
+ }
+
+ public Timestamp getLastModifiedTimestamp() {
+ return lastModifiedTimestamp;
+ }
+
+ public void setLastModifiedTimestamp(java.sql.Timestamp lastModifiedTimestamp) {
+ this.lastModifiedTimestamp = lastModifiedTimestamp;
+ }
+
+ public String getJobXml() {
+ return jobXml;
+ }
+
+ public void setJobXml(String jobXml) {
+ this.jobXml = jobXml;
+ }
+
+ public String getOrigJobXml() {
+ return origJobXml;
+ }
+
+ public void setOrigJobXml(String origJobXml) {
+ this.origJobXml = origJobXml;
+ }
+
+ public String getSlaXml() {
+ return slaXml;
+ }
+
+ public void setSlaXml(String slaXml) {
+ this.slaXml = slaXml;
+ }
+
+ @Override
+ public void setTimeUnit(Timeunit timeUnit) {
+ super.setTimeUnit(timeUnit);
+ this.timeUnitStr = timeUnit.toString();
+ }
+
+ public void setExecution(String execution) {
+ this.execution = execution;
+ }
+
+ public void setLastActionTimestamp(java.sql.Timestamp lastActionTimestamp) {
+ this.lastActionTimestamp = lastActionTimestamp;
+ }
+
+ public void setAuthToken(String authToken) {
+ this.authToken = authToken;
+ }
+
+ public CoordinatorJobBean() {
+ }
+
+ /*
+ * Serialize the coordinator bean to a data output. @param dataOutput data
+ * output. @throws IOException thrown if the coordinator bean could not be
+ * serialized.
+ */
+ public void write(DataOutput dataOutput) throws IOException {
+ WritableUtils.writeStr(dataOutput, getAppPath());
+ WritableUtils.writeStr(dataOutput, getAppName());
+ WritableUtils.writeStr(dataOutput, getId());
+ WritableUtils.writeStr(dataOutput, getConf());
+ WritableUtils.writeStr(dataOutput, getStatusStr());
+ dataOutput.writeInt(getFrequency());
+ WritableUtils.writeStr(dataOutput, getTimeUnit().toString());
+ WritableUtils.writeStr(dataOutput, getTimeZone());
+ dataOutput.writeInt(getConcurrency());
+ WritableUtils.writeStr(dataOutput, getExecutionOrder().toString());
+ dataOutput.writeLong((getStartTime() != null) ? getLastActionTime().getTime() : -1);
+ dataOutput.writeLong((getStartTime() != null) ? getNextMaterializedTime().getTime() : -1);
+ dataOutput.writeLong((getStartTime() != null) ? getStartTime().getTime() : -1);
+ dataOutput.writeLong((getEndTime() != null) ? getEndTime().getTime() : -1);
+ WritableUtils.writeStr(dataOutput, getUser());
+ WritableUtils.writeStr(dataOutput, getGroup());
+ WritableUtils.writeStr(dataOutput, getExternalId());
+ dataOutput.writeInt(getTimeout());
+ }
+
+ /**
+ * Deserialize a coordinator bean from a data input.
+ *
+ * @param dataInput data input.
+ * @throws IOException thrown if the workflow bean could not be deserialized.
+ */
+ public void readFields(DataInput dataInput) throws IOException {
+ setAppPath(WritableUtils.readStr(dataInput));
+ setAppName(WritableUtils.readStr(dataInput));
+ setId(WritableUtils.readStr(dataInput));
+ setConf(WritableUtils.readStr(dataInput));
+ setStatus(CoordinatorJob.Status.valueOf(WritableUtils.readStr(dataInput)));
+ setFrequency(dataInput.readInt());
+ setTimeUnit(CoordinatorJob.Timeunit.valueOf(WritableUtils.readStr(dataInput)));
+ setTimeZone(WritableUtils.readStr(dataInput));
+ setConcurrency(dataInput.readInt());
+ setExecutionOrder(Execution.valueOf(WritableUtils.readStr(dataInput)));
+
+ long d = dataInput.readLong();
+ if (d != -1) {
+ setLastActionTime(new Date(d));
+ }
+ d = dataInput.readLong();
+ if (d != -1) {
+ setNextMaterializedTime(new Date(d));
+ }
+ d = dataInput.readLong();
+ if (d != -1) {
+ setStartTime(new Date(d));
+ }
+
+ d = dataInput.readLong();
+ if (d != -1) {
+ setEndTime(new Date(d));
+ }
+ setUser(WritableUtils.readStr(dataInput));
+ setGroup(WritableUtils.readStr(dataInput));
+ setExternalId(WritableUtils.readStr(dataInput));
+ setTimeout(dataInput.readInt());
+ }
+
+ @Override
+ public Status getStatus() {
+ return Status.valueOf(this.status);
+ }
+
+ public String getStatusStr() {
+ return status;
+ }
+
+ @Override
+ public void setStatus(Status val) {
+ super.setStatus(val);
+ this.status = val.toString();
+ }
+
+ public String getTimeUnitStr() {
+ return timeUnitStr;
+ }
+
+ public Timeunit getTimeUnit() {
+ return Timeunit.valueOf(this.timeUnitStr);
+ }
+
+ public void setExecution(Execution order) {
+ this.execution = order.toString();
+ super.setExecutionOrder(order);
+ }
+
+ @Override
+ public Execution getExecutionOrder() {
+ return Execution.valueOf(this.execution);
+ }
+
+ public String getExecution() {
+ return execution;
+ }
+
+ @Override
+ public void setLastActionTime(Date lastAction) {
+ this.lastActionTimestamp = DateUtils.convertDateToTimestamp(lastAction);
+ super.setLastActionTime(lastAction);
+ }
+
+ @Override
+ public Date getLastActionTime() {
+ return DateUtils.toDate(lastActionTimestamp);
+ }
+
+ public Timestamp getLastActionTimestamp() {
+ return lastActionTimestamp;
+ }
+
+ @Override
+ public void setNextMaterializedTime(Date nextMaterializedTime) {
+ super.setNextMaterializedTime(nextMaterializedTime);
+ this.nextMaterializedTimestamp = DateUtils.convertDateToTimestamp(nextMaterializedTime);
+ }
+
+ @Override
+ public Date getNextMaterializedTime() {
+ return DateUtils.toDate(nextMaterializedTimestamp);
+ }
+
+ public void setLastModifiedTime(Date lastModifiedTime) {
+ this.lastModifiedTimestamp = DateUtils.convertDateToTimestamp(lastModifiedTime);
+ }
+
+ public Date getLastModifiedTime() {
+ return DateUtils.toDate(lastModifiedTimestamp);
+ }
+
+ @Override
+ public void setStartTime(Date startTime) {
+ super.setStartTime(startTime);
+ this.startTimestamp = DateUtils.convertDateToTimestamp(startTime);
+ }
+
+ @Override
+ public Date getStartTime() {
+ return DateUtils.toDate(startTimestamp);
+ }
+
+ @Override
+ public void setEndTime(Date endTime) {
+ super.setEndTime(endTime);
+ this.endTimestamp = DateUtils.convertDateToTimestamp(endTime);
+ }
+
+ @Override
+ public Date getEndTime() {
+ return DateUtils.convertDateToTimestamp(endTimestamp);
+ }
+
+ public void setCreatedTime(Date createTime) {
+ this.createdTimestamp = DateUtils.convertDateToTimestamp(createTime);
+ }
+
+ public Date getCreatedTime() {
+ return DateUtils.toDate(createdTimestamp);
+ }
+
+ public Timestamp getCreatedTimestamp() {
+ return createdTimestamp;
+ }
+
+ public String getAuthToken() {
+ // TODO Auto-generated method stub
+ return this.authToken;
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/CoordinatorJobInfo.java b/core/src/main/java/org/apache/oozie/CoordinatorJobInfo.java
new file mode 100644
index 000000000..bd1d56e93
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/CoordinatorJobInfo.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+import java.util.List;
+
+/**
+ * Bean that captures a paginated window of coordinator jobs returned by a
+ * jobs query, together with the offset, requested length and total count.
+ */
+public class CoordinatorJobInfo {
+ private int start;
+ private int len;
+ private int total;
+ private List jobs;
+
+ /**
+ * Create a workflows info bean.
+ *
+ * @param workflows workflows being returned.
+ * @param start workflows offset.
+ * @param len number of workflows.
+ * @param total total workflows.
+ */
+ public CoordinatorJobInfo(List jobs, int start, int len, int total) {
+ this.start = start;
+ this.len = len;
+ this.total = total;
+ this.jobs = jobs;
+ }
+
+ /**
+ * Return the workflows being returned.
+ *
+ * @return the workflows being returned.
+ */
+ public List getCoordJobs() {
+ return jobs;
+ }
+
+ /**
+ * Return the offset of the workflows being returned. For pagination purposes.
+ *
+ * @return the offset of the workflows being returned.
+ */
+ public int getStart() {
+ return start;
+ }
+
+ /**
+ * Return the number of the workflows being returned. For pagination purposes.
+ *
+ * @return the number of the workflows being returned.
+ */
+ public int getLen() {
+ return len;
+ }
+
+ /**
+ * Return the total number of workflows. For pagination purposes.
+ *
+ * @return the total number of workflows.
+ */
+ public int getTotal() {
+ return total;
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/DagELFunctions.java b/core/src/main/java/org/apache/oozie/DagELFunctions.java
index e58f08e09..6811edcba 100644
--- a/core/src/main/java/org/apache/oozie/DagELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/DagELFunctions.java
@@ -53,14 +53,14 @@ public class DagELFunctions {
public static void configureEvaluator(ELEvaluator evaluator, WorkflowJobBean workflow, WorkflowActionBean action) {
evaluator.setVariable(WORKFLOW, workflow);
evaluator.setVariable(ACTION, action);
- for (Map.Entry entry : workflow.getWorkflowInstance().getConf()) {
+ for (Map.Entry entry : workflow.getWorkflowInstance().getConf()) {
if (ParamChecker.isValidIdentifier(entry.getKey())) {
- evaluator.setVariable(entry.getKey(), entry.getValue());
+ evaluator.setVariable(entry.getKey().trim(), entry.getValue().trim());
}
}
try {
evaluator.setVariable(ACTION_PROTO_CONF,
- new XConfiguration(new StringReader(workflow.getProtoActionConf())));
+ new XConfiguration(new StringReader(workflow.getProtoActionConf())));
}
catch (IOException ex) {
throw new RuntimeException("It should not happen", ex);
diff --git a/core/src/main/java/org/apache/oozie/DagEngine.java b/core/src/main/java/org/apache/oozie/DagEngine.java
index ff8ce75c1..0c7871ced 100644
--- a/core/src/main/java/org/apache/oozie/DagEngine.java
+++ b/core/src/main/java/org/apache/oozie/DagEngine.java
@@ -21,6 +21,7 @@
import org.apache.oozie.service.XLogService;
import org.apache.oozie.service.DagXLogInfoService;
import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.command.wf.CompletedActionCommand;
@@ -36,9 +37,11 @@
import org.apache.oozie.command.wf.SuspendCommand;
import org.apache.oozie.command.wf.DefinitionCommand;
import org.apache.oozie.command.wf.ExternalIdCommand;
+import org.apache.oozie.command.wf.WorkflowActionInfoCommand;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.CallableQueueService;
import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
import java.io.Writer;
@@ -52,13 +55,12 @@
import java.util.ArrayList;
import java.io.IOException;
-
/**
* The DagEngine bean provides all the DAG engine functionality for WS calls.
*/
-public class DagEngine {
- private String user;
- private String authToken;
+public class DagEngine extends BaseEngine {
+
+ private static final int HIGH_PRIORITY = 10;
/**
* Create a system Dag engine, with no user and no group.
@@ -69,7 +71,7 @@ public DagEngine() {
/**
* Create a Dag engine to perform operations on behave of a user.
*
- * @param user user name.
+ * @param user user name.
* @param authToken the authentication token.
*/
public DagEngine(String user, String authToken) {
@@ -78,36 +80,17 @@ public DagEngine(String user, String authToken) {
}
/**
- * Return the user name.
- *
- * @return the user name.
- */
- public String getUser() {
- return user;
- }
-
- /**
- * Return the authentication token.
+ * Submit a workflow job. It validates configuration properties.
*
- * @return the authentication token.
- */
- protected String getAuthToken() {
- return authToken;
- }
-
- /**
- * Submit a workflow job.
- *
- * It validates configuration properties.
- *
- * @param conf job configuration.
+ * @param conf job configuration.
* @param startJob indicates if the job should be started or not.
* @return the job Id.
* @throws DagEngineException thrown if the job could not be created.
*/
+ @Override
public String submitJob(Configuration conf, boolean startJob) throws DagEngineException {
validateSubmitConfiguration(conf);
- SubmitCommand submit = new SubmitCommand(conf, authToken);
+ SubmitCommand submit = new SubmitCommand(conf, getAuthToken());
try {
String jobId = submit.call();
if (startJob) {
@@ -120,6 +103,34 @@ public String submitJob(Configuration conf, boolean startJob) throws DagEngineEx
}
}
+ public static void main(String[] args) throws Exception {
+ // Configuration conf = new XConfiguration(IOUtils.getResourceAsReader(
+ // "org/apache/oozie/coord/conf.xml", -1));
+
+ Configuration conf = new XConfiguration();
+
+ // String appXml =
+ // IOUtils.getResourceAsString("org/apache/oozie/coord/test1.xml", -1);
+ conf.set(OozieClient.APP_PATH, "file:///Users/danielwo/oozie/workflows/examples/seed/workflows/map-reduce");
+ conf.set(OozieClient.USER_NAME, "danielwo");
+ conf.set(OozieClient.GROUP_NAME, "other");
+
+ conf.set("inputDir", " blah ");
+
+ // System.out.println("appXml :"+ appXml + "\n conf :"+ conf);
+ new Services().init();
+ try {
+ DagEngine de = new DagEngine("me", "TESTING_WF");
+ String jobId = de.submitJob(conf, true);
+ System.out.println("WF Job Id " + jobId);
+
+ Thread.sleep(20000);
+ }
+ finally {
+ Services.get().destroy();
+ }
+ }
+
private void validateSubmitConfiguration(Configuration conf) throws DagEngineException {
if (conf.get(OozieClient.APP_PATH) == null) {
throw new DagEngineException(ErrorCode.E0401, OozieClient.APP_PATH);
@@ -132,6 +143,7 @@ private void validateSubmitConfiguration(Configuration conf) throws DagEngineExc
* @param jobId job Id.
* @throws DagEngineException thrown if the job could not be started.
*/
+ @Override
public void start(String jobId) throws DagEngineException {
// Changing to synchronous call from asynchronous queuing to prevent the
// loss of command if the queue is full or the queue is lost in case of
@@ -150,6 +162,7 @@ public void start(String jobId) throws DagEngineException {
* @param jobId job Id.
* @throws DagEngineException thrown if the job could not be resumed.
*/
+ @Override
public void resume(String jobId) throws DagEngineException {
// Changing to synchronous call from asynchronous queuing to prevent the
// loss of command if the queue is full or the queue is lost in case of
@@ -168,6 +181,7 @@ public void resume(String jobId) throws DagEngineException {
* @param jobId job Id.
* @throws DagEngineException thrown if the job could not be suspended.
*/
+ @Override
public void suspend(String jobId) throws DagEngineException {
// Changing to synchronous call from asynchronous queuing to prevent the
// loss of command if the queue is full or the queue is lost in case of
@@ -186,12 +200,14 @@ public void suspend(String jobId) throws DagEngineException {
* @param jobId job Id.
* @throws DagEngineException thrown if the job could not be killed.
*/
+ @Override
public void kill(String jobId) throws DagEngineException {
// Changing to synchronous call from asynchronous queuing to prevent the
// loss of command if the queue is full or the queue is lost in case of
// failure.
try {
new KillCommand(jobId).call();
+ XLog.getLog(getClass()).info("User " + user + " killed the WF job " + jobId);
}
catch (CommandException e) {
throw new DagEngineException(e);
@@ -202,13 +218,14 @@ public void kill(String jobId) throws DagEngineException {
* Rerun a job.
*
* @param jobId job Id to rerun.
- * @param conf configuration information for the rerun.
+ * @param conf configuration information for the rerun.
* @throws DagEngineException thrown if the job could not be rerun.
*/
+ @Override
public void reRun(String jobId, Configuration conf) throws DagEngineException {
try {
validateReRunConfiguration(conf);
- new ReRunCommand(jobId, conf, authToken).call();
+ new ReRunCommand(jobId, conf, getAuthToken()).call();
start(jobId);
}
catch (CommandException ex) {
@@ -228,18 +245,18 @@ private void validateReRunConfiguration(Configuration conf) throws DagEngineExce
/**
* Process an action callback.
*
- * @param actionId the action Id.
+ * @param actionId the action Id.
* @param externalStatus the action external status.
- * @param actionData the action output data, null
if none.
- * @throws DagEngineException thrown if the callback could not be processed.
+ * @param actionData the action output data, null
if none.
+ * @throws DagEngineException thrown if the callback could not be processed.
*/
public void processCallback(String actionId, String externalStatus, Properties actionData)
throws DagEngineException {
XLog.Info.get().clearParameter(XLogService.GROUP);
XLog.Info.get().clearParameter(XLogService.USER);
- Command command = new CompletedActionCommand(actionId, externalStatus, actionData);
+ Command command = new CompletedActionCommand(actionId, externalStatus, actionData, HIGH_PRIORITY);
if (!Services.get().get(CallableQueueService.class).queue(command)) {
- XLog.getLog(this.getClass()).warn(XLog.OPS, "queue is full, ignoring callback");
+ XLog.getLog(this.getClass()).warn(XLog.OPS, "queue is full or system is in SAFEMODE, ignoring callback");
}
}
@@ -250,6 +267,7 @@ public void processCallback(String actionId, String externalStatus, Properties a
* @return the workflow job info.
* @throws DagEngineException thrown if the job info could not be obtained.
*/
+ @Override
public WorkflowJob getJob(String jobId) throws DagEngineException {
try {
return new JobCommand(jobId).call();
@@ -259,6 +277,25 @@ public WorkflowJob getJob(String jobId) throws DagEngineException {
}
}
+ /**
+ * Return the info about a job with actions subset.
+ *
+ * @param jobId job Id
+ * @param start starting from this index in the list of actions belonging to the job
+ * @param length number of actions to be returned
+ * @return the workflow job info.
+ * @throws DagEngineException thrown if the job info could not be obtained.
+ */
+ @Override
+ public WorkflowJob getJob(String jobId, int start, int length) throws DagEngineException {
+ try {
+ return new JobCommand(jobId, start, length).call();
+ }
+ catch (CommandException ex) {
+ throw new DagEngineException(ex);
+ }
+ }
+
/**
* Return the a job definition.
*
@@ -266,6 +303,7 @@ public WorkflowJob getJob(String jobId) throws DagEngineException {
* @return the job definition.
* @throws DagEngineException thrown if the job definition could no be obtained.
*/
+ @Override
public String getDefinition(String jobId) throws DagEngineException {
try {
return new DefinitionCommand(jobId).call();
@@ -281,9 +319,9 @@ public String getDefinition(String jobId) throws DagEngineException {
* @param jobId job Id.
* @param writer writer to stream the log to.
* @throws IOException thrown if the log cannot be streamed.
- * @throws DagEngineException thrown if there is error in getting the
- * Workflow Information for jobId.
+ * @throws DagEngineException thrown if there is an error in getting the Workflow Information for jobId.
*/
+ @Override
public void streamLog(String jobId, Writer writer) throws IOException, DagEngineException {
XLogStreamer.Filter filter = new XLogStreamer.Filter();
filter.setParameter(DagXLogInfoService.JOB, jobId);
@@ -316,20 +354,19 @@ protected Map> parseFilter(String filter) throws DagEngineE
if (token.contains("=")) {
String[] pair = token.split("=");
if (pair.length != 2) {
- throw new DagEngineException(ErrorCode.E0420, filter,
- "elements must be name=value pairs");
+ throw new DagEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs");
}
if (!FILTER_NAMES.contains(pair[0])) {
- throw new DagEngineException(ErrorCode.E0420, filter,
- XLog.format("invalid name [{0}]", pair[0]));
+ throw new DagEngineException(ErrorCode.E0420, filter, XLog
+ .format("invalid name [{0}]", pair[0]));
}
if (pair[0].equals("status")) {
try {
WorkflowJob.Status.valueOf(pair[1]);
}
catch (IllegalArgumentException ex) {
- throw new DagEngineException(ErrorCode.E0420, filter,
- XLog.format("invalid status [{0}]", pair[1]));
+ throw new DagEngineException(ErrorCode.E0420, filter, XLog.format("invalid status [{0}]",
+ pair[1]));
}
}
List list = map.get(pair[0]);
@@ -340,8 +377,7 @@ protected Map> parseFilter(String filter) throws DagEngineE
list.add(pair[1]);
}
else {
- throw new DagEngineException(ErrorCode.E0420, filter,
- "elements must be name=value pairs");
+ throw new DagEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs");
}
}
}
@@ -352,36 +388,60 @@ protected Map> parseFilter(String filter) throws DagEngineE
* Return the info about a set of jobs.
*
* @param filterStr job filter. Refer to the {@link org.apache.oozie.client.OozieClient} for the filter syntax.
- * @param start offset, base 1.
- * @param len number of jobs to return.
+ * @param start offset, base 1.
+ * @param len number of jobs to return.
* @return job info for all matching jobs, the jobs don't contain node action information.
* @throws DagEngineException thrown if the jobs info could not be obtained.
*/
@SuppressWarnings("unchecked")
public WorkflowsInfo getJobs(String filterStr, int start, int len) throws DagEngineException {
- Map> filter = parseFilter(filterStr);
- try {
- return new JobsCommand(filter, start, len).call();
- } catch (CommandException dce) {
- throw new DagEngineException(dce);
- }
- }
+ Map> filter = parseFilter(filterStr);
+ try {
+ return new JobsCommand(filter, start, len).call();
+ }
+ catch (CommandException dce) {
+ throw new DagEngineException(dce);
+ }
+ }
/**
- * Return the workflow Job ID for an external ID.
- *
- * This is reverse lookup for recovery purposes.
+ * Return the workflow Job ID for an external ID. This is reverse lookup for recovery purposes.
*
* @param externalId external ID provided at job submission time.
* @return the associated workflow job ID if any, null
if none.
* @throws DagEngineException thrown if the lookup could not be done.
*/
+ @Override
public String getJobIdForExternalId(String externalId) throws DagEngineException {
try {
return new ExternalIdCommand(externalId).call();
- } catch (CommandException dce) {
+ }
+ catch (CommandException dce) {
throw new DagEngineException(dce);
}
}
+ @Override
+ public CoordinatorJob getCoordJob(String jobId) throws BaseEngineException {
+ throw new BaseEngineException(new XException(ErrorCode.E0301));
+ }
+
+ @Override
+ public CoordinatorJob getCoordJob(String jobId, int start, int length) throws BaseEngineException {
+ throw new BaseEngineException(new XException(ErrorCode.E0301));
+ }
+
+ public WorkflowActionBean getWorkflowAction(String actionId) throws BaseEngineException {
+ try {
+ return new WorkflowActionInfoCommand(actionId).call();
+ }
+ catch (CommandException ex) {
+ throw new BaseEngineException(ex);
+ }
+ }
+
+ @Override
+ public String dryrunSubmit(Configuration conf, boolean startJob) throws BaseEngineException {
+ return null;
+ }
}
diff --git a/core/src/main/java/org/apache/oozie/DagEngineException.java b/core/src/main/java/org/apache/oozie/DagEngineException.java
index 4ec40c806..2a49eea78 100644
--- a/core/src/main/java/org/apache/oozie/DagEngineException.java
+++ b/core/src/main/java/org/apache/oozie/DagEngineException.java
@@ -20,7 +20,7 @@
/**
* Exception thrown by the {@link DagEngine}.
*/
-public class DagEngineException extends XException {
+public class DagEngineException extends BaseEngineException {
/**
* Create an dag engine exception from a XException.
diff --git a/core/src/main/java/org/apache/oozie/ErrorCode.java b/core/src/main/java/org/apache/oozie/ErrorCode.java
index 35de0db9e..b94f7cb00 100644
--- a/core/src/main/java/org/apache/oozie/ErrorCode.java
+++ b/core/src/main/java/org/apache/oozie/ErrorCode.java
@@ -54,6 +54,9 @@ public enum ErrorCode {
E0303(XLog.STD, "Invalid parameter value, [{0}] = [{1}]"),
E0304(XLog.STD, "Invalid parameter type, parameter [{0}] expected type [{1}]"),
E0305(XLog.STD, "Missing parameter [{0}]"),
+ E0306(XLog.STD, "Invalid parameter"),
+ E0307(XLog.STD, "Runtime error [{0}]"),
+
E0400(XLog.STD, "User mismatch, request user [{0}] configuration user [{1}]"),
E0401(XLog.STD, "Missing configuration property [{0}]"),
@@ -70,8 +73,8 @@ public enum ErrorCode {
E0505(XLog.OPS, "Workflow app definition [{0}] does not exist"),
E0506(XLog.OPS, "Workflow app definition [{0}] is not a file"),
E0507(XLog.OPS, "Could not access to [{0}], {1}"),
- E0508(XLog.OPS, "User [{0}] not authorized for job [{1}]"),
-
+ E0508(XLog.OPS, "User [{0}] not authorized for WF job [{1}]"),
+ E0509(XLog.OPS, "User [{0}] not authorized for Coord job [{1}]"),
E0600(XLog.OPS, "Could not get connection, {0}"),
E0601(XLog.OPS, "Could not close connection, {0}"),
@@ -116,9 +119,23 @@ public enum ErrorCode {
E0805(XLog.STD, "Workflow job not completed, status [{0}]"),
E0806(XLog.STD, "Action did not complete in previous run, action [{0}]"),
E0807(XLog.STD, "Some skip actions were not executed [{0}]"),
-
- ETEST(XLog.STD, "THIS SHOULD HAPPEN ONLY IN TESTING, invalid job id [{0}]"),
- ;
+ E0808(XLog.STD, "Disallowed user property [{0}]"),
+
+ E1001(XLog.STD, "Could not read the coordinator job definition, {0}"),
+ E1002(XLog.STD, "Invalid coordinator application URI [{0}], {1}"),
+ E1003(XLog.STD, "Invalid coordinator application attributes [{0}], {1}"),
+ E1004(XLog.STD, "Expression language evaluation error [{0}], {1}"),
+ E1005(XLog.STD, "Could not read the coordinator job configuration read from DB, {0}"),
+ E1006(XLog.STD, "Invalid coordinator application [{0}], {1}"),
+ E1007(XLog.STD, "Unable to add record to SLA table. [{0}], {1}"),
+ E1008(XLog.STD, "Not implemented. [{0}]"),
+ E1009(XLog.STD, "Unable to parse XML response. [{0}]"),
+ E1010(XLog.STD, "Invalid data in coordinator xml. [{0}]"),
+ E1011(XLog.STD, "Cannot update coordinator job [{0}], {1}"),
+ E1012(XLog.STD, "Coord Job Materialization Error: {0}"),
+ E1013(XLog.STD, "Coord Job Recovery Error: {0}"),
+
+ ETEST(XLog.STD, "THIS SHOULD HAPPEN ONLY IN TESTING, invalid job id [{0}]"),;
private String template;
private int logMask;
@@ -127,7 +144,7 @@ public enum ErrorCode {
* Create an error code.
*
* @param template template for the exception message.
- * @param logMask log mask for the exception.
+ * @param logMask log mask for the exception.
*/
private ErrorCode(int logMask, String template) {
this.logMask = logMask;
@@ -158,7 +175,7 @@ public int getLogMask() {
* @param args the parameters for the templatized message.
* @return error message.
*/
- public String format(Object ... args) {
+ public String format(Object... args) {
return XLog.format("{0}: {1}", toString(), XLog.format(getTemplate(), args));
}
diff --git a/core/src/main/java/org/apache/oozie/FaultInjection.java b/core/src/main/java/org/apache/oozie/FaultInjection.java
index 5afdd410a..ac69834a2 100644
--- a/core/src/main/java/org/apache/oozie/FaultInjection.java
+++ b/core/src/main/java/org/apache/oozie/FaultInjection.java
@@ -20,21 +20,13 @@
import org.apache.oozie.util.XLog;
/**
- * Fault Injection support class.
- *
- * Concrete classes should be available only during testing, not in production.
- *
- * To activate fault injection the {@link #FAULT_INJECTION} system property must be set to true.
- *
- * When fault injection is activated, the concrete class (specified by name) will be call for activation.
- *
- * Concrete classes should be activated by presense of a second system property.
- *
- * This fault injection pattern provides 3 levels of safeguard: a general 'fault injection' system property,
- * the availabity of of the concrete 'fault injection' class in the classpath, a specifi 'fault injection' system
- * property.
- *
- * Refer to the SkipCommitFaultInjection
class in the test classes for an example.
+ * Fault Injection support class. Concrete classes should be available only during testing, not in production.
+ * To activate fault injection the {@link #FAULT_INJECTION} system property must be set to true. When fault
+ * injection is activated, the concrete class (specified by name) will be called for activation. Concrete classes
+ * should be activated by presence of a second system property. This fault injection pattern provides 3 levels of
+ * safeguard: a general 'fault injection' system property, the availability of the concrete 'fault injection' class in
+ * the classpath, a specific 'fault injection' system property. Refer to the SkipCommitFaultInjection
+ * class in the test classes for an example.
*/
public abstract class FaultInjection {
diff --git a/core/src/main/java/org/apache/oozie/LocalOozieClient.java b/core/src/main/java/org/apache/oozie/LocalOozieClient.java
index bde4dbe72..6ff2baf5b 100644
--- a/core/src/main/java/org/apache/oozie/LocalOozieClient.java
+++ b/core/src/main/java/org/apache/oozie/LocalOozieClient.java
@@ -29,33 +29,20 @@
import java.util.Properties;
/**
- * Client API to submit and manage Oozie workflow jobs against an Oozie intance.
- *
- * This class is thread safe.
- *
+ * Client API to submit and manage Oozie workflow jobs against an Oozie instance. This class is thread safe.
* Syntax for filter for the {@link #getJobsInfo(String)} {@link #getJobsInfo(String, int, int)} methods:
- * [NAME=VALUE][;NAME=VALUE]*
.
- *
- * Valid filter names are:
- *
- *
- * name: the workflow application name from the workflow definition.
- * user: the user that submitted the job.
- * group: the group for the job.
- * status: the status of the job.
- *
- *
- * The query will do an AND among all the filter names.
- * The query will do an OR among all the filter values for the same name. Multiple values must be specified as
- * different name value pairs.
+ * [NAME=VALUE][;NAME=VALUE]*
. Valid filter names are: name: the workflow application
+ * name from the workflow definition. user: the user that submitted the job. group: the group for the
+ * job. status: the status of the job. The query will do an AND among all the filter names. The
+ * query will do an OR among all the filter values for the same name. Multiple values must be specified as different
+ * name value pairs.
*/
public class LocalOozieClient extends OozieClient {
private DagEngine dagEngine;
/**
- * Create a workflow client for Oozie local use.
- *
+ * Create a workflow client for Oozie local use.
*
* @param dagEngine the dag engine instance to use.
*/
@@ -64,9 +51,8 @@ public LocalOozieClient(DagEngine dagEngine) {
}
/**
- * Return the Oozie URL of the workflow client instance.
- *
- * This URL is the base URL fo the Oozie system, with not protocol versioning.
+ * Return the Oozie URL of the workflow client instance. This URL is the base URL of the Oozie system, with no
+ * protocol versioning.
*
* @return the Oozie URL of the workflow client instance.
*/
@@ -76,12 +62,12 @@ public String getOozieUrl() {
}
/**
- * Return the Oozie URL used by the client and server for WS communications.
- *
- * This URL is the original URL plus the versioning element path.
+ * Return the Oozie URL used by the client and server for WS communications. This URL is the original URL plus
+ * the versioning element path.
*
* @return the Oozie URL used by the client and server for communication.
- * @throws org.apache.oozie.client.OozieClientException thrown in the client and the server are not protocol compatible.
+ * @throws org.apache.oozie.client.OozieClientException thrown if the client and the server are not protocol
+ * compatible.
*/
@Override
public String getProtocolUrl() throws OozieClientException {
@@ -91,15 +77,16 @@ public String getProtocolUrl() throws OozieClientException {
/**
* Validate that the Oozie client and server instances are protocol compatible.
*
- * @throws org.apache.oozie.client.OozieClientException thrown in the client and the server are not protocol compatible.
+ * @throws org.apache.oozie.client.OozieClientException thrown if the client and the server are not protocol
+ * compatible.
*/
@Override
public synchronized void validateWSVersion() throws OozieClientException {
}
/**
- * Create an empty configuration with just the {@link #USER_NAME} set to the JVM user name and the
- * {@link #GROUP_NAME} set to 'other'.
+ * Create an empty configuration with just the {@link #USER_NAME} set to the JVM user name and the {@link
+ * #GROUP_NAME} set to 'other'.
*
* @return an empty configuration.
*/
@@ -296,7 +283,6 @@ public WorkflowJob getJobInfo(String jobId) throws OozieClientException {
* @return a list with the workflow jobs info, without node details.
* @throws org.apache.oozie.client.OozieClientException thrown if the jobs info could not be retrieved.
*/
- @Override
public List getJobsInfo(String filter, int start, int len) throws OozieClientException {
try {
return (List) (List) dagEngine.getJobs(filter, start, len).getWorkflows();
@@ -307,9 +293,8 @@ public List getJobsInfo(String filter, int start, int len) throws O
}
/**
- * Return the info of the workflow jobs that match the filter.
- *
- * It returns the first 100 jobs that match the filter.
+ * Return the info of the workflow jobs that match the filter. It returns the first 100 jobs that match the
+ * filter.
*
* @param filter job filter. Refer to the {@link LocalOozieClient} for the filter syntax.
* @return a list with the workflow jobs info, without node details.
@@ -320,9 +305,7 @@ public List getJobsInfo(String filter) throws OozieClientException
}
/**
- * Return the workflow job Id for an external Id.
- *
- * The external Id must have provided at job creation time.
+ * Return the workflow job Id for an external Id. The external Id must have been provided at job creation time.
*
* @param externalId external Id given at job creation time.
* @return the workflow job Id for an external Id, null
if none.
@@ -344,8 +327,9 @@ public String getJobId(String externalId) throws OozieClientException {
* @return true if safe mode is ON
false if safe mode is OFF
* @throws org.apache.oozie.client.OozieClientException throw if it could not obtain the safe mode status.
*/
- public boolean isInSafeMode() throws OozieClientException {
- return Services.get().isSafeMode();
- }
+ /*public SYSTEM_MODE isInSafeMode() throws OozieClientException {
+ //return Services.get().isSafeMode();
+ return Services.get().getSystemMode() ;
+ }*/
}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/oozie/SLAEventBean.java b/core/src/main/java/org/apache/oozie/SLAEventBean.java
new file mode 100644
index 000000000..a3fbfc51d
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/SLAEventBean.java
@@ -0,0 +1,355 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.sql.Timestamp;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import javax.persistence.Basic;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.oozie.client.SLAEvent;
+import org.apache.oozie.client.rest.JsonSLAEvent;
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.XLog;
+import org.jdom.Element;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+@Entity
+@NamedQueries({
+
+ @NamedQuery(name = "GET_SLA_EVENT_NEWER_SEQ_LIMITED", query = "select OBJECT(w) from SLAEventBean w where w.event_id > :id order by w.event_id")})
+public class SLAEventBean extends JsonSLAEvent implements Writable {
+
+ @Basic
+ @Column(name = "job_status")
+ private String jobStatusStr = null;
+
+ @Basic
+ @Column(name = "app_type")
+ private String appTypeStr = null;
+
+ @Basic
+ @Column(name = "expected_start")
+ private java.sql.Timestamp expectedStartTS = null;
+
+ @Basic
+ @Column(name = "expected_end")
+ private java.sql.Timestamp expectedEndTS = null;
+
+ @Basic
+ @Column(name = "status_timestamp")
+ private java.sql.Timestamp statusTimestampTS = null;
+
+ @Basic
+ @Column(name = "event_type")
+ private String eventType = null;
+
+ public SLAEventBean() {
+
+ }
+
+ public String getJobStatusStr() {
+ return jobStatusStr;
+ }
+
+ public void setJobStatusStr(String jobStatusStr) {
+ this.jobStatusStr = jobStatusStr;
+ }
+
+ public Status getJobStatus() {
+ return Status.valueOf(this.jobStatusStr);
+ }
+
+ public void setJobStatus(Status jobStatus) {
+ super.setJobStatus(jobStatus);
+ this.jobStatusStr = jobStatus.toString();
+ }
+
+ public String getAppTypeStr() {
+ return appTypeStr;
+ }
+
+ public void setAppTypeStr(String appTypeStr) {
+ this.appTypeStr = appTypeStr;
+ }
+
+ public SlaAppType getAppType() {
+ return SlaAppType.valueOf(appTypeStr);
+ }
+
+ public void setAppType(SlaAppType appType) {
+ super.setAppType(appType);
+ this.appTypeStr = appType.toString();
+ }
+
+ public java.sql.Timestamp getExpectedStartTS() {
+ return expectedStartTS;
+ }
+
+ public Date getExpectedStart() {
+ return DateUtils.toDate(expectedStartTS);
+ }
+
+ public void setExpectedStart(Date expectedStart) {
+ super.setExpectedStart(expectedStart);
+ this.expectedStartTS = DateUtils.convertDateToTimestamp(expectedStart);
+ }
+
+ public java.sql.Timestamp getExpectedEndTS() {
+ return expectedEndTS;
+ }
+
+ public Date getExpectedEnd() {
+ return DateUtils.toDate(expectedEndTS);
+ }
+
+ public void setExpectedEnd(Date expectedEnd) {
+ super.setExpectedEnd(expectedEnd);
+ this.expectedEndTS = DateUtils.convertDateToTimestamp(expectedEnd);
+ }
+
+ public java.sql.Timestamp getStatusTimestampTS() {
+ return statusTimestampTS;
+ }
+
+ public Date getStatusTimestamp() {
+ return DateUtils.toDate(statusTimestampTS);
+ }
+
+ public void setStatusTimestamp(Date statusTimestamp) {
+ super.setStatusTimestamp(statusTimestamp);
+ this.statusTimestampTS = DateUtils.convertDateToTimestamp(statusTimestamp);
+ }
+
+ public String getEventType() {
+ return eventType;
+ }
+
+ public void setEventType(String eventType) {
+ this.eventType = eventType;
+ }
+
+ @Override
+ public void readFields(DataInput arg0) throws IOException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void write(DataOutput arg0) throws IOException {
+ // TODO Auto-generated method stub
+
+ }
+
+ public String toString() {
+ return MessageFormat.format("Event id[{0}] status[{1}]", getEvent_id(),
+ getJobStatus());
+ }
+
+ /**
+ * Convert a SLAEvent list into a JSONArray.
+ *
+ * @param events SLAEvent list.
+ * @return the corresponding JSON array.
+ */
+ @SuppressWarnings("unchecked")
+ public static JSONArray toJSONArray(List extends SLAEventBean> events) {
+ JSONArray array = new JSONArray();
+ if (events != null) {
+ for (JsonSLAEvent node : events) {
+ array.add(node.toJSONObject());
+ }
+ }
+ return array;
+ }
+
+ /**
+ * Convert a JSONArray into a SLAEvent list.
+ *
+ * @param array JSON array.
+ * @return the corresponding SLA event list.
+ */
+ @SuppressWarnings("unchecked")
+ public static List fromJSONArray(JSONArray array) {
+ List list = new ArrayList();
+ for (Object obj : array) {
+ list.add(new JsonSLAEvent((JSONObject) obj));
+ }
+ return list;
+ }
+
+ /* public String toXml2() {
+ String ret = "";
+ if (getJobStatus() == Status.CREATED) {
+ ret = getRegistrationEventXml();
+ }
+ else {
+ ret = getStatusEventXml();
+ }
+ return createATag("event", ret);
+ }
+
+ private String getStatusEventXml() {
+ StringBuilder statXml = new StringBuilder();
+ statXml
+ .append(createATag("sequence-id", String.valueOf(getEvent_id())));
+ statXml.append("");
+ statXml.append(createATag("sla-id", getSlaId()));
+ statXml.append(createATag("status-timestamp",
+ getDateString(getStatusTimestamp())));
+ statXml.append(createATag("job-status", getJobStatus().toString()));
+ statXml.append("");
+ return statXml.toString();
+ }
+
+ private String getRegistrationEventXml() {
+ StringBuilder regXml = new StringBuilder();
+ regXml.append(createATag("sequence-id", String.valueOf(getEvent_id())));
+ regXml.append("");
+ regXml.append(createATag("sla-id", String.valueOf(getSlaId())));
+ regXml.append(createATag("app-type", getAppType().toString()));
+ regXml.append(createATag("app-name", getAppName()));
+ regXml.append(createATag("user", getUser()));
+ regXml.append(createATag("group", getGroupName()));
+ regXml.append(createATag("parent-sla-id", String
+ .valueOf(getParentSlaId())));
+ regXml.append(createATag("expected-start",
+ getDateString(getExpectedStart())));
+ regXml.append(createATag("expected-end",
+ getDateString(getExpectedEnd())));
+ regXml.append(createATag("status-timestamp",
+ getDateString(getStatusTimestamp())));
+ regXml.append(createATag("job-status", getJobStatus().toString()));
+
+ regXml.append(createATag("alert-contact", getAlertContact()));
+ regXml.append(createATag("dev-contact", getDevContact()));
+ regXml.append(createATag("qa-contact", getQaContact()));
+ regXml.append(createATag("se-contact", getSeContact()));
+ regXml.append(createATag("notification-msg", getNotificationMsg()));
+ regXml.append(createATag("alert-percentage", getAlertPercentage()));
+ regXml.append(createATag("alert-frequency", getAlertFrequency()));
+ regXml.append(createATag("upstream-apps", getUpstreamApps()));
+ regXml.append("");
+ return regXml.toString();
+ }
+ private String createATag(String tag, String content) {
+ if (content == null) {
+ content = "";
+ }
+ return "<" + tag + ">" + content + "" + tag + ">";
+ }
+ */
+ public Element toXml() {
+ Element retElem = null;
+ if (getJobStatus() == Status.CREATED) {
+ retElem = getRegistrationEvent("event");
+ }
+ else {
+ retElem = getStatusEvent("event");
+ }
+ return retElem;
+ }
+
+ private Element getRegistrationEvent(String tag) {
+ Element eReg = new Element(tag);
+ eReg.addContent(createATagElement("sequence-id", String.valueOf(getEvent_id())));
+ Element e = new Element("registration");
+ e.addContent(createATagElement("sla-id", getSlaId()));
+ //e.addContent(createATagElement("sla-id", String.valueOf(getSlaId())));
+ e.addContent(createATagElement("app-type", getAppType().toString()));
+ e.addContent(createATagElement("app-name", getAppName()));
+ e.addContent(createATagElement("user", getUser()));
+ e.addContent(createATagElement("group", getGroupName()));
+ e.addContent(createATagElement("parent-sla-id", String
+ .valueOf(getParentSlaId())));
+ e.addContent(createATagElement("expected-start",
+ getDateString(getExpectedStart())));
+ e.addContent(createATagElement("expected-end",
+ getDateString(getExpectedEnd())));
+ e.addContent(createATagElement("status-timestamp",
+ getDateString(getStatusTimestamp())));
+ e.addContent(createATagElement("notification-msg", getNotificationMsg()));
+
+ e.addContent(createATagElement("alert-contact", getAlertContact()));
+ e.addContent(createATagElement("dev-contact", getDevContact()));
+ e.addContent(createATagElement("qa-contact", getQaContact()));
+ e.addContent(createATagElement("se-contact", getSeContact()));
+
+ e.addContent(createATagElement("alert-percentage", getAlertPercentage()));
+ e.addContent(createATagElement("alert-frequency", getAlertFrequency()));
+
+ e.addContent(createATagElement("upstream-apps", getUpstreamApps()));
+ e.addContent(createATagElement("job-status", getJobStatus().toString()));
+ e.addContent(createATagElement("job-data", getJobData()));
+ eReg.addContent(e);
+ return eReg;
+ }
+
+ private Element getStatusEvent(String tag) {
+ Element eStat = new Element(tag);
+ eStat.addContent(createATagElement("sequence-id", String.valueOf(getEvent_id())));
+ Element e = new Element("status");
+ e.addContent(createATagElement("sla-id", getSlaId()));
+ e.addContent(createATagElement("status-timestamp",
+ getDateString(getStatusTimestamp())));
+ e.addContent(createATagElement("job-status", getJobStatus().toString()));
+ e.addContent(createATagElement("job-data", getJobData()));
+ eStat.addContent(e);
+ return eStat;
+ }
+
+ private Element createATagElement(String tag, String content) {
+ if (content == null) {
+ content = "";
+ }
+ Element e = new Element(tag);
+ e.addContent(content);
+ return e;
+ }
+
+ private Element createATagElement(String tag, Element content) {
+ Element e = new Element(tag);
+ e.addContent(content);
+ return e;
+ }
+
+ private String getDateString(Date d) {
+ try {
+ return DateUtils.formatDateUTC(d);
+ }
+ catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ XLog.getLog(getClass()).error("Date formatting error " + d, e);
+ throw new RuntimeException("Date formatting error " + d + e);
+ }
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/WorkflowActionBean.java b/core/src/main/java/org/apache/oozie/WorkflowActionBean.java
index af851dcf7..419103739 100644
--- a/core/src/main/java/org/apache/oozie/WorkflowActionBean.java
+++ b/core/src/main/java/org/apache/oozie/WorkflowActionBean.java
@@ -19,6 +19,7 @@
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.rest.JsonWorkflowAction;
+import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.PropertiesUtils;
import org.apache.oozie.util.WritableUtils;
@@ -30,17 +31,103 @@
import java.io.IOException;
import java.io.DataInput;
+import javax.persistence.Entity;
+import javax.persistence.Column;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.Basic;
+import javax.persistence.Lob;
+
+import org.apache.openjpa.persistence.jdbc.Index;
+
+import javax.persistence.Transient;
+
+import java.sql.Timestamp;
+
/**
* Bean that contains all the information to start an action for a workflow node.
*/
+// Following statements(INSERT_ACTION, UPDATE_ACTION) follow the same
+// numbering for place holders and uses same function
+// getActionValueMapFromBean for setting the values. So The numbering is to
+// be maintained if any change is made.
+@Entity
+@NamedQueries({
+
+ @NamedQuery(name = "UPDATE_ACTION", query = "update WorkflowActionBean a set a.conf = :conf, a.consoleUrl = :consoleUrl, a.data = :data, a.errorCode = :errorCode, a.errorMessage = :errorMessage, a.externalId = :externalId, a.externalStatus = :externalStatus, a.name = :name, a.retries = :retries, a.trackerUri = :trackerUri, a.transition = :transition, a.type = :type, a.endTimestamp = :endTime, a.executionPath = :executionPath, a.lastCheckTimestamp = :lastCheckTime, a.logToken = :logToken, a.pending = :pending, a.pendingAgeTimestamp = :pendingAge, a.signalValue = :signalValue, a.slaXml = :slaXml, a.startTimestamp = :startTime, a.status = :status, a.wfId=:wfId where a.id = :id"),
+
+ @NamedQuery(name = "DELETE_ACTION", query = "delete from WorkflowActionBean a where a.id = :id"),
+
+ @NamedQuery(name = "DELETE_ACTIONS_FOR_WORKFLOW", query = "delete from WorkflowActionBean a where a.wfId = :wfId"),
+
+ @NamedQuery(name = "GET_ACTIONS", query = "select OBJECT(a) from WorkflowActionBean a"),
+
+ @NamedQuery(name = "GET_ACTION", query = "select OBJECT(a) from WorkflowActionBean a where a.id = :id"),
+
+ @NamedQuery(name = "GET_ACTION_FOR_UPDATE", query = "select OBJECT(a) from WorkflowActionBean a where a.id = :id"),
+
+ @NamedQuery(name = "GET_ACTIONS_FOR_WORKFLOW", query = "select OBJECT(a) from WorkflowActionBean a where a.wfId = :wfId order by a.startTimestamp"),
+
+ @NamedQuery(name = "GET_ACTIONS_OF_WORKFLOW_FOR_UPDATE", query = "select OBJECT(a) from WorkflowActionBean a where a.wfId = :wfId order by a.startTimestamp"),
+
+ @NamedQuery(name = "GET_PENDING_ACTIONS", query = "select OBJECT(a) from WorkflowActionBean a where a.pending = 1 AND a.pendingAgeTimestamp < :pendingAge AND a.status <> 'RUNNING'"),
+
+ @NamedQuery(name = "GET_RUNNING_ACTIONS", query = "select OBJECT(a) from WorkflowActionBean a where a.pending = 1 AND a.status = 'RUNNING' AND a.lastCheckTimestamp < :lastCheckTime")
+ })
+
public class WorkflowActionBean extends JsonWorkflowAction implements Writable {
- private String jobId;
- private String executionPath;
- private boolean pending;
+
+ @Basic
+ @Index
+ @Column(name = "wf_id")
+ private String wfId = null;
+
+ @Basic
+ @Index
+ @Column(name = "status")
+ private String status = WorkflowAction.Status.PREP.toString();
+
+ @Basic
+ @Column(name = "last_check_time")
+ private java.sql.Timestamp lastCheckTimestamp;
+
+ @Basic
+ @Column(name = "end_time")
+ private java.sql.Timestamp endTimestamp = null;
+
+ @Basic
+ @Column(name = "start_time")
+ private java.sql.Timestamp startTimestamp = null;
+
+ @Basic
+ @Column(name = "execution_path")
+ private String executionPath = null;
+
+ @Basic
+ @Column(name = "pending")
+ private int pending = 0;
+
+ // @Temporal(TemporalType.TIME)
+ // @Column(name="pending_age",columnDefinition="timestamp default '0000-00-00 00:00:00'")
+ @Basic
+ @Index
+ @Column(name = "pending_age")
+ private java.sql.Timestamp pendingAgeTimestamp = null;
+
+ @Basic
+ @Column(name = "signal_value")
+ private String signalValue = null;
+
+ @Basic
+ @Column(name = "log_token")
+ private String logToken = null;
+
+ @Transient
private Date pendingAge;
- private Date lastCheckTime;
- private String signalValue;
- private String logToken;
+
+ @Column(name = "sla_xml")
+ @Lob
+ private String slaXml = null;
/**
* Default constructor.
@@ -54,12 +141,13 @@ public WorkflowActionBean() {
* @param dataOutput data output.
* @throws IOException thrown if the action bean could not be serialized.
*/
+
public void write(DataOutput dataOutput) throws IOException {
WritableUtils.writeStr(dataOutput, getId());
WritableUtils.writeStr(dataOutput, getName());
WritableUtils.writeStr(dataOutput, getType());
WritableUtils.writeStr(dataOutput, getConf());
- WritableUtils.writeStr(dataOutput, getStatus().toString());
+ WritableUtils.writeStr(dataOutput, getStatusStr());
dataOutput.writeInt(getRetries());
dataOutput.writeLong((getStartTime() != null) ? getStartTime().getTime() : -1);
dataOutput.writeLong((getEndTime() != null) ? getEndTime().getTime() : -1);
@@ -72,9 +160,9 @@ public void write(DataOutput dataOutput) throws IOException {
WritableUtils.writeStr(dataOutput, getConsoleUrl());
WritableUtils.writeStr(dataOutput, getErrorCode());
WritableUtils.writeStr(dataOutput, getErrorMessage());
- WritableUtils.writeStr(dataOutput, jobId);
+ WritableUtils.writeStr(dataOutput, wfId);
WritableUtils.writeStr(dataOutput, executionPath);
- dataOutput.writeBoolean(pending);
+ dataOutput.writeInt(pending);
dataOutput.writeLong((pendingAge != null) ? pendingAge.getTime() : -1);
WritableUtils.writeStr(dataOutput, signalValue);
WritableUtils.writeStr(dataOutput, logToken);
@@ -112,12 +200,13 @@ public void readFields(DataInput dataInput) throws IOException {
setTrackerUri(WritableUtils.readStr(dataInput));
setConsoleUrl(WritableUtils.readStr(dataInput));
setErrorInfo(WritableUtils.readStr(dataInput), WritableUtils.readStr(dataInput));
- jobId = WritableUtils.readStr(dataInput);
+ wfId = WritableUtils.readStr(dataInput);
executionPath = WritableUtils.readStr(dataInput);
- pending = dataInput.readBoolean();
+ pending = dataInput.readInt();
d = dataInput.readLong();
if (d != -1) {
pendingAge = new Date(d);
+ pendingAgeTimestamp = DateUtils.convertDateToTimestamp(pendingAge);
}
signalValue = WritableUtils.readStr(dataInput);
logToken = WritableUtils.readStr(dataInput);
@@ -139,15 +228,16 @@ public boolean isExecutionComplete() {
*/
public boolean isComplete() {
return getStatus() == WorkflowAction.Status.OK || getStatus() == WorkflowAction.Status.KILLED ||
- getStatus() == WorkflowAction.Status.ERROR;
+ getStatus() == WorkflowAction.Status.ERROR;
}
/**
* Set the action as pending and the current time as pending.
*/
public void setPending() {
- pending = true;
+ pending = 1;
pendingAge = new Date();
+ pendingAgeTimestamp = DateUtils.convertDateToTimestamp(pendingAge);
}
/**
@@ -157,6 +247,7 @@ public void setPending() {
*/
public void setPendingAge(Date pendingAge) {
this.pendingAge = pendingAge;
+ this.pendingAgeTimestamp = DateUtils.convertDateToTimestamp(pendingAge);
}
/**
@@ -165,7 +256,7 @@ public void setPendingAge(Date pendingAge) {
* @return the pending age of the action, null
if the action is not pending.
*/
public Date getPendingAge() {
- return pendingAge;
+ return DateUtils.toDate(pendingAgeTimestamp);
}
/**
@@ -174,15 +265,16 @@ public Date getPendingAge() {
* @return if the action is pending.
*/
public boolean isPending() {
- return pending;
+ return pending == 1 ? true : false;
}
/**
* Removes the pending flag from the action.
*/
public void resetPending() {
- pending = false;
+ pending = 0;
pendingAge = null;
+ pendingAgeTimestamp = null;
}
@@ -194,7 +286,7 @@ public void incRetries() {
}
/**
- * Set a tracking information for an action, and set the action status to {@link org.apache.oozie.client.WorkflowAction.Status#DONE}
+ * Set a tracking information for an action, and set the action status to {@link Action.Status#DONE}
*
* @param externalId external ID for the action.
* @param trackerUri tracker URI for the action.
@@ -211,10 +303,10 @@ public void setStartData(String externalId, String trackerUri, String consoleUrl
}
/**
- * Set the completion information for an action start. Sets the Action status to {@link org.apache.oozie.client.WorkflowAction.Status#DONE}
+ * Set the completion information for an action start. Sets the Action status to {@link Action.Status#DONE}
*
* @param externalStatus action external end status.
- * @param actionData action output data, null
if there is no action output data.
+ * @param actionData action output data, null
if there is no action output data.
*/
public void setExecutionData(String externalStatus, Properties actionData) {
setStatus(Status.DONE);
@@ -227,10 +319,9 @@ public void setExecutionData(String externalStatus, Properties actionData) {
/**
* Set the completion information for an action end.
*
- * @param status action status, {@link org.apache.oozie.client.WorkflowAction.Status#OK} or
- * {@link org.apache.oozie.client.WorkflowAction.Status#ERROR} or {@link org.apache.oozie.client.WorkflowAction.Status#KILLED}
- * @param signalValue the signal value. In most cases, the value should be
- * OK or ERROR.
+ * @param status action status, {@link Action.Status#OK} or {@link Action.Status#ERROR} or {@link
+ * Action.Status#KILLED}
+ * @param signalValue the signal value. In most cases, the value should be OK or ERROR.
*/
public void setEndData(Status status, String signalValue) {
if (status == null || (status != Status.OK && status != Status.ERROR && status != Status.KILLED)) {
@@ -244,13 +335,23 @@ public void setEndData(Status status, String signalValue) {
setSignalValue(ParamChecker.notEmpty(signalValue, "signalValue"));
}
+
/**
* Return the job Id.
*
* @return the job Id.
*/
public String getJobId() {
- return jobId;
+ return wfId;
+ }
+
+ /**
+     * Return the workflow job Id.
+     *
+     * @return the workflow job Id.
+ */
+ public String getWfId() {
+ return wfId;
}
/**
@@ -259,7 +360,28 @@ public String getJobId() {
* @param id jobId;
*/
public void setJobId(String id) {
- this.jobId = id;
+ this.wfId = id;
+ }
+
+ public String getSlaXml() {
+ return slaXml;
+ }
+
+ public void setSlaXml(String slaXml) {
+ this.slaXml = slaXml;
+ }
+
+ public void setStatus(Status val) {
+ this.status = val.toString();
+ super.setStatus(val);
+ }
+
+ public String getStatusStr() {
+ return status;
+ }
+
+ public Status getStatus() {
+ return Status.valueOf(this.status);
}
/**
@@ -281,20 +403,18 @@ public void setExecutionPath(String executionPath) {
}
/**
- * Return the signal value for the action.
- *
- * For decision nodes it is the choosen transition, for actions it is OK or ERROR.
+     * Return the signal value for the action. For decision nodes it is the chosen transition, for actions it is
+ * OK or ERROR.
*
- * @return the action signal value.
+ * @return the action signal value.
*/
public String getSignalValue() {
return signalValue;
}
/**
- * Set the signal value for the action.
- *
- * For decision nodes it is the choosen transition, for actions it is OK or ERROR.
+     * Set the signal value for the action. For decision nodes it is the chosen transition, for actions it is OK
+ * or ERROR.
*
* @param signalValue the action signal value.
*/
@@ -319,14 +439,51 @@ public String getLogToken() {
public void setLogToken(String logToken) {
this.logToken = logToken;
}
-
+
/**
* Return the action last check time
*
* @return the last check time
*/
public Date getLastCheckTime() {
- return lastCheckTime;
+ return DateUtils.toDate(lastCheckTimestamp);
+ }
+
+ /**
+ * Return the action last check time
+ *
+ * @return the last check time
+ */
+ public Timestamp getLastCheckTimestamp() {
+ return lastCheckTimestamp;
+ }
+
+ /**
+     * Return the action start time
+     *
+     * @return the start time
+ */
+ public Timestamp getStartTimestamp() {
+ return startTimestamp;
+ }
+
+ /**
+     * Return the action end time
+     *
+     * @return the end time
+ */
+ public Timestamp getEndTimestamp() {
+ return endTimestamp;
+ }
+
+
+ /**
+     * Return the action pending age timestamp
+     *
+     * @return the pending age timestamp
+ */
+ public Timestamp getPendingAgeTimestamp() {
+ return pendingAgeTimestamp;
}
/**
@@ -335,6 +492,29 @@ public Date getLastCheckTime() {
* @param lastCheckTime the last check time to set.
*/
public void setLastCheckTime(Date lastCheckTime) {
- this.lastCheckTime = lastCheckTime;
+ this.lastCheckTimestamp = DateUtils.convertDateToTimestamp(lastCheckTime);
}
+
+ public boolean getPending() {
+ return this.pending == 1 ? true : false;
+ }
+
+ public Date getStartTime() {
+ return DateUtils.toDate(startTimestamp);
+ }
+
+ public void setStartTime(Date startTime) {
+ super.setStartTime(startTime);
+ this.startTimestamp = DateUtils.convertDateToTimestamp(startTime);
+ }
+
+ public Date getEndTime() {
+ return DateUtils.toDate(endTimestamp);
+ }
+
+ public void setEndTime(Date endTime) {
+ super.setEndTime(endTime);
+ this.endTimestamp = DateUtils.convertDateToTimestamp(endTime);
+ }
+
}
diff --git a/core/src/main/java/org/apache/oozie/WorkflowJobBean.java b/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
index ba27f372d..3501c577d 100644
--- a/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
+++ b/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
@@ -18,8 +18,10 @@
package org.apache.oozie;
import org.apache.oozie.workflow.WorkflowInstance;
+import org.apache.oozie.workflow.lite.LiteWorkflowInstance;
import org.apache.oozie.client.rest.JsonWorkflowJob;
import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.WritableUtils;
import org.apache.hadoop.io.Writable;
@@ -28,11 +30,95 @@
import java.io.DataOutput;
import java.util.Date;
+import javax.persistence.Entity;
+import javax.persistence.Column;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.Basic;
+import javax.persistence.Lob;
+
+import java.sql.Timestamp;
+
+import org.apache.openjpa.persistence.jdbc.Index;
+
+@Entity
+@NamedQueries({
+
+ @NamedQuery(name = "UPDATE_WORKFLOW", query = "update WorkflowJobBean w set w.appName = :appName, w.appPath = :appPath, w.conf = :conf, w.group = :groupName, w.run = :run, w.user = :user, w.authToken = :authToken, w.createdTimestamp = :createdTime, w.endTimestamp = :endTime, w.externalId = :externalId, w.lastModifiedTimestamp = :lastModTime, w.logToken = :logToken, w.protoActionConf = :protoActionConf, w.slaXml =:slaXml, w.startTimestamp = :startTime, w.status = :status, w.wfInstance = :wfInstance where w.id = :id"),
+
+ @NamedQuery(name = "DELETE_WORKFLOW", query = "delete from WorkflowJobBean w where w.id = :id"),
+
+ @NamedQuery(name = "GET_WORKFLOWS", query = "select OBJECT(w) from WorkflowJobBean w order by w.startTimestamp desc"),
+
+ @NamedQuery(name = "GET_WORKFLOWS_COLUMNS", query = "select w.id, w.appName, w.status, w.run, w.user, w.group, w.createdTimestamp, "
+ + "w.startTimestamp, w.lastModifiedTimestamp, w.endTimestamp from WorkflowJobBean w order by w.startTimestamp desc"),
+
+ @NamedQuery(name = "GET_WORKFLOWS_COUNT", query = "select count(w) from WorkflowJobBean w"),
+
+ @NamedQuery(name = "GET_COMPLETED_WORKFLOWS_OLDER_THAN", query = "select w from WorkflowJobBean w where w.endTimestamp < :endTime"),
+
+ @NamedQuery(name = "GET_WORKFLOW", query = "select OBJECT(w) from WorkflowJobBean w where w.id = :id"),
+
+ @NamedQuery(name = "GET_WORKFLOW_FOR_UPDATE", query = "select OBJECT(w) from WorkflowJobBean w where w.id = :id"),
+
+ @NamedQuery(name = "GET_WORKFLOW_ID_FOR_EXTERNAL_ID", query = "select w.id from WorkflowJobBean w where w.externalId = :externalId"),
+
+ @NamedQuery(name = "GET_WORKFLOWS_COUNT_WITH_STATUS", query = "select count(w) from WorkflowJobBean w where w.status = :status"),
+
+ @NamedQuery(name = "GET_WORKFLOWS_COUNT_WITH_STATUS_IN_LAST_N_SECS", query = "select count(w) from WorkflowJobBean w where w.status = :status and w.lastModifiedTimestamp > :lastModTime")
+
+ })
public class WorkflowJobBean extends JsonWorkflowJob implements Writable {
- private String authToken;
- private String logToken;
- private WorkflowInstance workflowInstance;
- private String protoActionConf;
+
+ @Column(name = "proto_action_conf")
+ @Lob
+ private String protoActionConf = null;
+
+ @Basic
+ @Column(name = "log_token")
+ private String logToken = null;
+
+ @Basic
+ @Index
+ @Column(name = "external_id")
+ private String externalId = null;
+
+ @Basic
+ @Index
+ @Column(name = "status")
+ private String status = WorkflowJob.Status.PREP.toString();
+
+ @Basic
+ @Column(name = "created_time")
+ private java.sql.Timestamp createdTimestamp = null;
+
+ @Basic
+ @Column(name = "start_time")
+ private java.sql.Timestamp startTimestamp = null;
+
+ @Basic
+ @Index
+ @Column(name = "end_time")
+ private java.sql.Timestamp endTimestamp = null;
+
+ @Column(name = "auth_token")
+ @Lob
+ private String authToken = null;
+
+ @Basic
+ @Index
+ @Column(name = "last_modified_time")
+ private java.sql.Timestamp lastModifiedTimestamp = null;
+
+ // @Basic(fetch = FetchType.LAZY)
+ // @Column(name="wfinstance",columnDefinition="blob")
+ @Column(name = "wf_instance")
+ @Lob
+ private byte[] wfInstance = null;
+
+ @Column(name = "sla_xml")
+ @Lob
+ private String slaXml = null;
/**
* Default constructor.
@@ -51,10 +137,10 @@ public void write(DataOutput dataOutput) throws IOException {
WritableUtils.writeStr(dataOutput, getAppName());
WritableUtils.writeStr(dataOutput, getId());
WritableUtils.writeStr(dataOutput, getConf());
- WritableUtils.writeStr(dataOutput, getStatus().toString());
+ WritableUtils.writeStr(dataOutput, getStatusStr());
dataOutput.writeLong((getCreatedTime() != null) ? getCreatedTime().getTime() : -1);
dataOutput.writeLong((getStartTime() != null) ? getStartTime().getTime() : -1);
- dataOutput.writeLong((getLastModTime() != null) ? getLastModTime().getTime() : -1);
+ dataOutput.writeLong((getLastModifiedTime() != null) ? getLastModifiedTime().getTime() : -1);
dataOutput.writeLong((getEndTime() != null) ? getEndTime().getTime() : -1);
WritableUtils.writeStr(dataOutput, getUser());
WritableUtils.writeStr(dataOutput, getGroup());
@@ -76,17 +162,18 @@ public void readFields(DataInput dataInput) throws IOException {
setId(WritableUtils.readStr(dataInput));
setConf(WritableUtils.readStr(dataInput));
setStatus(WorkflowJob.Status.valueOf(WritableUtils.readStr(dataInput)));
+ // setStatus(WritableUtils.readStr(dataInput));
long d = dataInput.readLong();
if (d != -1) {
setCreatedTime(new Date(d));
}
d = dataInput.readLong();
if (d != -1) {
- setStartTime(new Date(d));
}
+ setStartTime(new Date(d));
d = dataInput.readLong();
- if(d != -1) {
- setLastModTime(new Date(d));
+ if (d != -1) {
+ setLastModifiedTime(new Date(d));
}
d = dataInput.readLong();
if (d != -1) {
@@ -98,6 +185,8 @@ public void readFields(DataInput dataInput) throws IOException {
authToken = WritableUtils.readStr(dataInput);
logToken = WritableUtils.readStr(dataInput);
protoActionConf = WritableUtils.readStr(dataInput);
+ setExternalId(getExternalId());
+ setProtoActionConf(protoActionConf);
}
public String getAuthToken() {
@@ -116,12 +205,32 @@ public void setLogToken(String logToken) {
this.logToken = logToken;
}
+ public String getSlaXml() {
+ return slaXml;
+ }
+
+ public void setSlaXml(String slaXml) {
+ this.slaXml = slaXml;
+ }
+
public WorkflowInstance getWorkflowInstance() {
- return workflowInstance;
+ return get(this.wfInstance);
+ }
+
+ public byte[] getWfInstance() {
+ return wfInstance;
}
public void setWorkflowInstance(WorkflowInstance workflowInstance) {
- this.workflowInstance = workflowInstance;
+ setWfInstance(workflowInstance);
+ }
+
+ public void setWfInstance(byte[] wfInstance) {
+ this.wfInstance = wfInstance;
+ }
+
+ public void setWfInstance(WorkflowInstance wfInstance) {
+ this.wfInstance = WritableUtils.toByteArray((LiteWorkflowInstance) wfInstance);
}
public String getProtoActionConf() {
@@ -131,4 +240,119 @@ public String getProtoActionConf() {
public void setProtoActionConf(String protoActionConf) {
this.protoActionConf = protoActionConf;
}
+
+ public String getprotoActionConf() {
+ return protoActionConf;
+ }
+
+ public String getlogToken() {
+ return logToken;
+ }
+
+ public String getStatusStr() {
+ return status;
+ }
+
+ public Timestamp getLastModifiedTimestamp() {
+ return lastModifiedTimestamp;
+ }
+
+ public Timestamp getStartTimestamp() {
+ return startTimestamp;
+ }
+
+ public Timestamp getCreatedTimestamp() {
+ return createdTimestamp;
+ }
+
+ public Timestamp getEndTimestamp() {
+ return endTimestamp;
+ }
+
+ @Override
+ public void setAppName(String val) {
+ super.setAppName(val);
+ }
+
+ @Override
+ public void setAppPath(String val) {
+ super.setAppPath(val);
+ }
+
+ @Override
+ public void setConf(String val) {
+ super.setConf(val);
+ }
+
+ @Override
+ public void setStatus(Status val) {
+ super.setStatus(val);
+ this.status = val.toString();
+ }
+
+ @Override
+ public Status getStatus() {
+ return Status.valueOf(this.status);
+ }
+
+ @Override
+ public void setExternalId(String externalId) {
+ super.setExternalId(externalId);
+ this.externalId = externalId;
+ }
+
+ @Override
+ public String getExternalId() {
+ return externalId;
+ }
+
+ @Override
+ public void setLastModifiedTime(Date lastModifiedTime) {
+ super.setLastModifiedTime(lastModifiedTime);
+ this.lastModifiedTimestamp = DateUtils.convertDateToTimestamp(lastModifiedTime);
+ }
+
+ @Override
+ public Date getLastModifiedTime() {
+ return DateUtils.toDate(lastModifiedTimestamp);
+ }
+
+ @Override
+ public Date getCreatedTime() {
+ return DateUtils.toDate(createdTimestamp);
+ }
+
+ @Override
+ public void setCreatedTime(Date createdTime) {
+ super.setCreatedTime(createdTime);
+ this.createdTimestamp = DateUtils.convertDateToTimestamp(createdTime);
+ }
+
+ @Override
+ public Date getStartTime() {
+ return DateUtils.toDate(startTimestamp);
+ }
+
+ @Override
+ public void setStartTime(Date startTime) {
+ super.setStartTime(startTime);
+ this.startTimestamp = DateUtils.convertDateToTimestamp(startTime);
+ }
+
+ @Override
+ public Date getEndTime() {
+ return DateUtils.toDate(endTimestamp);
+ }
+
+ @Override
+ public void setEndTime(Date endTime) {
+ super.setEndTime(endTime);
+ this.endTimestamp = DateUtils.convertDateToTimestamp(endTime);
+ }
+
+ private WorkflowInstance get(byte[] array) {
+ LiteWorkflowInstance pInstance = WritableUtils.fromByteArray(array, LiteWorkflowInstance.class);
+ return pInstance;
+ }
+
}
diff --git a/core/src/main/java/org/apache/oozie/WorkflowsInfo.java b/core/src/main/java/org/apache/oozie/WorkflowsInfo.java
index 1da21e0b0..8bfc635e7 100644
--- a/core/src/main/java/org/apache/oozie/WorkflowsInfo.java
+++ b/core/src/main/java/org/apache/oozie/WorkflowsInfo.java
@@ -53,9 +53,7 @@ public List getWorkflows() {
}
/**
- * Return the offset of the workflows being returned.
- *
- * For pagination purposes.
+ * Return the offset of the workflows being returned. For pagination purposes.
*
* @return the offset of the workflows being returned.
*/
@@ -64,9 +62,7 @@ public int getStart() {
}
/**
- * Return the number of the workflows being returned.
- *
- * For pagination purposes.
+ * Return the number of the workflows being returned. For pagination purposes.
*
* @return the number of the workflows being returned.
*/
@@ -75,9 +71,7 @@ public int getLen() {
}
/**
- * Return the total number of workflows.
- *
- * For pagination purposes.
+ * Return the total number of workflows. For pagination purposes.
*
* @return the total number of workflows.
*/
diff --git a/core/src/main/java/org/apache/oozie/XException.java b/core/src/main/java/org/apache/oozie/XException.java
index c7c5e8349..e0890619e 100644
--- a/core/src/main/java/org/apache/oozie/XException.java
+++ b/core/src/main/java/org/apache/oozie/XException.java
@@ -21,10 +21,7 @@
import org.apache.oozie.util.ParamChecker;
/**
- * Base exception for all Oozie exception.
- *
- * It requires error codes an captures the Log info at exception time.
- *
+ * Base exception for all Oozie exceptions. It requires error codes and captures the Log info at exception time.
* Error codes should be modeled in subclasses as Enums.
*/
public class XException extends Exception {
@@ -52,11 +49,9 @@ public XException(XException cause) {
}
/**
- * Create an EXception from an error code plus parameter to create the exception message.
- *
- * The value of {@link ErrorCode#getTemplate} is used as a StringFormat template for the exception message.
- *
- * If the last parameter is an Exception it is used as the exception cause.
+     * Create an Exception from an error code plus parameter to create the exception message. The value of {@link
+ * ErrorCode#getTemplate} is used as a StringFormat template for the exception message. If the last parameter
+ * is an Exception it is used as the exception cause.
*
* @param errorCode the error code for the exception.
* @param params parameters used to create the exception message together with the error code template. If the last
diff --git a/core/src/main/java/org/apache/oozie/action/ActionExecutor.java b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
index 95ca46b79..f5c3d0784 100644
--- a/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
@@ -35,9 +35,7 @@
import java.util.LinkedHashMap;
/**
- * Base action executor class.
- *
- * All the action executors should extend this class.
+ * Base action executor class. All the action executors should extend this class.
*/
public abstract class ActionExecutor {
@@ -99,19 +97,17 @@ public interface Context {
public ELEvaluator getELEvaluator();
/**
- * Set a workflow action variable.
- *
- * Convenience method that prefixes the variable name with the action name plus a '.'.
+ * Set a workflow action variable. Convenience method that prefixes the variable name with the action name
+ * plus a '.'.
*
- * @param name variable name.
+ * @param name variable name.
* @param value variable value, null
removes the variable.
*/
public void setVar(String name, String value);
/**
- * Get a workflow action variable.
- *
- * Convenience method that prefixes the variable name with the action name plus a '.'.
+ * Get a workflow action variable. Convenience method that prefixes the variable name with the action name
+ * plus a '.'.
*
* @param name variable name.
* @return the variable value, null
if not set.
@@ -128,19 +124,19 @@ public interface Context {
void setStartData(String externalId, String trackerUri, String consoleUrl);
/**
- * Set the action execution completion information for an action. The action
- * status is set to {@link org.apache.oozie.client.WorkflowAction.Status#DONE}
- *
+ * Set the action execution completion information for an action. The action status is set to {@link
+ * org.apache.oozie.client.WorkflowAction.Status#DONE}
+ *
* @param externalStatus the action external end status.
- * @param actionData the action data on completion, null
- * if none.
+ * @param actionData the action data on completion, null
if none.
*/
void setExecutionData(String externalStatus, Properties actionData);
/**
* Set the action end completion information for a completed action.
*
- * @param status the action end status, it can be {@link org.apache.oozie.client.WorkflowAction.Status#OK} or {@link org.apache.oozie.client.WorkflowAction.Status#ERROR}.
+ * @param status the action end status, it can be {@link org.apache.oozie.client.WorkflowAction.Status#OK} or
+ * {@link org.apache.oozie.client.WorkflowAction.Status#ERROR}.
* @param signalValue the action external end status.
*/
void setEndData(WorkflowAction.Status status, String signalValue);
@@ -178,11 +174,12 @@ public interface Context {
* @throws URISyntaxException
*/
public FileSystem getAppFileSystem() throws IOException, URISyntaxException;
+
+ public void setErrorInfo(String str, String exMsg);
}
/**
- * Define the default maximum number of retry attempts for transient errors
- * (total attempts = 1 + MAX_RETRIES).
+ * Define the default maximum number of retry attempts for transient errors (total attempts = 1 + MAX_RETRIES).
*/
public static final int MAX_RETRIES = 3;
@@ -207,7 +204,7 @@ protected ActionExecutor(String type) {
/**
* Create an action executor.
*
- * @param type action executor type.
+ * @param type action executor type.
* @param retryAttempts retry attempts.
* @param retryInterval retry interval, in seconds.
*/
@@ -242,14 +239,10 @@ public static void disableInit() {
}
/**
- * Invoked once at system initialization time.
- *
- * It can be used to register error information for the expected exceptions. Exceptions should be register from
- * subclasses to superclasses to ensure proper detection, same thing that it is done in a normal catch.
- *
- * This method should invoke the {@link #registerError} method to register all its possible errors.
- *
- * Subclasses overriding must invoke super.
+ * Invoked once at system initialization time. It can be used to register error information for the expected
+     * exceptions. Exceptions should be registered from subclasses to superclasses to ensure proper detection, same thing
+ * that it is done in a normal catch. This method should invoke the {@link #registerError} method to register
+ * all its possible errors. Subclasses overriding must invoke super.
*/
public void initActionType() {
ERROR_INFOS.put(getType(), new LinkedHashMap());
@@ -265,9 +258,8 @@ public String getOozieSystemId() {
}
/**
- * Return the runtime directory of the Oozie instance.
- *
- * The directory is created under TMP and it is always a new directory per system initialization.
+ * Return the runtime directory of the Oozie instance. The directory is created under TMP and it is always a
+ * new directory per system initialization.
*
* @return the runtime directory of the Oozie instance.
*/
@@ -276,9 +268,7 @@ public String getOozieRuntimeDir() {
}
/**
- * Return Oozie configuration.
- *
- * This is useful for actions that need access to configuration properties.
+ * Return Oozie configuration. This is useful for actions that need access to configuration properties.
*
* @return Oozie configuration.
*/
@@ -289,9 +279,8 @@ public Configuration getOozieConf() {
/**
* Register error handling information for an exception.
*
- * @param exClass excpetion class name (to work in case of a particular exception not being in the classpath,
- * needed to be able to handle multiple version of Hadoop or other JARs used by executors with
- * the same codebase).
+     * @param exClass exception class name (to work in case of a particular exception not being in the classpath, needed
+ * to be able to handle multiple version of Hadoop or other JARs used by executors with the same codebase).
* @param errorType error type for the exception.
* @param errorCode error code for the exception.
*/
@@ -358,8 +347,7 @@ public void setRetryInterval(long retryInterval) {
/**
* Utility method to handle exceptions in the {@link #start}, {@link #end}, {@link #kill} and {@link #check} methods
- *
- * It uses the error registry to convert exceptions to {@link ActionExecutorException}s.
+ * It uses the error registry to convert exceptions to {@link ActionExecutorException}s.
*
* @param ex exception to convert.
* @return ActionExecutorException converted exception.
@@ -372,7 +360,7 @@ protected ActionExecutorException convertException(Exception ex) {
for (Map.Entry errorInfo : ERROR_INFOS.get(getType()).entrySet()) {
if (errorInfo.getKey().isInstance(ex)) {
return new ActionExecutorException(errorInfo.getValue().errorType, errorInfo.getValue().errorCode,
- "{0}", ex.getMessage(), ex);
+ "{0}", ex.getMessage(), ex);
}
}
String errorCode = ex.getClass().getName();
@@ -401,6 +389,7 @@ protected String getActionSignal(WorkflowAction.Status status) {
/**
* Return the path that will be used to store action specific data
+ *
* @param jobId Worfklow ID
* @param action Action
* @param key An Identifier
@@ -429,49 +418,41 @@ public Path getActionDir(String jobId, WorkflowAction action, String key, boolea
}
/**
- * Start an action.
- *
- * The {@link Context#setStartData} method must be called within this method.
- *
- * If the action has completed, the {@link Context#setExecutionData} method must be called within this method.
+ * Start an action. The {@link Context#setStartData} method must be called within this method. If the
+ * action has completed, the {@link Context#setExecutionData} method must be called within this method.
*
* @param context executor context.
- * @param action the action to start.
+ * @param action the action to start.
* @throws ActionExecutorException thrown if the action could not start.
*/
public abstract void start(Context context, WorkflowAction action) throws ActionExecutorException;
/**
- * End an action after it has executed.
- *
- * The {@link Context#setEndData} method must be called within this method.
+ * End an action after it has executed. The {@link Context#setEndData} method must be called within this
+ * method.
*
* @param context executor context.
- * @param action the action to end.
+ * @param action the action to end.
* @throws ActionExecutorException thrown if the action could not end.
*/
public abstract void end(Context context, WorkflowAction action) throws ActionExecutorException;
/**
- * Check if an action has completed. This method must be implemented by Async Action Executors.
- *
- * If the action has completed, the {@link Context#setExecutionData} method must be called within this method.
- *
- * If the action has not completed, the {@link Context#setExternalStatus} method must be called within this method.
+ * Check if an action has completed. This method must be implemented by Async Action Executors. If the action
+ * has completed, the {@link Context#setExecutionData} method must be called within this method. If the action
+ * has not completed, the {@link Context#setExternalStatus} method must be called within this method.
*
* @param context executor context.
- * @param action the action to end.
+ * @param action the action to end.
* @throws ActionExecutorException thrown if the action could not be checked.
*/
public abstract void check(Context context, WorkflowAction action) throws ActionExecutorException;
/**
- * Kill an action.
- *
- * The {@link Context#setEndData} method must be called within this method.
+ * Kill an action. The {@link Context#setEndData} method must be called within this method.
*
* @param context executor context.
- * @param action the action to kill.
+ * @param action the action to kill.
* @throws ActionExecutorException thrown if the action could not be killed.
*/
public abstract void kill(Context context, WorkflowAction action) throws ActionExecutorException;
diff --git a/core/src/main/java/org/apache/oozie/action/ActionExecutorException.java b/core/src/main/java/org/apache/oozie/action/ActionExecutorException.java
index 576f05159..762884c72 100644
--- a/core/src/main/java/org/apache/oozie/action/ActionExecutorException.java
+++ b/core/src/main/java/org/apache/oozie/action/ActionExecutorException.java
@@ -21,9 +21,8 @@
import org.apache.oozie.util.XLog;
/**
- * ActionExecutor exception.
- *
- * The exception provides information regarding the transient/no-transient/fatal nature of the exception.
+ * ActionExecutor exception. The exception provides information regarding the transient/no-transient/fatal nature
+ * of the exception.
*/
public class ActionExecutorException extends Exception {
@@ -73,8 +72,7 @@ public ActionExecutorException(ErrorType errorType, String errorCode, String mes
/**
* Create an action executor exception.
*
- *
- * If the last parameter is an Exception it is used as the exception cause.
+ * If the last parameter is an Exception it is used as the exception cause.
*
* @param errorType the error type.
* @param errorCode the error code.
@@ -82,7 +80,7 @@ public ActionExecutorException(ErrorType errorType, String errorCode, String mes
* @param params parameters used to create the exception message together with the messageTemplate. If the last
* parameter is an Exception it is used as the exception cause.
*/
- public ActionExecutorException(ErrorType errorType, String errorCode, String messageTemplate, Object ... params) {
+ public ActionExecutorException(ErrorType errorType, String errorCode, String messageTemplate, Object... params) {
super(errorCode + ": " + XLog.format(messageTemplate, params), XLog.getCause(params));
this.errorType = ParamChecker.notNull(errorType, "errorType");
this.errorCode = ParamChecker.notEmpty(errorCode, "errorCode");
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/DoAs.java b/core/src/main/java/org/apache/oozie/action/hadoop/DoAs.java
index cf8ceda97..7c5cba0a8 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/DoAs.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/DoAs.java
@@ -20,6 +20,7 @@
import java.util.concurrent.Callable;
//TODO this class goes away when doing 20.100+ only
+
//TODO this class is for testing, but is here to allow selective compilation
public class DoAs implements Callable {
private String user;
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
index 3ffee63c4..fba76de18 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
@@ -35,9 +35,7 @@
import java.util.List;
/**
- * File system action executor.
- *
- * This executes the file system mkdir, move and delete commands
+ * File system action executor. This executes the file system mkdir, move and delete commands
*/
public class FsActionExecutor extends ActionExecutor {
@@ -57,9 +55,11 @@ void validatePath(Path path, boolean withScheme) throws ActionExecutorException
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS001",
"Missing scheme in path [{0}]", path);
}
- else if (!scheme.equals("hdfs")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS002",
- "Scheme [{0}] not support in path [{1}]", scheme, path);
+ else {
+ if (!scheme.equals("hdfs")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS002",
+ "Scheme [{0}] not support in path [{1}]", scheme, path);
+ }
}
}
else {
@@ -84,21 +84,27 @@ void doOperations(Context context, Element element) throws ActionExecutorExcepti
Path path = getPath(commandElement, "path");
mkdir(context, path);
}
- else if (command.equals("delete")) {
- Path path = getPath(commandElement, "path");
- delete(context, path);
- }
- else if (command.equals("move")) {
- Path source = getPath(commandElement, "source");
- Path target = getPath(commandElement, "target");
- move(context, source, target, recovery);
- }
- else if (command.equals("chmod")) {
- Path path = getPath(commandElement, "path");
- String str = commandElement.getAttributeValue("dir-files");
- boolean dirFiles = (str == null) || Boolean.parseBoolean(str);
- String permissionsMask = commandElement.getAttributeValue("permissions").trim();
- chmod(context, path, permissionsMask, dirFiles);
+ else {
+ if (command.equals("delete")) {
+ Path path = getPath(commandElement, "path");
+ delete(context, path);
+ }
+ else {
+ if (command.equals("move")) {
+ Path source = getPath(commandElement, "source");
+ Path target = getPath(commandElement, "target");
+ move(context, source, target, recovery);
+ }
+ else {
+ if (command.equals("chmod")) {
+ Path path = getPath(commandElement, "path");
+ String str = commandElement.getAttributeValue("dir-files");
+ boolean dirFiles = (str == null) || Boolean.parseBoolean(str);
+ String permissionsMask = commandElement.getAttributeValue("permissions").trim();
+ chmod(context, path, permissionsMask, dirFiles);
+ }
+ }
+ }
}
}
}
@@ -223,12 +229,14 @@ FsPermission createShortPermission(String permissions, Path path) throws ActionE
short omask = Short.parseShort(Integer.toString(mask), 8);
return new FsPermission(omask);
}
- else if (permissions.length() == 10) {
- return FsPermission.valueOf(permissions);
- }
else {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS010",
- "chmod, path [{0}] invalid permissions mask [{1}]", path, permissions);
+ if (permissions.length() == 10) {
+ return FsPermission.valueOf(permissions);
+ }
+ else {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS010",
+ "chmod, path [{0}] invalid permissions mask [{1}]", path, permissions);
+ }
}
}
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java b/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
index 670f1c35c..1b133ca30 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
@@ -62,10 +62,9 @@ private static FileStatus getFileStatus(String pathUri) throws Exception {
/**
* Return if a path exists.
- *
+ *
* @param pathUri file system path uri.
- * @return true
if the path exists, false
if it
- * does not.
+ * @return true
if the path exists, false
if it does not.
* @throws Exception
*/
public static boolean fs_exists(String pathUri) throws Exception {
@@ -77,10 +76,9 @@ public static boolean fs_exists(String pathUri) throws Exception {
/**
* Return if a path is a directory.
- *
+ *
* @param pathUri fs path uri.
- * @return true
if the path exists and it is a directory,
- * false
otherwise.
+ * @return true
if the path exists and it is a directory, false
otherwise.
* @throws Exception
*/
public static boolean fs_isDir(String pathUri) throws Exception {
@@ -94,10 +92,9 @@ public static boolean fs_isDir(String pathUri) throws Exception {
/**
* Return the len of a file.
- *
+ *
* @param pathUri file system path uri.
- * @return the file len in bytes, -1 if the file does not exist or if it is
- * a directory.
+ * @return the file len in bytes, -1 if the file does not exist or if it is a directory.
* @throws Exception
*/
public static long fs_fileSize(String pathUri) throws Exception {
@@ -111,10 +108,9 @@ public static long fs_fileSize(String pathUri) throws Exception {
/**
* Return the size of all files in the directory, it is not recursive.
- *
+ *
* @param pathUri file system path uri.
- * @return the size of all files in the directory, -1 if the directory does
- * not exist or if it is a file.
+ * @return the size of all files in the directory, -1 if the directory does not exist or if it is a file.
* @throws Exception
*/
public static long fs_dirSize(String pathUri) throws Exception {
@@ -144,10 +140,9 @@ public static long fs_dirSize(String pathUri) throws Exception {
/**
* Return the file block size in bytes.
- *
+ *
* @param pathUri file system path uri.
- * @return the block size of the file in bytes, -1 if the file does not
- * exist or if it is a directory.
+ * @return the block size of the file in bytes, -1 if the file does not exist or if it is a directory.
* @throws Exception
*/
public static long fs_blockSize(String pathUri) throws Exception {
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
index 751e6bd3f..c25daa7df 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
@@ -48,7 +48,7 @@ public static Map> hadoop_counters(String nodeName) th
Map> counters = (Map>) obj;
if (counters == null) {
counters = getCounters(nodeName);
- instance.setTransientVar(nodeName + WorkflowInstance.NODE_VAR_SEPARATOR + HADOOP_COUNTERS, counters);
+ instance.setTransientVar(nodeName + WorkflowInstance.NODE_VAR_SEPARATOR + HADOOP_COUNTERS, counters);
}
return counters;
}
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index 7fbcbbaf5..58bbd43c4 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -30,6 +30,8 @@
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.client.WorkflowAction.Status;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.service.Services;
@@ -40,6 +42,7 @@
import org.apache.oozie.util.XmlUtils;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.PropertiesUtils;
+import org.apache.openjpa.lib.log.Log;
import org.jdom.Element;
import org.jdom.Namespace;
import org.jdom.JDOMException;
@@ -60,6 +63,7 @@
import java.util.Set;
import java.util.ArrayList;
import java.util.Properties;
+import java.util.logging.Logger;
public class JavaActionExecutor extends ActionExecutor {
@@ -77,6 +81,7 @@ public class JavaActionExecutor extends ActionExecutor {
private static final String FAILED = "FAILED";
private static final String FAILED_KILLED = "FAILED/KILLED";
private static final String RUNNING = "RUNNING";
+ private XLog log = XLog.getLog(getClass());
static {
DISALLOWED_PROPERTIES.add(HADOOP_USER);
@@ -117,13 +122,13 @@ public void initActionType() {
registerError(UnknownHostException.class.getName(), ActionExecutorException.ErrorType.TRANSIENT, "JA001");
registerError(AccessControlException.class.getName(), ActionExecutorException.ErrorType.NON_TRANSIENT,
- "JA002");
+ "JA002");
registerError(DiskChecker.DiskOutOfSpaceException.class.getName(),
- ActionExecutorException.ErrorType.NON_TRANSIENT, "JA003");
+ ActionExecutorException.ErrorType.NON_TRANSIENT, "JA003");
registerError(org.apache.hadoop.hdfs.protocol.QuotaExceededException.class.getName(),
- ActionExecutorException.ErrorType.NON_TRANSIENT, "JA004");
+ ActionExecutorException.ErrorType.NON_TRANSIENT, "JA004");
registerError(org.apache.hadoop.hdfs.server.namenode.SafeModeException.class.getName(),
- ActionExecutorException.ErrorType.NON_TRANSIENT, "JA005");
+ ActionExecutorException.ErrorType.NON_TRANSIENT, "JA005");
registerError(ConnectException.class.getName(), ActionExecutorException.ErrorType.TRANSIENT, "JA006");
registerError(JDOMException.class.getName(), ActionExecutorException.ErrorType.ERROR, "JA007");
registerError(FileNotFoundException.class.getName(), ActionExecutorException.ErrorType.ERROR, "JA008");
@@ -138,7 +143,7 @@ void checkForDisallowedProps(Configuration conf, String confName) throws ActionE
for (String prop : DISALLOWED_PROPERTIES) {
if (conf.get(prop) != null) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA010",
- "Property [{0}] not allowed in action [{1}] configuration", prop, confName);
+ "Property [{0}] not allowed in action [{1}] configuration", prop, confName);
}
}
}
@@ -153,6 +158,7 @@ Configuration createBaseHadoopConf(Context context, Element actionXml) {
String nameNode = actionXml.getChild("name-node", ns).getTextTrim();
conf.set(HADOOP_JOB_TRACKER, jobTracker);
conf.set(HADOOP_NAME_NODE, nameNode);
+ conf.set("mapreduce.fileoutputcommitter.marksuccessfuljobs", "true");
return conf;
}
@@ -234,8 +240,8 @@ Configuration setupActionConf(Configuration actionConf, Context context, Element
Configuration addToCache(Configuration conf, Path appPath, String filePath, boolean archive)
throws ActionExecutorException {
+ Path path = null;
try {
- Path path;
if (filePath.startsWith("/")) {
path = new Path(filePath);
}
@@ -256,12 +262,14 @@ Configuration addToCache(Configuration conf, Path appPath, String filePath, bool
uri = new Path(path.toString() + "#" + fileName).toUri();
uri = new URI(uri.getPath());
}
- else if (!fileName.contains("#")) {
- path = new Path(uri.toString());
+ else {
+ if (!fileName.contains("#")) {
+ path = new Path(uri.toString());
- String user = conf.get("user.name");
- String group = conf.get("group.name");
- Services.get().get(HadoopAccessorService.class).addFileToClassPath(user, group, path, conf);
+ String user = conf.get("user.name");
+ String group = conf.get("group.name");
+ Services.get().get(HadoopAccessorService.class).addFileToClassPath(user, group, path, conf);
+ }
}
DistributedCache.addCacheFile(uri, conf);
}
@@ -269,6 +277,9 @@ else if (!fileName.contains("#")) {
return conf;
}
catch (Exception ex) {
+ XLog.getLog(getClass()).debug(
+ "Errors when add to DistributedCache. Path=" + path + ", archive=" + archive + ", conf="
+ + XmlUtils.prettyPrint(conf).toString());
throw convertException(ex);
}
}
@@ -343,9 +354,11 @@ void setLibFilesArchives(Context context, Element actionXml, Path appPath, Confi
String path = eProp.getTextTrim();
addToCache(conf, appPath, path, false);
}
- else if (eProp.getName().equals("archive")) {
- String path = eProp.getTextTrim();
- addToCache(conf, appPath, path, true);
+ else {
+ if (eProp.getName().equals("archive")) {
+ String path = eProp.getTextTrim();
+ addToCache(conf, appPath, path, true);
+ }
}
}
}
@@ -422,10 +435,11 @@ JobConf createLauncherConf(Context context, WorkflowAction action, Element actio
}
}
- //to disable cancelation of delegation token on launcher job end
+ // to disable cancelation of delegation token on launcher job end
launcherJobConf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
- //setting the group owning the Oozie job to allow anybody in that group to kill the jobs.
+ // setting the group owning the Oozie job to allow anybody in that
+ // group to kill the jobs.
launcherJobConf.set("mapreduce.job.acl-modify-job", context.getWorkflow().getGroup());
return launcherJobConf;
@@ -452,6 +466,8 @@ void injectLauncherCallback(Context context, Configuration launcherConf) {
}
void submitLauncher(Context context, WorkflowAction action) throws ActionExecutorException {
+ JobClient jobClient = null;
+ boolean exception = false;
try {
Path appPath = new Path(context.getWorkflow().getAppPath());
Element actionXml = XmlUtils.parseXml(action.getConf());
@@ -459,19 +475,21 @@ void submitLauncher(Context context, WorkflowAction action) throws ActionExecuto
// action job configuration
Configuration actionConf = createBaseHadoopConf(context, actionXml);
setupActionConf(actionConf, context, actionXml, appPath);
+ XLog.getLog(getClass()).debug("Setting LibFilesArchives ");
setLibFilesArchives(context, actionXml, appPath, actionConf);
String jobName = XLog.format("oozie:action:T={0}:W={1}:A={2}:ID={3}", getType(), context.getWorkflow()
.getAppName(), action.getName(), context.getWorkflow().getId());
actionConf.set("mapred.job.name", jobName);
injectActionCallback(context, actionConf);
- //setting the group owning the Oozie job to allow anybody in that group to kill the jobs.
+ // setting the group owning the Oozie job to allow anybody in that
+ // group to kill the jobs.
actionConf.set("mapreduce.job.acl-modify-job", context.getWorkflow().getGroup());
JobConf launcherJobConf = createLauncherConf(context, action, actionXml, actionConf);
injectLauncherCallback(context, launcherJobConf);
-
- JobClient jobClient = createJobClient(context, launcherJobConf);
+ XLog.getLog(getClass()).debug("Creating Job Client for action " + action.getId());
+ jobClient = createJobClient(context, launcherJobConf);
String launcherId = LauncherMapper.getRecoveryId(launcherJobConf, context.getActionDir(), context
.getRecoveryId());
boolean alreadyRunning = launcherId != null;
@@ -482,17 +500,23 @@ void submitLauncher(Context context, WorkflowAction action) throws ActionExecuto
if (runningJob == null) {
String jobTracker = launcherJobConf.get("mapred.job.tracker");
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
- "unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
+ "unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
}
}
else {
prepare(context, actionXml);
+ XLog.getLog(getClass()).debug("Submitting the job through Job Client for action " + action.getId());
- //setting up propagation of the delegation token.
+ // setting up propagation of the delegation token.
AuthHelper.get().set(jobClient, launcherJobConf);
runningJob = jobClient.submitJob(launcherJobConf);
+ if (runningJob == null) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
+ "Error submitting launcher for action [{0}]", action.getId());
+ }
launcherId = runningJob.getID().toString();
+ XLog.getLog(getClass()).debug("After submission get the launcherId " + launcherId);
}
String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
@@ -500,26 +524,49 @@ void submitLauncher(Context context, WorkflowAction action) throws ActionExecuto
context.setStartData(launcherId, jobTracker, consoleUrl);
}
catch (Exception ex) {
+ exception = true;
throw convertException(ex);
}
+ finally {
+ if (jobClient != null) {
+ try {
+ jobClient.close();
+ }
+ catch (Exception e) {
+ if (exception) {
+ log.error("JobClient error: ", e);
+ }
+ else {
+ throw convertException(e);
+ }
+ }
+ }
+ }
}
void prepare(Context context, Element actionXml) throws ActionExecutorException {
Namespace ns = actionXml.getNamespace();
Element prepare = actionXml.getChild("prepare", ns);
if (prepare != null) {
+ XLog.getLog(getClass()).debug("Preparing the action with FileSystem operation");
FsActionExecutor fsAe = new FsActionExecutor();
fsAe.doOperations(context, prepare);
+ XLog.getLog(getClass()).debug("FS Operation is completed");
}
}
@Override
public void start(Context context, WorkflowAction action) throws ActionExecutorException {
try {
+ XLog.getLog(getClass()).debug("Starting action " + action.getId() + " getting Action File System");
FileSystem actionFs = getActionFileSystem(context, action);
+ XLog.getLog(getClass()).debug("Preparing action Dir through copying " + context.getActionDir());
prepareActionDir(actionFs, context);
+ XLog.getLog(getClass()).debug("Action Dir is ready. Submitting the action ");
submitLauncher(context, action);
+ XLog.getLog(getClass()).debug("Action submit completed. Performing check ");
check(context, action);
+ XLog.getLog(getClass()).debug("Action check is done after submission");
}
catch (Exception ex) {
throw convertException(ex);
@@ -531,7 +578,7 @@ public void end(Context context, WorkflowAction action) throws ActionExecutorExc
try {
String externalStatus = action.getExternalStatus();
WorkflowAction.Status status = externalStatus.equals(SUCCEEDED) ? WorkflowAction.Status.OK
- : WorkflowAction.Status.ERROR;
+ : WorkflowAction.Status.ERROR;
context.setEndData(status, getActionSignal(status));
}
catch (Exception ex) {
@@ -556,20 +603,22 @@ protected JobClient createJobClient(Context context, JobConf jobConf) throws IOE
@Override
public void check(Context context, WorkflowAction action) throws ActionExecutorException {
+ JobClient jobClient = null;
+ boolean exception = false;
try {
Element actionXml = XmlUtils.parseXml(action.getConf());
FileSystem actionFs = getActionFileSystem(context, actionXml);
Configuration conf = createBaseHadoopConf(context, actionXml);
JobConf jobConf = new JobConf();
XConfiguration.copy(conf, jobConf);
- JobClient jobClient = createJobClient(context, jobConf);
+ jobClient = createJobClient(context, jobConf);
RunningJob runningJob = jobClient.getJob(JobID.forName(action.getExternalId()));
if (runningJob == null) {
context.setExternalStatus(FAILED);
context.setExecutionData(FAILED, null);
throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
- "Unknown hadoop job [{0}] associated with action [{1}]. Failing this action!", action
- .getExternalId(), action.getId());
+ "Unknown hadoop job [{0}] associated with action [{1}]. Failing this action!", action
+ .getExternalId(), action.getId());
}
if (runningJob.isComplete()) {
Path actionDir = context.getActionDir();
@@ -585,13 +634,20 @@ public void check(Context context, WorkflowAction action) throws ActionExecutorE
reader.close();
String newId = props.getProperty("id");
runningJob = jobClient.getJob(JobID.forName(newId));
+ if (runningJob == null) {
+ context.setExternalStatus(FAILED);
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
+ "Unknown hadoop job [{0}] associated with action [{1}]. Failing this action!", newId,
+ action.getId());
+ }
+
context.setStartData(newId, action.getTrackerUri(), runningJob.getTrackingURL());
- XLog.getLog(getClass()).info(XLog.STD, "External ID swap, old ID [{0}] new ID [{1}]",
- launcherId, newId);
+ XLog.getLog(getClass()).info(XLog.STD, "External ID swap, old ID [{0}] new ID [{1}]", launcherId,
+ newId);
}
if (runningJob.isComplete()) {
XLog.getLog(getClass()).info(XLog.STD, "action completed, external ID [{0}]",
- action.getExternalId());
+ action.getExternalId());
if (runningJob.isSuccessful() && LauncherMapper.isMainSuccessful(runningJob)) {
Properties props = null;
if (getCaptureOutput(action)) {
@@ -619,6 +675,7 @@ public void check(Context context, WorkflowAction action) throws ActionExecutorE
errorReason = props.getProperty("error.reason");
log.warn("Launcher ERROR, reason: {0}", errorReason);
String exMsg = props.getProperty("exception.message");
+ context.setErrorInfo("JA018", exMsg);
String exStackTrace = props.getProperty("exception.stacktrace");
if (exMsg != null) {
log.warn("Launcher exception: {0}{E}{1}", exMsg, exStackTrace);
@@ -635,20 +692,35 @@ public void check(Context context, WorkflowAction action) throws ActionExecutorE
else {
context.setExternalStatus(RUNNING);
XLog.getLog(getClass()).info(XLog.STD, "checking action, external ID [{0}] status [{1}]",
- action.getExternalId(), action.getExternalStatus());
+ action.getExternalId(), action.getExternalStatus());
}
}
else {
context.setExternalStatus(RUNNING);
XLog.getLog(getClass()).info(XLog.STD, "checking action, external ID [{0}] status [{1}]",
- action.getExternalId(), action.getExternalStatus());
+ action.getExternalId(), action.getExternalStatus());
}
}
catch (Exception ex) {
XLog.getLog(getClass()).warn("Exception in check(). Message[{0}]", ex.getMessage(), ex);
-
+ exception = true;
throw convertException(ex);
}
+ finally {
+ if (jobClient != null) {
+ try {
+ jobClient.close();
+ }
+ catch (Exception e) {
+ if (exception) {
+ log.error("JobClient error: ", e);
+ }
+ else {
+ throw convertException(e);
+ }
+ }
+ }
+ }
}
protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
@@ -660,27 +732,40 @@ protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
@Override
public void kill(Context context, WorkflowAction action) throws ActionExecutorException {
+ JobClient jobClient = null;
+ boolean exception = false;
try {
Element actionXml = XmlUtils.parseXml(action.getConf());
Configuration conf = createBaseHadoopConf(context, actionXml);
JobConf jobConf = new JobConf();
XConfiguration.copy(conf, jobConf);
- JobClient jobClient = createJobClient(context, jobConf);
+ jobClient = createJobClient(context, jobConf);
RunningJob runningJob = jobClient.getJob(JobID.forName(action.getExternalId()));
- runningJob.killJob();
+ if (runningJob != null) {
+ runningJob.killJob();
+ }
context.setExternalStatus(KILLED);
context.setExecutionData(KILLED, null);
}
catch (Exception ex) {
+ exception = true;
throw convertException(ex);
}
finally {
try {
FileSystem actionFs = getActionFileSystem(context, action);
cleanUpActionDir(actionFs, context);
+ if (jobClient != null) {
+ jobClient.close();
+ }
}
catch (Exception ex) {
- throw convertException(ex);
+ if (exception) {
+ log.error("Error: ", ex);
+ }
+ else {
+ throw convertException(ex);
+ }
}
}
}
@@ -698,4 +783,4 @@ public boolean isCompleted(String externalStatus) {
return FINAL_STATUS.contains(externalStatus);
}
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapper.java b/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapper.java
index a08a15350..93977cd23 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapper.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapper.java
@@ -115,9 +115,9 @@ public static String getRecoveryId(Configuration launcherConf, Path actionDir, S
String jobId = null;
Path recoveryFile = new Path(actionDir, recoveryId);
//FileSystem fs = FileSystem.get(launcherConf);
- FileSystem fs = Services.get().get(HadoopAccessorService.class)
- .createFileSystem(launcherConf.get("user.name"),
- launcherConf.get("group.name"), launcherConf);
+ FileSystem fs = Services.get().get(HadoopAccessorService.class)
+ .createFileSystem(launcherConf.get("user.name"),
+ launcherConf.get("group.name"), launcherConf);
if (fs.exists(recoveryFile)) {
InputStream is = fs.open(recoveryFile);
@@ -145,7 +145,7 @@ public static void setupMaxOutputData(Configuration launcherConf, int maxOutputD
}
public static void setupLauncherInfo(JobConf launcherConf, String jobId, String actionId, Path actionDir,
- String recoveryId, Configuration actionConf) throws IOException {
+ String recoveryId, Configuration actionConf) throws IOException {
launcherConf.setMapperClass(LauncherMapper.class);
launcherConf.setSpeculativeExecution(false);
@@ -238,7 +238,7 @@ public static boolean hasIdSwap(RunningJob runningJob, String user, String group
Path p = getIdSwapPath(actionDir);
// log.debug("Checking for newId file in: [{0}]", p);
- FileSystem fs = Services.get().get(HadoopAccessorService.class).createFileSystem(user, group,p. toUri(),
+ FileSystem fs = Services.get().get(HadoopAccessorService.class).createFileSystem(user, group, p.toUri(),
new Configuration());
if (fs.exists(p)) {
log.debug("Hadoop Counters is null, but found newID file.");
@@ -314,6 +314,8 @@ public void map(K1 key, V1 value, OutputCollector collector, Reporter re
String[] args = getMainArguments(getJobConf());
+ printContentsOfCurrentDir();
+
System.out.println();
System.out.println("Oozie Java/Map-Reduce/Pig action launcher-job configuration");
System.out.println("=================================================================");
@@ -353,14 +355,15 @@ public void map(K1 key, V1 value, OutputCollector collector, Reporter re
catch (InvocationTargetException ex) {
if (SecurityException.class.isInstance(ex.getCause())) {
if (LauncherSecurityManager.getExitInvoked()) {
- System.out.println("Intercepting System.exit(" + LauncherSecurityManager.getExitCode() +
- ")");
- System.err.println("Intercepting System.exit(" + LauncherSecurityManager.getExitCode() +
- ")");
- // if 0 main() method finished successfully, ignoring
+ System.out.println("Intercepting System.exit(" + LauncherSecurityManager.getExitCode()
+ + ")");
+ System.err.println("Intercepting System.exit(" + LauncherSecurityManager.getExitCode()
+ + ")");
+ // if 0 main() method finished successfully
+ // ignoring
if (LauncherSecurityManager.getExitCode() != 0) {
- errorMessage = msgPrefix + "exit code [" + LauncherSecurityManager.getExitCode() +
- "]";
+ errorMessage = msgPrefix + "exit code [" + LauncherSecurityManager.getExitCode()
+ + "]";
errorCause = null;
}
}
@@ -535,6 +538,40 @@ private void failLauncher(String reason, Throwable ex) throws LauncherException
}
}
+ /**
+ * Print files and directories in current directory. Will list files in the sub-directory (only 1 level deep)
+ */
+ protected void printContentsOfCurrentDir() {
+ File folder = new File(".");
+ System.out.println();
+ System.out.println("Files in current dir:" + folder.getAbsolutePath());
+ System.out.println("======================");
+
+ File[] listOfFiles = folder.listFiles();
+ for (File fileName : listOfFiles) {
+ if (fileName.isFile()) {
+ System.out.println("File: " + fileName.getName());
+ }
+ else {
+ if (fileName.isDirectory()) {
+ System.out.println("Dir: " + fileName.getName());
+ File subDir = new File(fileName.getName());
+ File[] moreFiles = subDir.listFiles();
+ for (File subFileName : moreFiles) {
+ if (subFileName.isFile()) {
+ System.out.println(" File: " + subFileName.getName());
+ }
+ else {
+ if (subFileName.isDirectory()) {
+ System.out.println(" Dir: " + subFileName.getName());
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
}
class LauncherSecurityManager extends SecurityManager {
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
index f26079d85..79ca63001 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
@@ -38,6 +38,7 @@
public class MapReduceActionExecutor extends JavaActionExecutor {
public static final String HADOOP_COUNTERS = "hadoop.counters";
+ private XLog log = XLog.getLog(getClass());
public MapReduceActionExecutor() {
super("map-reduce");
@@ -58,17 +59,18 @@ protected String getLauncherMain(Configuration launcherConf, Element actionXml)
if (actionXml.getChild("streaming", ns) != null) {
mainClass = launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, StreamingMain.class.getName());
}
- else if (actionXml.getChild("pipes", ns) != null) {
- mainClass = launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, PipesMain.class.getName());
- }
else {
- mainClass = launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, MapReduceMain.class.getName());
+ if (actionXml.getChild("pipes", ns) != null) {
+ mainClass = launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, PipesMain.class.getName());
+ }
+ else {
+ mainClass = launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, MapReduceMain.class.getName());
+ }
}
return mainClass;
}
- Configuration setupLauncherConf(Configuration conf, Element actionXml, Path appPath)
- throws ActionExecutorException {
+ Configuration setupLauncherConf(Configuration conf, Element actionXml, Path appPath) throws ActionExecutorException {
super.setupLauncherConf(conf, actionXml, appPath);
conf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", true);
return conf;
@@ -95,15 +97,17 @@ Configuration setupActionConf(Configuration actionConf, Context context, Element
}
StreamingMain.setStreaming(actionConf, mapper, reducer, recordReader, recordReaderMapping, env);
}
- else if (actionXml.getChild("pipes", ns) != null) {
- Element pipesXml = actionXml.getChild("pipes", ns);
- String map = pipesXml.getChildTextTrim("map", ns);
- String reduce = pipesXml.getChildTextTrim("reduce", ns);
- String inputFormat = pipesXml.getChildTextTrim("inputformat", ns);
- String partitioner = pipesXml.getChildTextTrim("partitioner", ns);
- String writer = pipesXml.getChildTextTrim("writer", ns);
- String program = pipesXml.getChildTextTrim("program", ns);
- PipesMain.setPipes(actionConf, map, reduce, inputFormat, partitioner, writer, program);
+ else {
+ if (actionXml.getChild("pipes", ns) != null) {
+ Element pipesXml = actionXml.getChild("pipes", ns);
+ String map = pipesXml.getChildTextTrim("map", ns);
+ String reduce = pipesXml.getChildTextTrim("reduce", ns);
+ String inputFormat = pipesXml.getChildTextTrim("inputformat", ns);
+ String partitioner = pipesXml.getChildTextTrim("partitioner", ns);
+ String writer = pipesXml.getChildTextTrim("writer", ns);
+ String program = pipesXml.getChildTextTrim("program", ns);
+ PipesMain.setPipes(actionConf, map, reduce, inputFormat, partitioner, writer, program);
+ }
}
actionConf = super.setupActionConf(actionConf, context, actionXml, appPath);
return actionConf;
@@ -112,18 +116,26 @@ else if (actionXml.getChild("pipes", ns) != null) {
@Override
public void end(Context context, WorkflowAction action) throws ActionExecutorException {
super.end(context, action);
+ JobClient jobClient = null;
+ boolean exception = false;
try {
if (action.getStatus() == WorkflowAction.Status.OK) {
Element actionXml = XmlUtils.parseXml(action.getConf());
Configuration conf = createBaseHadoopConf(context, actionXml);
JobConf jobConf = new JobConf();
XConfiguration.copy(conf, jobConf);
- JobClient jobClient = createJobClient(context, jobConf);
+ jobClient = createJobClient(context, jobConf);
RunningJob runningJob = jobClient.getJob(JobID.forName(action.getExternalId()));
+ if (runningJob == null) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "MR002",
+ "Unknown hadoop job [{0}] associated with action [{1}]. Failing this action!", action
+ .getExternalId(), action.getId());
+ }
+
// TODO this has to be done in a better way
if (!runningJob.getJobName().startsWith("oozie:action:")) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "MR001",
- "ID swap should have happened in launcher job [{0}]", action.getExternalId());
+ "ID swap should have happened in launcher job [{0}]", action.getExternalId());
}
Counters counters = runningJob.getCounters();
if (counters != null) {
@@ -139,8 +151,24 @@ public void end(Context context, WorkflowAction action) throws ActionExecutorExc
}
}
catch (Exception ex) {
+ exception = true;
throw convertException(ex);
}
+ finally {
+ if (jobClient != null) {
+ try {
+ jobClient.close();
+ }
+ catch (Exception e) {
+ if (exception) {
+ log.error("JobClient error: ", e);
+ }
+ else {
+ throw convertException(e);
+ }
+ }
+ }
+ }
}
@SuppressWarnings("unchecked")
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
index 3cde41a84..ec4c90745 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
@@ -44,7 +44,7 @@ protected void run(String[] args) throws Exception {
System.out.println("Oozie Map-Reduce action configuration");
System.out.println("=======================");
- //loading action conf prepared by Oozie
+ // loading action conf prepared by Oozie
Configuration actionConf = new Configuration(false);
actionConf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml")));
@@ -57,14 +57,12 @@ protected void run(String[] args) throws Exception {
System.out.println("------------------------");
System.out.println();
-
-
System.out.println("Submitting Oozie action Map-Reduce job");
System.out.println();
- //submitting job
+ // submitting job
RunningJob runningJob = submitJob(actionConf);
- //propagating job id back to Oozie
+ // propagating job id back to Oozie
String jobId = runningJob.getID().toString();
Properties props = new Properties();
props.setProperty("id", jobId);
@@ -86,14 +84,38 @@ protected void addActionConf(JobConf jobConf, Configuration actionConf) {
protected RunningJob submitJob(Configuration actionConf) throws Exception {
JobConf jobConf = new JobConf();
addActionConf(jobConf, actionConf);
-
- //propagate delegation related props from launcher job to MR job
+
+ // propagate delegation related props from launcher job to MR job
if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
jobConf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
}
-
- JobClient jobClient = createJobClient(jobConf);
- return jobClient.submitJob(jobConf);
+ JobClient jobClient = null;
+ RunningJob runJob = null;
+ boolean exception = false;
+ try {
+ jobClient = createJobClient(jobConf);
+ runJob = jobClient.submitJob(jobConf);
+ }
+ catch (Exception ex) {
+ exception = true;
+ throw ex;
+ }
+ finally {
+ try {
+ if (jobClient != null) {
+ jobClient.close();
+ }
+ }
+ catch (Exception ex) {
+ if (exception) {
+ System.out.println("JobClient Error: " + ex);
+ }
+ else {
+ throw ex;
+ }
+ }
+ }
+ return runJob;
}
@SuppressWarnings("unchecked")
@@ -101,7 +123,8 @@ protected JobClient createJobClient(JobConf jobConf) throws IOException {
return new JobClient(jobConf);
}
- // allows any character in the value, the conf.setStrings() does not allow commas
+ // allows any character in the value, the conf.setStrings() does not allow
+ // commas
public static void setStrings(Configuration conf, String key, String[] values) {
if (values != null) {
conf.setInt(key + ".size", values.length);
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
index 8771850fe..c81f21a8c 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
@@ -73,8 +73,15 @@ Configuration setupActionConf(Configuration actionConf, Context context, Element
for (int i = 0; i < params.size(); i++) {
strParams[i] = params.get(i).getTextTrim();
}
-
- PigMain.setPigScript(actionConf, pigName, strParams);
+ String[] strArgs = null;
+ List eArgs = actionXml.getChildren("argument", ns);
+ if (eArgs != null && eArgs.size() > 0) {
+ strArgs = new String[eArgs.size()];
+ for (int i = 0; i < eArgs.size(); i++) {
+ strArgs[i] = eArgs.get(i).getTextTrim();
+ }
+ }
+ PigMain.setPigScript(actionConf, pigName, strParams, strArgs);
return actionConf;
}
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/PigMain.java b/core/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index 39761f74c..1bceb2f9f 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -21,6 +21,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import java.io.FileNotFoundException;
import java.io.OutputStream;
import java.io.FileOutputStream;
import java.io.BufferedReader;
@@ -163,6 +164,11 @@ protected void run(String[] args) throws Exception {
arguments.add("-logfile");
arguments.add(pigLog);
+ String[] pigArgs = MapReduceMain.getStrings(actionConf, "oozie.pig.args");
+ for (String pigArg : pigArgs) {
+ arguments.add(pigArg);
+ }
+
System.out.println("Pig command arguments :");
for (String arg : arguments) {
System.out.println(" " + arg);
@@ -174,12 +180,7 @@ protected void run(String[] args) throws Exception {
System.out.println();
System.out.flush();
- String userName = System.getProperty("user.name");
try {
- //TODO Pig should fix this
- //Pig somehow is taking user from Java SYS props, if task is running with cluster UNIX user this is
- //a problem, because of this we are setting here the user.name to the oozie job user.name
- System.setProperty("user.name", pigProperties.getProperty("user.name"));
runPigJob(arguments.toArray(new String[arguments.size()]));
}
catch (SecurityException ex) {
@@ -188,20 +189,22 @@ protected void run(String[] args) throws Exception {
System.err.println();
System.err.println("Pig logfile dump:");
System.err.println();
- BufferedReader reader = new BufferedReader(new FileReader(pigLog));
- line = reader.readLine();
- while (line != null) {
- System.err.println(line);
+ try {
+ BufferedReader reader = new BufferedReader(new FileReader(pigLog));
line = reader.readLine();
+ while (line != null) {
+ System.err.println(line);
+ line = reader.readLine();
+ }
+ reader.close();
+ }
+ catch (FileNotFoundException e) {
+ System.err.println("pig log file: " + pigLog + " not found.");
}
- reader.close();
throw ex;
}
}
}
- finally {
- System.setProperty("user.name", userName);
- }
System.out.println();
System.out.println("<<< Invocation of Pig command completed <<<");
@@ -222,9 +225,10 @@ protected void runPigJob(String[] args) throws Exception {
Main.main(args);
}
- public static void setPigScript(Configuration conf, String script, String[] params) {
+ public static void setPigScript(Configuration conf, String script, String[] params, String[] args) {
conf.set("oozie.pig.script", script);
MapReduceMain.setStrings(conf, "oozie.pig.params", params);
+ MapReduceMain.setStrings(conf, "oozie.pig.args", args);
}
private static final String JOB_ID_LOG_PREFIX = "HadoopJobId: ";
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/PipesMain.java b/core/src/main/java/org/apache/oozie/action/hadoop/PipesMain.java
index 8fcc76d2f..730d00dff 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/PipesMain.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/PipesMain.java
@@ -54,7 +54,7 @@ protected RunningJob submitJob(Configuration actionConf) throws Exception {
value = actionConf.get("oozie.pipes.writer");
if (value != null) {
jobConf.setBoolean("hadoop.pipes.java.recordwriter", true);
- jobConf.set("mapred.output.format.class", value);
+ jobConf.set("mapred.output.format.class", value);
}
value = actionConf.get("oozie.pipes.program");
if (value != null) {
@@ -70,7 +70,7 @@ protected RunningJob submitJob(Configuration actionConf) throws Exception {
if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
jobConf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
}
-
+
return Submitter.jobSubmit(jobConf);
}
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/StreamingMain.java b/core/src/main/java/org/apache/oozie/action/hadoop/StreamingMain.java
index fefb4a5c6..79ff21eab 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/StreamingMain.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/StreamingMain.java
@@ -74,13 +74,38 @@ protected RunningJob submitJob(Configuration actionConf) throws Exception {
addActionConf(jobConf, actionConf);
- //propagate delegation related props from launcher job to MR job
+ // propagate delegation related props from launcher job to MR job
if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
jobConf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
}
-
- JobClient jobClient = createJobClient(jobConf);
- return jobClient.submitJob(jobConf);
+
+ JobClient jobClient = null;
+ RunningJob runJob = null;
+ boolean exception = false;
+ try {
+ jobClient = createJobClient(jobConf);
+ runJob = jobClient.submitJob(jobConf);
+ }
+ catch (Exception ex) {
+ exception = true;
+ throw ex;
+ }
+ finally {
+ try {
+ if (jobClient != null) {
+ jobClient.close();
+ }
+ }
+ catch (Exception ex) {
+ if (exception) {
+ System.out.println("JobClient Error: " + ex);
+ }
+ else {
+ throw ex;
+ }
+ }
+ }
+ return runJob;
}
public static void setStreaming(Configuration conf, String mapper, String reducer, String recordReader,
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosAuthHelper.java b/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosAuthHelper.java
index da3d6acad..f99a399e7 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosAuthHelper.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosAuthHelper.java
@@ -28,9 +28,9 @@
public class KerberosAuthHelper extends AuthHelper {
- public void set(JobClient jobClient, JobConf launcherJobConf) throws IOException, InterruptedException {
+ public void set(JobClient jobClient, JobConf launcherJobConf) throws IOException, InterruptedException {
Token mrdt = jobClient.getDelegationToken(new Text("mr token"));
- launcherJobConf.getCredentials().addToken( new Text("mr token"), mrdt);
+ launcherJobConf.getCredentials().addToken(new Text("mr token"), mrdt);
}
}
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosDoAs.java b/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosDoAs.java
index 90aa6043b..06dbc2853 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosDoAs.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/kerberos/KerberosDoAs.java
@@ -24,6 +24,7 @@
import java.util.concurrent.Callable;
//TODO this class goes away when doing 20.100+ only
+
//TODO this class is for testing, but is here to allow selective compilation
public class KerberosDoAs extends DoAs {
diff --git a/core/src/main/java/org/apache/oozie/action/oozie/SubWorkflowActionExecutor.java b/core/src/main/java/org/apache/oozie/action/oozie/SubWorkflowActionExecutor.java
index d9b373695..0a6436c02 100644
--- a/core/src/main/java/org/apache/oozie/action/oozie/SubWorkflowActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/oozie/SubWorkflowActionExecutor.java
@@ -24,9 +24,12 @@
import org.apache.oozie.LocalOozieClient;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.service.DagEngineService;
+import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.util.PropertiesUtils;
import org.apache.oozie.util.XmlUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
@@ -44,6 +47,20 @@ public class SubWorkflowActionExecutor extends ActionExecutor {
public static final String ACTION_TYPE = "sub-workflow";
public static final String LOCAL = "local";
+ private static final Set DISALLOWED_DEFAULT_PROPERTIES = new HashSet();
+
+ static {
+ String[] badUserProps = {PropertiesUtils.DAYS, PropertiesUtils.HOURS, PropertiesUtils.MINUTES,
+ PropertiesUtils.KB, PropertiesUtils.MB, PropertiesUtils.GB, PropertiesUtils.TB, PropertiesUtils.PB,
+ PropertiesUtils.RECORDS, PropertiesUtils.MAP_IN, PropertiesUtils.MAP_OUT, PropertiesUtils.REDUCE_IN,
+ PropertiesUtils.REDUCE_OUT, PropertiesUtils.GROUPS};
+
+ String[] badDefaultProps = {PropertiesUtils.HADOOP_USER, PropertiesUtils.HADOOP_UGI,
+ WorkflowAppService.HADOOP_JT_KERBEROS_NAME, WorkflowAppService.HADOOP_NN_KERBEROS_NAME};
+ PropertiesUtils.createPropertySet(badUserProps, DISALLOWED_DEFAULT_PROPERTIES);
+ PropertiesUtils.createPropertySet(badDefaultProps, DISALLOWED_DEFAULT_PROPERTIES);
+ }
+
protected SubWorkflowActionExecutor() {
super(ACTION_TYPE);
}
@@ -63,18 +80,23 @@ protected OozieClient getWorkflowClient(Context context, String oozieUri) {
oozieClient = new LocalOozieClient(dagEngine);
}
else {
- //TODO we need to add authToken to the WC for the remote case
+ // TODO we need to add authToken to the WC for the remote case
oozieClient = new OozieClient(oozieUri);
}
return oozieClient;
}
- protected void injectInline(Element eConf, Configuration subWorkflowConf)
- throws IOException, ActionExecutorException {
+ protected void injectInline(Element eConf, Configuration subWorkflowConf) throws IOException,
+ ActionExecutorException {
if (eConf != null) {
String strConf = XmlUtils.prettyPrint(eConf).toString();
Configuration conf = new XConfiguration(new StringReader(strConf));
- checkForDisallowedProps(conf, "inline configuration");
+ try {
+ PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_DEFAULT_PROPERTIES);
+ }
+ catch (CommandException ex) {
+ throw convertException(ex);
+ }
XConfiguration.copy(conf, subWorkflowConf);
}
}
@@ -95,7 +117,7 @@ protected void injectRecovery(String externalId, Configuration conf) {
protected String checkIfRunning(OozieClient oozieClient, String extId) throws OozieClientException {
String jobId = oozieClient.getJobId(extId);
- if(jobId.equals("")) {
+ if (jobId.equals("")) {
return null;
}
return jobId;
@@ -111,7 +133,7 @@ public void start(Context context, WorkflowAction action) throws ActionExecutorE
String subWorkflowId = null;
String extId = context.getRecoveryId();
String runningJobId = null;
- if(extId != null) {
+ if (extId != null) {
runningJobId = checkIfRunning(oozieClient, extId);
}
if (runningJobId == null) {
@@ -139,7 +161,7 @@ public void start(Context context, WorkflowAction action) throws ActionExecutorE
WorkflowJob workflow = oozieClient.getJobInfo(subWorkflowId);
String consoleUrl = workflow.getConsoleUrl();
context.setStartData(subWorkflowId, oozieUri, consoleUrl);
- if(runningJobId != null) {
+ if (runningJobId != null) {
check(context, action);
}
}
@@ -148,26 +170,11 @@ public void start(Context context, WorkflowAction action) throws ActionExecutorE
}
}
- private static final Set DISALLOWED_PROPERTIES = new HashSet();
-
- static {
- DISALLOWED_PROPERTIES.add(OozieClient.USER_NAME);
- DISALLOWED_PROPERTIES.add(OozieClient.GROUP_NAME);
- }
-
- protected void checkForDisallowedProps(Configuration conf, String confName) throws ActionExecutorException {
- for (String prop : DISALLOWED_PROPERTIES) {
- if (conf.get(prop) != null) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "DISALLOWED_CONF_PROPERTY",
- confName);
- }
- }
- }
-
public void end(Context context, WorkflowAction action) throws ActionExecutorException {
try {
String externalStatus = action.getExternalStatus();
- WorkflowAction.Status status = externalStatus.equals("SUCCEEDED") ? WorkflowAction.Status.OK : WorkflowAction.Status.ERROR;
+ WorkflowAction.Status status = externalStatus.equals("SUCCEEDED") ? WorkflowAction.Status.OK
+ : WorkflowAction.Status.ERROR;
context.setEndData(status, getActionSignal(status));
}
catch (Exception ex) {
diff --git a/core/src/main/java/org/apache/oozie/action/ssh/SshActionExecutor.java b/core/src/main/java/org/apache/oozie/action/ssh/SshActionExecutor.java
index f8fb6608b..b377f4321 100644
--- a/core/src/main/java/org/apache/oozie/action/ssh/SshActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/ssh/SshActionExecutor.java
@@ -42,27 +42,20 @@
import org.jdom.Namespace;
/**
- * Ssh action executor.
- *
- *
- * - Execute the shell commands on the remote host
- * - Copies the base and wrapper scripts on to the remote location
- * - Base script is used to run the command on the remote host
- * - Wrapper script is used to check the status of the submitted command
- * - handles the submission failures
- *
+ * Ssh action executor. - Execute the shell commands on the remote host
- Copies the base and wrapper
+ * scripts on to the remote location
- Base script is used to run the command on the remote host
- Wrapper
+ * script is used to check the status of the submitted command
- handles the submission failures
*/
public class SshActionExecutor extends ActionExecutor {
public static final String ACTION_TYPE = "ssh";
/**
- * Configuration parameter which specifies whether the specified ssh user is
- * allowed, or has to be the job user.
+ * Configuration parameter which specifies whether the specified ssh user is allowed, or has to be the job user.
*/
public static final String CONF_SSH_ALLOW_USER_AT_HOST = CONF_PREFIX + "ssh.allow.user.at.host";
protected static final String SSH_COMMAND_OPTIONS =
- "-o PasswordAuthentication=no -o KbdInteractiveDevices=no -o StrictHostKeyChecking=no -o ConnectTimeout=20 ";
+ "-o PasswordAuthentication=no -o KbdInteractiveDevices=no -o StrictHostKeyChecking=no -o ConnectTimeout=20 ";
protected static final String SSH_COMMAND_BASE = "ssh " + SSH_COMMAND_OPTIONS;
protected static final String SCP_COMMAND_BASE = "scp " + SSH_COMMAND_OPTIONS;
@@ -108,7 +101,7 @@ public void initActionType() {
/**
* Check ssh action status.
- *
+ *
* @param context action execution context.
* @param action action object.
*/
@@ -123,7 +116,7 @@ public void check(Context context, WorkflowAction action) throws ActionExecutorE
}
catch (JDOMException ex) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "ERR_XML_PARSE_FAILED",
- "unknown error", ex);
+ "unknown error", ex);
}
XLog log = XLog.getLog(getClass());
log.debug("Capture Output: {0}", captureOutput);
@@ -142,30 +135,32 @@ public void check(Context context, WorkflowAction action) throws ActionExecutorE
}
if (overflow) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR,
- "ERR_OUTPUT_EXCEED_MAX_LEN", "unknown error");
+ "ERR_OUTPUT_EXCEED_MAX_LEN", "unknown error");
}
context.setExecutionData(status.toString(), PropertiesUtils.stringToProperties(buffer.toString()));
}
catch (Exception ex) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "ERR_UNKNOWN_ERROR",
- "unknown error", ex);
+ "unknown error", ex);
}
}
else {
context.setExecutionData(status.toString(), null);
}
}
- else if (status == Status.ERROR) {
- context.setExecutionData(status.toString(), null);
- }
else {
- context.setExternalStatus(status.toString());
+ if (status == Status.ERROR) {
+ context.setExecutionData(status.toString(), null);
+ }
+ else {
+ context.setExternalStatus(status.toString());
+ }
}
}
/**
* Kill ssh action.
- *
+ *
* @param context action execution context.
* @param action object.
*/
@@ -182,7 +177,7 @@ public void kill(Context context, WorkflowAction action) throws ActionExecutorEx
/**
* Start the ssh action execution.
- *
+ *
* @param context action execution context.
* @param action action object.
*/
@@ -238,7 +233,7 @@ public String call() throws Exception {
@Override
public String call() throws Exception {
return doExecute(host, dirLocation, commandElement.getValue(), argsString, ignoreOutput,
- action, recoveryId);
+ action, recoveryId);
}
});
@@ -277,7 +272,7 @@ private String checkIfRunning(String host, final Context context, final Workflow
/**
* Get remote host working location.
- *
+ *
* @param context action execution context
* @param action Action
* @param fileExtension Extension to be added to file name
@@ -286,7 +281,7 @@ private String checkIfRunning(String host, final Context context, final Workflow
* @return remote host file name/Directory.
*/
public String getRemoteFileName(Context context, WorkflowAction action, String fileExtension, boolean dirOnly,
- boolean useExtId) {
+ boolean useExtId) {
String path = getActionDirPath(context.getWorkflow().getId(), action, ACTION_TYPE, false) + "/";
if (dirOnly) {
return path;
@@ -300,11 +295,11 @@ public String getRemoteFileName(Context context, WorkflowAction action, String f
/**
* Utility method to execute command.
- *
+ *
* @param command Command to execute as String.
+ * @return exit status of the execution.
* @throws IOException if process exits with status nonzero.
* @throws InterruptedException if process does not run properly.
- * @return exit status of the execution.
*/
public int executeCommand(String command) throws IOException, InterruptedException {
Runtime runtime = Runtime.getRuntime();
@@ -324,7 +319,7 @@ public int executeCommand(String command) throws IOException, InterruptedExcepti
/**
* Do ssh action execution setup on remote host.
- *
+ *
* @param host host name.
* @param context action execution context.
* @param action action object.
@@ -351,17 +346,17 @@ protected String setupRemote(String host, Context context, WorkflowAction action
String command = XLog.format("{0}{1} mkdir -p {2} ", SSH_COMMAND_BASE, host, remoteDirLocation).toString();
executeCommand(command);
command = XLog.format("{0}{1}/ssh-base.sh {2}/ssh-wrapper.sh {3}:{4}", SCP_COMMAND_BASE, localDirLocation,
- localDirLocation, host, remoteDirLocation);
+ localDirLocation, host, remoteDirLocation);
executeCommand(command);
command = XLog.format("{0}{1} chmod +x {2}ssh-base.sh {3}ssh-wrapper.sh ", SSH_COMMAND_BASE, host,
- remoteDirLocation, remoteDirLocation);
+ remoteDirLocation, remoteDirLocation);
executeCommand(command);
return remoteDirLocation;
}
/**
* Execute the ssh command.
- *
+ *
* @param host hostname.
* @param dirLocation location of the base and wrapper scripts.
* @param cmnd command to be executed.
@@ -374,7 +369,7 @@ protected String setupRemote(String host, Context context, WorkflowAction action
* @throws InterruptedException thrown if any interruption happens.
*/
protected String doExecute(String host, String dirLocation, String cmnd, String args, boolean ignoreOutput,
- WorkflowAction action, String recoveryId) throws IOException, InterruptedException {
+ WorkflowAction action, String recoveryId) throws IOException, InterruptedException {
XLog log = XLog.getLog(getClass());
Runtime runtime = Runtime.getRuntime();
String callbackPost = ignoreOutput ? "_" : getOozieConf().get(HTTP_COMMAND_OPTIONS).replace(" ", "%%%");
@@ -382,7 +377,7 @@ protected String doExecute(String host, String dirLocation, String cmnd, String
String callBackUrl = Services.get().get(CallbackService.class)
.createCallBackUrl(action.getId(), EXT_STATUS_VAR);
String command = XLog.format("{0}{1} {2}ssh-base.sh {3} \"{4}\" \"{5}\" {6} {7} {8} ", SSH_COMMAND_BASE, host,
- dirLocation, getOozieConf().get(HTTP_COMMAND), callBackUrl, callbackPost, recoveryId, cmnd, args)
+ dirLocation, getOozieConf().get(HTTP_COMMAND), callBackUrl, callbackPost, recoveryId, cmnd, args)
.toString();
log.trace("Executing ssh command [{0}]", command);
Process p = runtime.exec(command.split("\\s"));
@@ -405,7 +400,7 @@ dirLocation, getOozieConf().get(HTTP_COMMAND), callBackUrl, callbackPost, recove
/**
* End action execution.
- *
+ *
* @param context action execution context.
* @param action action object.
* @throws ActionExecutorException thrown if action end execution fails.
@@ -430,10 +425,9 @@ public void end(final Context context, final WorkflowAction action) throws Actio
/**
* Get the return value of a process.
- *
+ *
* @param command command to be executed.
- * @return zero if execution is successful and any non zero value for
- * failure.
+ * @return zero if execution is successful and any non zero value for failure.
* @throws ActionExecutorException
*/
private int getReturnValue(String command) throws ActionExecutorException {
@@ -476,11 +470,10 @@ private void initSshScripts() {
/**
* Get action status.
- *
+ *
* @param action action object.
* @return status of the action(RUNNING/OK/ERROR).
- * @throws ActionExecutorException thrown if there is any error in getting
- * status.
+ * @throws ActionExecutorException thrown if there is any error in getting status.
*/
protected Status getActionStatus(Context context, WorkflowAction action) throws ActionExecutorException {
String command = SSH_COMMAND_BASE + action.getTrackerUri() + " ps -p " + action.getExternalId();
@@ -505,10 +498,9 @@ protected Status getActionStatus(Context context, WorkflowAction action) throws
/**
* Execute the callable.
- *
+ *
* @param callable required callable.
- * @throws ActionExecutorException thrown if there is any error in command
- * execution.
+ * @throws ActionExecutorException thrown if there is any error in command execution.
*/
private T execute(Callable callable) throws ActionExecutorException {
XLog log = XLog.getLog(getClass());
@@ -522,43 +514,57 @@ private T execute(Callable callable) throws ActionExecutorException {
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_UNKNOWN_ERROR, ex
.getMessage(), ex);
} // Host Resolution Issues
- else if (errorMessage.contains("Could not resolve hostname") ||
- errorMessage.contains("service not known")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_HOST_RESOLUTION, ex
- .getMessage(), ex);
- } // Connection Timeout. Host temporarily down.
- else if (errorMessage.contains("timed out")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_COULD_NOT_CONNECT,
- ex.getMessage(), ex);
- }// Local ssh-base or ssh-wrapper missing
- else if (errorMessage.contains("Required Local file")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_FNF,
- ex.getMessage(), ex); // local_FNF
- }// Required oozie bash scripts missing, after the copy was
- // successful
- else if (errorMessage.contains("No such file or directory")
- && (errorMessage.contains("ssh-base") || errorMessage.contains("ssh-wrapper"))) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_FNF,
- ex.getMessage(), ex); // remote
- // FNF
- } // Required application execution binary missing (either
- // caught by ssh-wrapper
- else if (errorMessage.contains("command not found")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_FNF, ex
- .getMessage(), ex); // remote
- // FNF
- } // Permission denied while connecting
- else if (errorMessage.contains("Permission denied")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_AUTH_FAILED, ex
- .getMessage(), ex);
- } // Permission denied while executing
- else if (errorMessage.contains(": Permission denied")) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_NO_EXEC_PERM, ex
- .getMessage(), ex);
- }
else {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_UNKNOWN_ERROR, ex
- .getMessage(), ex);
+ if (errorMessage.contains("Could not resolve hostname") ||
+ errorMessage.contains("service not known")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_HOST_RESOLUTION, ex
+ .getMessage(), ex);
+ } // Connection Timeout. Host temporarily down.
+ else {
+ if (errorMessage.contains("timed out")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_COULD_NOT_CONNECT,
+ ex.getMessage(), ex);
+ }// Local ssh-base or ssh-wrapper missing
+ else {
+ if (errorMessage.contains("Required Local file")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_FNF,
+ ex.getMessage(), ex); // local_FNF
+ }// Required oozie bash scripts missing, after the copy was
+ // successful
+ else {
+ if (errorMessage.contains("No such file or directory")
+ && (errorMessage.contains("ssh-base") || errorMessage.contains("ssh-wrapper"))) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, ERR_FNF,
+ ex.getMessage(), ex); // remote
+ // FNF
+ } // Required application execution binary missing (either
+ // caught by ssh-wrapper
+ else {
+ if (errorMessage.contains("command not found")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_FNF, ex
+ .getMessage(), ex); // remote
+ // FNF
+ } // Permission denied while connecting
+ else {
+ if (errorMessage.contains("Permission denied")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_AUTH_FAILED, ex
+ .getMessage(), ex);
+ } // Permission denied while executing
+ else {
+ if (errorMessage.contains(": Permission denied")) {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.NON_TRANSIENT, ERR_NO_EXEC_PERM, ex
+ .getMessage(), ex);
+ }
+ else {
+ throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_UNKNOWN_ERROR, ex
+ .getMessage(), ex);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
}
} // Any other type of exception
catch (Exception ex) {
@@ -567,16 +573,13 @@ else if (errorMessage.contains(": Permission denied")) {
}
/**
- * Checks whether the system is configured to always use the oozie user for
- * ssh, and injects the user if required.
- *
+ * Checks whether the system is configured to always use the oozie user for ssh, and injects the user if required.
+ *
* @param host the host string.
* @param context the execution context.
- * @return the modified host string with a user parameter added on if
- * required.
- * @throws ActionExecutorException in case the flag to use the oozie user is
- * turned on and there is a mismatch between the user specified in
- * the host and the oozie user.
+ * @return the modified host string with a user parameter added on if required.
+ * @throws ActionExecutorException in case the flag to use the oozie user is turned on and there is a mismatch
+ * between the user specified in the host and the oozie user.
*/
private String prepareUserHost(String host, Context context) throws ActionExecutorException {
String oozieUser = context.getProtoActionConf().get(OozieClient.USER_NAME);
@@ -585,11 +588,11 @@ private String prepareUserHost(String host, Context context) throws ActionExecut
host = oozieUser + "@" + host;
}
}
- else{
+ else {
if (host.contains("@")) {
if (!host.toLowerCase().startsWith(oozieUser + "@")) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, ERR_USER_MISMATCH,
- XLog.format("user mismatch between oozie user [{0}] and ssh host [{1}]", oozieUser, host));
+ XLog.format("user mismatch between oozie user [{0}] and ssh host [{1}]", oozieUser, host));
}
}
else {
@@ -620,18 +623,14 @@ private String getTruncatedString(StringBuffer strBuffer) {
}
/**
- * Drains the inputStream and errorStream of the Process being executed. The
- * contents of the streams are stored if a buffer is provided for the
- * stream.
+ * Drains the inputStream and errorStream of the Process being executed. The contents of the streams are stored if a
+ * buffer is provided for the stream.
*
* @param p The Process instance.
- * @param inputBuffer The buffer into which STDOUT is to be read. Can be
- * null if only draining is required.
- * @param errorBuffer The buffer into which STDERR is to be read. Can be
- * null if only draining is required.
- * @param maxLength The maximum data length to be stored in these buffers.
- * This is an indicative value, and the store content may exceed this
- * length.
+ * @param inputBuffer The buffer into which STDOUT is to be read. Can be null if only draining is required.
+ * @param errorBuffer The buffer into which STDERR is to be read. Can be null if only draining is required.
+ * @param maxLength The maximum data length to be stored in these buffers. This is an indicative value, and the
+ * store content may exceed this length.
* @return the exit value of the process.
* @throws IOException
*/
@@ -671,14 +670,12 @@ private int drainBuffers(Process p, StringBuffer inputBuffer, StringBuffer error
* Reads the contents of a stream and stores them into the provided buffer.
*
* @param br The stream to be read.
- * @param storageBuf The buffer into which the contents of the stream are to
- * be stored.
- * @param maxLength The maximum number of bytes to be stored in the buffer.
- * An indicative value and may be exceeded.
+ * @param storageBuf The buffer into which the contents of the stream are to be stored.
+ * @param maxLength The maximum number of bytes to be stored in the buffer. An indicative value and may be
+ * exceeded.
* @param bytesRead The number of bytes read from this stream to date.
- * @param readAll If true, the stream is drained while their is data
- * available in it. Otherwise, only a single chunk of data is read,
- * irrespective of how much is available.
+     * @param readAll If true, the stream is drained while there is data available in it. Otherwise, only a single chunk
+ * of data is read, irrespective of how much is available.
* @return
* @throws IOException
*/
@@ -699,8 +696,7 @@ private int drainBuffer(BufferedReader br, StringBuffer storageBuf, int maxLengt
}
/**
- * Returns the first line from a StringBuffer, recognized by the new line
- * character \n.
+ * Returns the first line from a StringBuffer, recognized by the new line character \n.
*
* @param buffer The StringBuffer from which the first line is required.
* @return The first line of the buffer.
diff --git a/core/src/main/java/org/apache/oozie/command/Command.java b/core/src/main/java/org/apache/oozie/command/Command.java
index f3db0e9e5..1a7c4d9e9 100644
--- a/core/src/main/java/org/apache/oozie/command/Command.java
+++ b/core/src/main/java/org/apache/oozie/command/Command.java
@@ -17,40 +17,50 @@
*/
package org.apache.oozie.command;
-import org.apache.oozie.service.XLogService;
-import org.apache.oozie.XException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.FaultInjection;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
-import org.apache.oozie.FaultInjection;
-import org.apache.oozie.service.DagXLogInfoService;
-import org.apache.oozie.service.WorkflowStoreService;
-import org.apache.oozie.store.StoreException;
-import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.XException;
import org.apache.oozie.service.CallableQueueService;
+import org.apache.oozie.service.DagXLogInfoService;
import org.apache.oozie.service.InstrumentationService;
+import org.apache.oozie.service.MemoryLocksService;
import org.apache.oozie.service.Services;
+import org.apache.oozie.service.StoreService;
+import org.apache.oozie.service.XLogService;
+import org.apache.oozie.store.Store;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.util.Instrumentation;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XCallable;
import org.apache.oozie.util.XLog;
-
-import java.util.ArrayList;
-import java.util.List;
+import org.apache.oozie.util.MemoryLocks.LockToken;
/**
* Base class for all synchronous and asynchronous DagEngine commands.
*/
-public abstract class Command implements XCallable {
+public abstract class Command implements XCallable {
/**
* The instrumentation group used for Commands.
*/
private static final String INSTRUMENTATION_GROUP = "commands";
-
+
+ private final long createdTime;
+
/**
* The instrumentation group used for Jobs.
*/
private static final String INSTRUMENTATION_JOB_GROUP = "jobs";
+ private static final long LOCK_TIMEOUT = 1000;
+ protected static final long LOCK_FAILURE_REQUEUE_INTERVAL = 30000;
+
protected Instrumentation instrumentation;
private List> callables;
private List> delayedCallables;
@@ -60,7 +70,9 @@ public abstract class Command implements XCallable {
private int priority;
private int logMask;
private boolean withStore;
- private String type;
+ protected boolean dryrun = false;
+ protected String type;
+ private ArrayList locks = null;
/**
* This variable is package private for testing purposes only.
@@ -68,9 +80,8 @@ public abstract class Command implements XCallable {
XLog.Info logInfo;
/**
- * Create a command that uses a {@link WorkflowStore} instance.
- *
- * The current {@link XLog.Info} values are captured for execution.
+ * Create a command that uses a {@link WorkflowStore} instance. The current {@link XLog.Info} values are
+ * captured for execution.
*
* @param name command name.
* @param type command type.
@@ -82,9 +93,7 @@ public Command(String name, String type, int priority, int logMask) {
}
/**
- * Create a command.
- *
- * The current {@link XLog.Info} values are captured for execution.
+ * Create a command. The current {@link XLog.Info} values are captured for execution.
*
* @param name command name.
* @param type command type.
@@ -100,6 +109,24 @@ public Command(String name, String type, int priority, int logMask, boolean with
this.logMask = logMask;
instrumentation = Services.get().get(InstrumentationService.class).get();
logInfo = new XLog.Info(XLog.Info.get());
+ createdTime = System.currentTimeMillis();
+ locks = new ArrayList();
+ }
+
+ /**
+ * Create a command. The current {@link XLog.Info} values are captured for execution.
+ *
+ * @param name command name.
+ * @param type command type.
+ * @param priority priority of the command, used when queuing for asynchronous execution.
+ * @param logMask log mask for the command logging calls.
+ * @param withStore indicates if the command needs a {@link org.apache.oozie.store.WorkflowStore} instance or not.
+ * @param dryrun indicates if dryrun option is enabled. if enabled coordinator will show a diagnostic output without
+ * really submitting the job
+ */
+ public Command(String name, String type, int priority, int logMask, boolean withStore, boolean dryrun) {
+ this(name, type, priority, logMask, withStore);
+ this.dryrun = dryrun;
}
/**
@@ -112,10 +139,8 @@ public String getName() {
}
/**
- * Return the callable type.
- *
- * The callable type is used for concurrency throttling in the
- * {@link org.apache.oozie.service.CallableQueueService}.
+ * Return the callable type. The callable type is used for concurrency throttling in the {@link
+ * org.apache.oozie.service.CallableQueueService}.
*
* @return the callable type.
*/
@@ -133,25 +158,27 @@ public int getPriority() {
}
/**
- * Execute the command {@link #call(WorkflowStore)} setting all the necessary context.
- *
- * The {@link XLog.Info} is set to the values at instance creation time.
- *
- * The command execution is logged and instrumented.
- *
- * If a {@link WorkflowStore} is used, a fresh instance will be passed and it will be commited after the
- * {@link #call(WorkflowStore)} execution. It will be closed without committing if an exception is thrown.
- *
- * Commands queued via the DagCommand queue methods are queued for execution after the workflow store has been
- * committed.
- *
- * If an exception happends the queued commands will not be effectively queued for execution. Instead, the
- * the commands queued for exception will be effectively queued fro execution..
+ * Returns the createdTime of the callable in milliseconds
+ *
+ * @return the callable createdTime
+ */
+ public long getCreatedTime() {
+ return createdTime;
+ }
+
+ /**
+ * Execute the command {@link #call(WorkflowStore)} setting all the necessary context. The {@link XLog.Info} is
+ * set to the values at instance creation time. The command execution is logged and instrumented. If a
+     * {@link WorkflowStore} is used, a fresh instance will be passed and it will be committed after the {@link
+ * #call(WorkflowStore)} execution. It will be closed without committing if an exception is thrown. Commands
+ * queued via the DagCommand queue methods are queued for execution after the workflow store has been committed.
+     * If an exception happens the queued commands will not be effectively queued for execution. Instead, the
+     * commands queued for exception will be effectively queued for execution.
*
- * @throws CommandException thrown if the command could not be executed successfully, the workflow store is
- * closed without committing, thus doing a rollback.
+ * @throws CommandException thrown if the command could not be executed successfully, the workflow store is closed
+ * without committing, thus doing a rollback.
*/
- @SuppressWarnings({"ThrowFromFinallyBlock"})
+ @SuppressWarnings({"ThrowFromFinallyBlock", "unchecked"})
public final T call() throws CommandException {
XLog.Info.get().setParameters(logInfo);
XLog log = XLog.getLog(getClass());
@@ -162,14 +189,21 @@ public final T call() throws CommandException {
delayedCallables = new ArrayList>();
exceptionCallables = new ArrayList>();
delay = 0;
- WorkflowStore store = null;
+ S store = null;
boolean exception = false;
+
try {
if (withStore) {
- store = Services.get().get(WorkflowStoreService.class).create();
+ store = (S) Services.get().get(StoreService.class).getStore(getStoreClass());
+ store.beginTrx();
}
- T result = call(store);
-
+ T result = execute(store);
+ /*
+ *
+ * if (store != null && log != null) { log.info(XLog.STD,
+ * "connection log from store Flush Mode {0} ",
+ * store.getFlushMode()); }
+ */
if (withStore) {
if (store == null) {
throw new IllegalStateException("WorkflowStore should not be null");
@@ -177,20 +211,45 @@ public final T call() throws CommandException {
if (FaultInjection.isActive("org.apache.oozie.command.SkipCommitFaultInjection")) {
throw new RuntimeException("Skipping Commit for Failover Testing");
}
- store.commit();
+ store.commitTrx();
}
- //TODO figure out the reject due to concurrency problems and remove the delayed queuing for callables.
- Services.get().get(CallableQueueService.class).queueSerial(callables, 10);
+ // TODO figure out the reject due to concurrency problems and remove
+ // the delayed queuing for callables.
+ boolean ret = Services.get().get(CallableQueueService.class).queueSerial(callables, 10);
+ if (ret == false) {
+ logQueueCallableFalse(callables);
+ }
- Services.get().get(CallableQueueService.class).queueSerial(delayedCallables, delay);
+ ret = Services.get().get(CallableQueueService.class).queueSerial(delayedCallables, delay);
+ if (ret == false) {
+ logQueueCallableFalse(delayedCallables);
+ }
return result;
}
catch (XException ex) {
+ log.error(logMask | XLog.OPS, "XException, {0}", ex);
+ if (store != null) {
+ log.info(XLog.STD, "XException - connection logs from store {0}, {1}", store.getConnection(), store
+ .isClosed());
+ }
exception = true;
- //TODO figure out the reject due to concurrency problems and remove the delayed queuing for callables.
- Services.get().get(CallableQueueService.class).queueSerial(exceptionCallables, 10);
+ if (store != null && store.isActive()) {
+ try {
+ store.rollbackTrx();
+ }
+ catch (RuntimeException rex) {
+ log.error(logMask | XLog.OPS, "openjpa error, {1}, {2}", name, rex.getMessage(), rex);
+ }
+ }
+
+ // TODO figure out the reject due to concurrency problems and remove
+ // the delayed queuing for callables.
+ boolean ret = Services.get().get(CallableQueueService.class).queueSerial(exceptionCallables, 10);
+ if (ret == false) {
+ logQueueCallableFalse(exceptionCallables);
+ }
if (ex instanceof CommandException) {
throw (CommandException) ex;
}
@@ -199,38 +258,69 @@ public final T call() throws CommandException {
}
}
catch (RuntimeException ex) {
+ log.error(logMask | XLog.OPS, "Runtime exception, {0}", ex);
exception = true;
+ if (store != null && store.isActive()) {
+ try {
+ store.rollbackTrx();
+ }
+ catch (RuntimeException rex) {
+ log.error(logMask | XLog.OPS, "openjpa error, {1}, {2}", name, rex.getMessage(), rex);
+ }
+ }
throw ex;
}
+ catch (Error er) {
+ log.error(logMask | XLog.OPS, "Error, {0}", er);
+ exception = true;
+ if (store != null && store.isActive()) {
+ try {
+ store.rollbackTrx();
+ }
+ catch (RuntimeException rex) {
+ log.error(logMask | XLog.OPS, "openjpa error, {1}, {2}", name, rex.getMessage(), rex);
+ }
+ }
+ throw er;
+ }
finally {
FaultInjection.deactivate("org.apache.oozie.command.SkipCommitFaultInjection");
cron.stop();
instrumentation.addCron(INSTRUMENTATION_GROUP, name, cron);
incrCommandCounter(1);
log.trace(logMask, "End");
- if (store != null) {
- try {
- store.close();
+ if (locks != null) {
+ for (LockToken lock : locks) {
+ lock.release();
}
- catch (StoreException ex) {
- if (exception) {
- log.warn(logMask | XLog.OPS, "store error, {1}", name, ex.getMessage(), ex);
+ locks.clear();
+ }
+ if (store != null) {
+ if (!store.isActive()) {
+ try {
+ store.closeTrx();
}
- else {
- throw new CommandException(ex);
+ catch (RuntimeException rex) {
+ if (exception) {
+ log.error(logMask | XLog.OPS, "openjpa error, {1}, {2}", name, rex.getMessage(), rex);
+ }
+ else {
+ throw rex;
+ }
}
}
+ else {
+ log.warn(logMask | XLog.OPS, "transaction is not committed or rolled back before closing entitymanager.");
+ }
}
}
}
/**
* Queue a callable for execution after the current callable call invocation completes and the {@link WorkflowStore}
- * transaction commits.
- *
- * All queued callables, regardless of the number of queue invocations, are queued for a single serial execution.
- *
- * If the call invocation throws an exception all queued callables are discarded, they are not queued for execution.
+ * transaction commits. All queued callables, regardless of the number of queue invocations, are queued for a
+ * single serial execution. If the call invocation throws an exception all queued callables are discarded, they
+ * are not queued for execution.
*
* @param callable callable to queue for execution.
*/
@@ -239,12 +329,10 @@ protected void queueCallable(XCallable callable) {
}
/**
- * Queue a list of callables for execution after the current callable call invocation completes and the
- * {@link WorkflowStore} transaction commits.
- *
- * All queued callables, regardless of the number of queue invocations, are queued for a single serial execution.
- *
- * If the call invocation throws an exception all queued callables are discarded, they are not queued for execution.
+ * Queue a list of callables for execution after the current callable call invocation completes and the {@link
+ * WorkflowStore} transaction commits. All queued callables, regardless of the number of queue invocations, are
+ * queued for a single serial execution. If the call invocation throws an exception all queued callables are
+ * discarded, they are not queued for execution.
*
* @param callables list of callables to queue for execution.
*/
@@ -253,13 +341,11 @@ protected void queueCallable(List extends XCallable> callables) {
}
/**
- * Queue a callable for delayed execution after the current callable call invocation completes and the
- * {@link WorkflowStore} transaction commits.
- *
- * All queued delayed callables, regardless of the number of delay queue invocations,
- * are queued for a single serial delayed execution with the highest delay of all queued callables.
- *
- * If the call invocation throws an exception all queued callables are discarded, they are not queued for execution.
+ * Queue a callable for delayed execution after the current callable call invocation completes and the {@link
+ * WorkflowStore} transaction commits. All queued delayed callables, regardless of the number of delay queue
+ * invocations, are queued for a single serial delayed execution with the highest delay of all queued callables.
+ * If the call invocation throws an exception all queued callables are discarded, they are not queued for
+ * execution.
*
* @param callable callable to queue for delayed execution.
* @param delay the queue delay in milliseconds
@@ -270,12 +356,10 @@ protected void queueCallable(XCallable callable, long delay) {
}
/**
- * Queue a callable for execution only in the event of an exception being thrown during the call invocation.
- *
- * If an exception does not happen, all the callables queued by this method are discarded, they are not queued for
- * execution.
- *
- * All queued callables, regardless of the number of queue invocations, are queued for a single serial execution.
+ * Queue a callable for execution only in the event of an exception being thrown during the call invocation. If
+ * an exception does not happen, all the callables queued by this method are discarded, they are not queued for
+ * execution. All queued callables, regardless of the number of queue invocations, are queued for a single
+ * serial execution.
*
* @param callable callable to queue for execution in the case of an exception.
*/
@@ -284,10 +368,31 @@ protected void queueCallableForException(XCallable callable) {
}
/**
- * DagCallable subclasses must implement this method to perform their task.
- *
- * The workflow store works in transactional mode. The transaction is committed only if this method ends
- * successfully. Otherwise the transaction is rolledback.
+     * Logs a warning with the names of the callables that could not be queued.
+ *
+ * @param callables
+ */
+ protected void logQueueCallableFalse(List extends XCallable> callables) {
+ StringBuilder sb = new StringBuilder(
+ "Unable to queue the callables, delayedQueue is full or system is in SAFEMODE - failed to queue:[");
+ int size = callables.size();
+ for (int i = 0; i < size; i++) {
+ XCallable callable = callables.get(i);
+ sb.append(callable.getName());
+ if (i < size - 1) {
+ sb.append(", ");
+ }
+ else {
+ sb.append("]");
+ }
+ }
+ XLog.getLog(getClass()).warn(sb.toString());
+ }
+
+ /**
+ * DagCallable subclasses must implement this method to perform their task. The workflow store works in
+ * transactional mode. The transaction is committed only if this method ends successfully. Otherwise the transaction
+     * is rolled back.
*
* @param store the workflow store instance for the callable, null
if the callable does not use a
* store.
@@ -295,7 +400,50 @@ protected void queueCallableForException(XCallable callable) {
* @throws StoreException thrown if the workflow store could not perform an operation.
* @throws CommandException thrown if the command could not perform its operation.
*/
- protected abstract T call(WorkflowStore store) throws StoreException, CommandException;
+ protected abstract T call(S store) throws StoreException, CommandException;
+
+ // to do
+ // need to implement on all sub commands and break down the transactions
+
+ // protected abstract T execute(String id) throws CommandException;
+
+ /**
+     * Command subclasses must implement this method so that the correct Store can be passed to call(store);
+ *
+ * @return the Store class for use by Callable
+ * @throws CommandException thrown if the command could not perform its operation.
+ */
+ protected abstract Class extends Store> getStoreClass();
+
+ /**
+ * Set the log info with the context of the given coordinator bean.
+ *
+ * @param cBean coordinator bean.
+ */
+ protected void setLogInfo(CoordinatorJobBean cBean) {
+ if (logInfo.getParameter(XLogService.GROUP) == null) {
+ logInfo.setParameter(XLogService.GROUP, cBean.getGroup());
+ }
+ if (logInfo.getParameter(XLogService.USER) == null) {
+ logInfo.setParameter(XLogService.USER, cBean.getUser());
+ }
+ logInfo.setParameter(DagXLogInfoService.JOB, cBean.getId());
+ logInfo.setParameter(DagXLogInfoService.TOKEN, "");
+ logInfo.setParameter(DagXLogInfoService.APP, cBean.getAppName());
+ XLog.Info.get().setParameters(logInfo);
+ }
+
+ /**
+ * Set the log info with the context of the given coordinator action bean.
+ *
+ * @param action action bean.
+ */
+ protected void setLogInfo(CoordinatorActionBean action) {
+ logInfo.setParameter(DagXLogInfoService.JOB, action.getJobId());
+ // logInfo.setParameter(DagXLogInfoService.TOKEN, action.getLogToken());
+ logInfo.setParameter(DagXLogInfoService.ACTION, action.getId());
+ XLog.Info.get().setParameters(logInfo);
+ }
/**
* Set the log info with the context of the given workflow bean.
@@ -303,10 +451,10 @@ protected void queueCallableForException(XCallable callable) {
* @param workflow workflow bean.
*/
protected void setLogInfo(WorkflowJobBean workflow) {
- if(logInfo.getParameter(XLogService.GROUP) == null) {
+ if (logInfo.getParameter(XLogService.GROUP) == null) {
logInfo.setParameter(XLogService.GROUP, workflow.getGroup());
}
- if(logInfo.getParameter(XLogService.USER) == null) {
+ if (logInfo.getParameter(XLogService.USER) == null) {
logInfo.setParameter(XLogService.USER, workflow.getUser());
}
logInfo.setParameter(DagXLogInfoService.JOB, workflow.getId());
@@ -323,14 +471,14 @@ protected void setLogInfo(WorkflowJobBean workflow) {
protected void setLogInfo(WorkflowActionBean action) {
logInfo.setParameter(DagXLogInfoService.JOB, action.getJobId());
logInfo.setParameter(DagXLogInfoService.TOKEN, action.getLogToken());
- logInfo.setParameter(DagXLogInfoService.ACTION, action.getName());
+ logInfo.setParameter(DagXLogInfoService.ACTION, action.getId());
XLog.Info.get().setParameters(logInfo);
}
/**
* Reset the action bean information from the log info.
*/
- //TODO check if they are used, else delete
+ // TODO check if they are used, else delete
protected void resetLogInfoAction() {
logInfo.clearParameter(DagXLogInfoService.ACTION);
XLog.Info.get().clearParameter(DagXLogInfoService.ACTION);
@@ -339,7 +487,7 @@ protected void resetLogInfoAction() {
/**
* Reset the workflow bean information from the log info.
*/
- //TODO check if they are used, else delete
+ // TODO check if they are used, else delete
protected void resetLogInfoWorkflow() {
logInfo.clearParameter(DagXLogInfoService.JOB);
logInfo.clearParameter(DagXLogInfoService.APP);
@@ -364,7 +512,7 @@ private void incrCounter(String group, String name, int count) {
/**
* Used to increment command counters.
- *
+ *
* @param count the increment count.
*/
protected void incrCommandCounter(int count) {
@@ -372,9 +520,8 @@ protected void incrCommandCounter(int count) {
}
/**
- * Used to increment job counters. The counter name s the same as the
- * command name.
- *
+     * Used to increment job counters. The counter name is the same as the command name.
+ *
* @param count the increment count.
*/
protected void incrJobCounter(int count) {
@@ -391,7 +538,6 @@ protected void incrJobCounter(String name, int count) {
incrCounter(INSTRUMENTATION_JOB_GROUP, name, count);
}
-
/**
* Return the {@link Instrumentation} instance in use.
*
@@ -401,4 +547,28 @@ protected Instrumentation getInstrumentation() {
return instrumentation;
}
-}
\ No newline at end of file
+ protected boolean lock(String id) throws InterruptedException {
+ if (id == null || id.length() == 0) {
+ XLog.getLog(getClass()).warn("lock(): Id is null or empty :" + id + ":");
+ return false;
+ }
+ LockToken token = Services.get().get(MemoryLocksService.class).getWriteLock(id, LOCK_TIMEOUT);
+ if (token != null) {
+ locks.add(token);
+ return true;
+ }
+ else {
+ return false;
+ }
+ }
+
+ /*
+ * TODO - remove store coupling to EM. Store will only contain queries
+ * protected EntityManager getEntityManager() { return
+ * store.getEntityManager(); }
+ */
+ protected T execute(S store) throws CommandException, StoreException {
+ T result = call(store);
+ return result;
+ }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/CommandException.java b/core/src/main/java/org/apache/oozie/command/CommandException.java
index cd330441a..2e97cd7de 100644
--- a/core/src/main/java/org/apache/oozie/command/CommandException.java
+++ b/core/src/main/java/org/apache/oozie/command/CommandException.java
@@ -38,7 +38,7 @@ public CommandException(XException cause) {
* Create a dag command exception.
*
* @param errorCode error code.
- * @param params parameters for the error code message template.
+ * @param params parameters for the error code message template.
*/
public CommandException(ErrorCode errorCode, Object... params) {
super(errorCode, params);
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionCheckCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionCheckCommand.java
new file mode 100644
index 000000000..010413178
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionCheckCommand.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.sql.Timestamp;
+
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.XException;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.StoreService;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.db.SLADbOperations;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.client.SLAEvent.Status;
+import org.apache.oozie.command.CommandException;
+
+public class CoordActionCheckCommand extends CoordinatorCommand {
+ private String actionId;
+ private int actionCheckDelay;
+ private final XLog log = XLog.getLog(getClass());
+ private CoordinatorActionBean coordAction = null;
+
+ public CoordActionCheckCommand(String actionId, int actionCheckDelay) {
+ super("coord_action_check", "coord_action_check", -1, XLog.OPS);
+ this.actionId = actionId;
+ this.actionCheckDelay = actionCheckDelay;
+ }
+
+ protected Void call(CoordinatorStore cstore) throws StoreException, CommandException {
+ try {
+ //if the action has been updated, quit this command
+ Timestamp actionCheckTs = new Timestamp(System.currentTimeMillis() - actionCheckDelay * 1000);
+ Timestamp cactionLmt = coordAction.getLastModifiedTimestamp();
+ if (cactionLmt.after(actionCheckTs)) {
+ log.info("The coord action :" + actionId + " has been udated. Ignore CoordActionCheckCommand!");
+ return null;
+ }
+ if (coordAction.getStatus().equals(CoordinatorAction.Status.SUCCEEDED)
+ || coordAction.getStatus().equals(CoordinatorAction.Status.FAILED)
+ || coordAction.getStatus().equals(CoordinatorAction.Status.KILLED)) {
+ // do nothing
+ }
+ else {
+ incrJobCounter(1);
+ WorkflowStore wstore = Services.get().get(StoreService.class).getStore(WorkflowStore.class, cstore);
+ WorkflowJobBean wf = wstore.getWorkflow(coordAction.getExternalId(), false);
+
+ Status slaStatus = null;
+
+ if (wf.getStatus() == WorkflowJob.Status.SUCCEEDED) {
+ coordAction.setStatus(CoordinatorAction.Status.SUCCEEDED);
+ slaStatus = Status.SUCCEEDED;
+ }
+ else {
+ if (wf.getStatus() == WorkflowJob.Status.FAILED) {
+ coordAction.setStatus(CoordinatorAction.Status.FAILED);
+ slaStatus = Status.FAILED;
+ }
+ else {
+ if (wf.getStatus() == WorkflowJob.Status.KILLED) {
+ coordAction.setStatus(CoordinatorAction.Status.KILLED);
+ slaStatus = Status.KILLED;
+ }
+ else {
+ log.warn("Unexpected workflow " + wf.getId() + " STATUS " + wf.getStatus());
+ cstore.updateCoordinatorAction(coordAction);
+ return null;
+ }
+ }
+ }
+
+ log.debug("Updating Coordintaor actionId :" + coordAction.getId() + "status to =" + coordAction.getStatus());
+ cstore.updateCoordinatorAction(coordAction);
+ if (slaStatus != null) {
+ SLADbOperations.writeStausEvent(coordAction.getSlaXml(), coordAction.getId(), cstore, slaStatus,
+ SlaAppType.COORDINATOR_ACTION);
+ }
+ }
+
+ }
+ catch (XException ex) {
+ log.warn("CoordActionCheckCommand Failed ", ex);
+ throw new CommandException(ex);
+ }
+ return null;
+ }
+
+ @Override
+ protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+ log.info("STARTED CoordActionCheckCommand for actionId = " + actionId);
+ try {
+ coordAction = store.getEntityManager().find(CoordinatorActionBean.class, actionId);
+ setLogInfo(coordAction);
+ if (lock(coordAction.getJobId())) {
+ call(store);
+ }
+ else {
+ queueCallable(new CoordActionCheckCommand(actionId, actionCheckDelay), LOCK_FAILURE_REQUEUE_INTERVAL);
+ log.warn("CoordActionCheckCommand lock was not acquired - failed jobId=" + coordAction.getJobId()
+ + ", actionId=" + actionId + ". Requeing the same.");
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new CoordActionCheckCommand(actionId, actionCheckDelay), LOCK_FAILURE_REQUEUE_INTERVAL);
+ log.warn("CoordActionCheckCommand lock acquiring failed with exception " + e.getMessage() + " for jobId="
+ + coordAction.getJobId() + ", actionId=" + actionId + " Requeing the same.");
+ }
+ finally {
+ log.info("ENDED CoordActionCheckCommand for actionId:" + actionId);
+ }
+ return null;
+ }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionInfoCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionInfoCommand.java
new file mode 100644
index 000000000..30d6458fb
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionInfoCommand.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.util.List;
+
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Read-only command that looks up a single coordinator action by its id and
+ * returns the corresponding {@link CoordinatorActionBean}.
+ */
+public class CoordActionInfoCommand extends CoordinatorCommand {
+    /** Id of the coordinator action to retrieve; validated non-empty in the constructor. */
+    private final String id;
+
+    /**
+     * @param id coordinator action id, must be non-null and non-empty
+     */
+    public CoordActionInfoCommand(String id) {
+        super("action.info", "action.info", 0, XLog.OPS);
+        this.id = ParamChecker.notEmpty(id, "id");
+    }
+
+    /**
+     * Loads the action from the store (without locking the record).
+     *
+     * @param store coordinator store to query
+     * @return the matching coordinator action bean
+     * @throws StoreException if the lookup fails
+     */
+    @Override
+    protected CoordinatorActionBean call(CoordinatorStore store) throws StoreException, CommandException {
+        return store.getCoordinatorAction(id, false);
+    }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionInputCheckCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionInputCheckCommand.java
new file mode 100644
index 000000000..216825e17
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionInputCheckCommand.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.ErrorCode;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.coord.CoordELEvaluator;
+import org.apache.oozie.coord.CoordELFunctions;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.ELEvaluator;
+import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XmlUtils;
+import org.jdom.Element;
+
+public class CoordActionInputCheckCommand extends CoordinatorCommand {
+
+ private String actionId;
+ private final XLog log = XLog.getLog(getClass());
+ private int COMMAND_REQUEUE_INTERVAL = 60000; // 1 minute
+ private CoordinatorActionBean coordAction = null;
+
+ public CoordActionInputCheckCommand(String actionId) {
+ super("coord_action_input", "coord_action_input", 0, XLog.STD);
+ this.actionId = actionId;
+ }
+
+ @Override
+ protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+ log.debug("After store.get() for action ID " + actionId + " : " + coordAction.getStatus());
+ // this action should only get processed if current time >
+ // materialization time
+ // otherwise, requeue this action after 30 seconds
+ Date nominalTime = coordAction.getNominalTime();
+ Date currentTime = new Date();
+ if (nominalTime.compareTo(currentTime) > 0) {
+ log.info("[" + actionId
+ + "]::ActionInputCheck:: nominal Time is newer than current time, so requeue and wait. Current="
+ + currentTime + ", nominal=" + nominalTime);
+ queueCallable(new CoordActionInputCheckCommand(coordAction.getId()), Math.max(
+ (nominalTime.getTime() - currentTime.getTime()), COMMAND_REQUEUE_INTERVAL));
+ //update lastModifiedTime
+ store.updateCoordinatorAction(coordAction);
+ return null;
+ }
+ if (coordAction.getStatus() == CoordinatorActionBean.Status.WAITING) {
+ log.info("[" + actionId + "]::ActionInputCheck:: Action is in WAITING state.");
+ StringBuilder actionXml = new StringBuilder(coordAction.getActionXml());// job.getXml();
+ Instrumentation.Cron cron = new Instrumentation.Cron();
+ try {
+ Configuration actionConf = new XConfiguration(new StringReader(coordAction.getRunConf()));
+ cron.start();
+ StringBuilder existList = new StringBuilder();
+ StringBuilder nonExistList = new StringBuilder();
+ StringBuilder nonResolvedList = new StringBuilder();
+ CoordActionMaterializeCommand.getResolvedList(coordAction.getMissingDependencies(), nonExistList,
+ nonResolvedList);
+
+ log.info("[" + actionId + "]::ActionInputCheck:: Missing deps:" + nonExistList.toString() + " "
+ + nonResolvedList.toString());
+ Date actualTime = new Date();
+ boolean status = checkInput(actionXml, existList, nonExistList, actionConf, actualTime);
+ coordAction.setLastModifiedTime(actualTime);
+ coordAction.setActionXml(actionXml.toString());
+ if (nonResolvedList.length() > 0 && status == false) {
+ nonExistList.append(CoordActionMaterializeCommand.RESOLVED_UNRESOLVED_SEPARATOR).append(
+ nonResolvedList);
+ }
+ coordAction.setMissingDependencies(nonExistList.toString());
+ if (status == true) {
+ coordAction.setStatus(CoordinatorAction.Status.READY);
+ // pass jobID to the ReadyCommand
+ queueCallable(new CoordActionReadyCommand(coordAction.getJobId()), 100);
+ }
+ else {
+ long waitingTime = (actualTime.getTime() - coordAction.getNominalTime().getTime()) / (60 * 1000);
+ int timeOut = coordAction.getTimeOut();
+ if ((timeOut >= 0) && (waitingTime > timeOut)) {
+ queueCallable(new CoordActionTimeOut(coordAction), 100);
+ coordAction.setStatus(CoordinatorAction.Status.TIMEDOUT);
+ }
+ else {
+ queueCallable(new CoordActionInputCheckCommand(coordAction.getId()), COMMAND_REQUEUE_INTERVAL);
+ }
+ }
+ store.updateCoordinatorAction(coordAction);
+ }
+ catch (Exception e) {
+ log.warn(actionId + ": Exception occurs: " + e + " STORE is active " + store.isActive(), e);
+ throw new CommandException(ErrorCode.E1005, e.getMessage(), e);
+ }
+ cron.stop();
+ }
+ else {
+ log.info("[" + actionId + "]::ActionInputCheck:: Ignoring action. Should be in WAITING state, but state="
+ + coordAction.getStatus());
+ }
+ return null;
+ }
+
+ protected boolean checkInput(StringBuilder actionXml, StringBuilder existList, StringBuilder nonExistList,
+ Configuration conf, Date actualTime) throws Exception {
+ Element eAction = XmlUtils.parseXml(actionXml.toString());
+ boolean allExist = checkResolvedUris(eAction, existList, nonExistList, conf);
+ if (allExist) {
+ log.info("[" + actionId + "]::ActionInputCheck:: Checking Latest");
+ allExist = checkUnresolvedInstances(eAction, conf, actualTime);
+ }
+ if (allExist == true) {
+ materializeDataProperties(eAction, conf);
+ actionXml.replace(0, actionXml.length(), XmlUtils.prettyPrint(eAction).toString());
+ }
+ return allExist;
+ }
+
+ /**
+ * Materialize data properties defined in tag. it includes dataIn() and dataOut() it creates a list
+ * of files that will be needed.
+ *
+ * @param eAction
+ * @param conf
+ * @throws Exception
+ * @update modify 'Action' element with appropriate list of files.
+ */
+ private void materializeDataProperties(Element eAction, Configuration conf) throws Exception {
+ ELEvaluator eval = CoordELEvaluator.createDataEvaluator(eAction, conf, actionId);
+ Element configElem = eAction.getChild("action", eAction.getNamespace()).getChild("workflow",
+ eAction.getNamespace()).getChild("configuration", eAction.getNamespace());
+ if (configElem != null) {
+ for (Element propElem : (List) configElem.getChildren("property", configElem.getNamespace())) {
+ resolveTagContents("value", propElem, eval);
+ }
+ }
+ }
+
+ private void resolveTagContents(String tagName, Element elem, ELEvaluator eval) throws Exception {
+ if (elem == null) {
+ return;
+ }
+ Element tagElem = elem.getChild(tagName, elem.getNamespace());
+ if (tagElem != null) {
+ String updated = CoordELFunctions.evalAndWrap(eval, tagElem.getText());
+ tagElem.removeContent();
+ tagElem.addContent(updated);
+ }
+ else {
+ log.warn(" Value NOT FOUND " + tagName);
+ }
+ }
+
+ private boolean checkUnresolvedInstances(Element eAction, Configuration actionConf, Date actualTime)
+ throws Exception {
+ String strAction = XmlUtils.prettyPrint(eAction).toString();
+ Date nominalTime = DateUtils.parseDateUTC(eAction.getAttributeValue("action-nominal-time"));
+ StringBuffer resultedXml = new StringBuffer();
+
+ boolean ret;
+ Element inputList = eAction.getChild("input-events", eAction.getNamespace());
+ if (inputList != null) {
+ ret = materializeUnresolvedEvent((List) inputList.getChildren("data-in", eAction.getNamespace()),
+ nominalTime, actualTime, actionConf);
+ if (ret == false) {
+ resultedXml.append(strAction);
+ return false;
+ }
+ }
+
+ // Using latest() in output-event is not intuitive. We need to make
+ // sure, this assumption is correct.
+ Element outputList = eAction.getChild("output-events", eAction.getNamespace());
+ if (outputList != null) {
+ for (Element dEvent : (List) outputList.getChildren("data-out", eAction.getNamespace())) {
+ if (dEvent.getChild("unresolved-instances", dEvent.getNamespace()) != null) {
+ throw new CommandException(ErrorCode.E1006, "coord:latest()", " not permitted in output-event ");
+ }
+ }
+ /*
+ * ret = materializeUnresolvedEvent( (List)
+ * outputList.getChildren("data-out", eAction.getNamespace()),
+ * actualTime, nominalTime, actionConf); if (ret == false) {
+ * resultedXml.append(strAction); return false; }
+ */
+ }
+ return true;
+ }
+
+ private boolean materializeUnresolvedEvent(List eDataEvents, Date nominalTime, Date actualTime,
+ Configuration conf) throws Exception {
+ for (Element dEvent : eDataEvents) {
+ if (dEvent.getChild("unresolved-instances", dEvent.getNamespace()) == null) {
+ continue;
+ }
+ ELEvaluator eval = CoordELEvaluator.createLazyEvaluator(actualTime, nominalTime, dEvent, conf);
+ String uresolvedInstance = dEvent.getChild("unresolved-instances", dEvent.getNamespace()).getTextTrim();
+ String unresolvedList[] = uresolvedInstance.split(CoordELFunctions.INSTANCE_SEPARATOR);
+ StringBuffer resolvedTmp = new StringBuffer();
+ for (int i = 0; i < unresolvedList.length; i++) {
+ String ret = CoordELFunctions.evalAndWrap(eval, unresolvedList[i]);
+ Boolean isResolved = (Boolean) eval.getVariable("is_resolved");
+ if (isResolved == false) {
+ log.info("[" + actionId + "]::Cannot resolve: " + ret);
+ return false;
+ }
+ if (resolvedTmp.length() > 0) {
+ resolvedTmp.append(CoordELFunctions.INSTANCE_SEPARATOR);
+ }
+ resolvedTmp.append((String) eval.getVariable("resolved_path"));
+ }
+ if (resolvedTmp.length() > 0) {
+ if (dEvent.getChild("uris", dEvent.getNamespace()) != null) {
+ resolvedTmp.append(CoordELFunctions.INSTANCE_SEPARATOR).append(
+ dEvent.getChild("uris", dEvent.getNamespace()).getTextTrim());
+ dEvent.removeChild("uris", dEvent.getNamespace());
+ }
+ Element uriInstance = new Element("uris", dEvent.getNamespace());
+ uriInstance.addContent(resolvedTmp.toString());
+ dEvent.getContent().add(1, uriInstance);
+ }
+ dEvent.removeChild("unresolved-instances", dEvent.getNamespace());
+ }
+
+ return true;
+ }
+
+ private boolean checkResolvedUris(Element eAction, StringBuilder existList, StringBuilder nonExistList,
+ Configuration conf) throws IOException {
+
+ log.info("[" + actionId + "]::ActionInputCheck:: In checkResolvedUris...");
+ Element inputList = eAction.getChild("input-events", eAction.getNamespace());
+ if (inputList != null) {
+ // List eDataEvents = inputList.getChildren("data-in",
+ // eAction.getNamespace());
+ // for (Element event : eDataEvents) {
+ // Element uris = event.getChild("uris", event.getNamespace());
+ if (nonExistList.length() > 0) {
+ checkListOfPaths(existList, nonExistList, conf);
+ }
+ // }
+ return nonExistList.length() == 0;
+ }
+ return true;
+ }
+
+ private boolean checkListOfPaths(StringBuilder existList, StringBuilder nonExistList, Configuration conf)
+ throws IOException {
+
+ log.info("[" + actionId + "]::ActionInputCheck:: In checkListOfPaths for: " + nonExistList.toString());
+
+ String[] uriList = nonExistList.toString().split(CoordELFunctions.INSTANCE_SEPARATOR);
+ nonExistList.delete(0, nonExistList.length());
+ boolean allExists = true;
+ for (int i = 0; i < uriList.length; i++) {
+ boolean exists = pathExists(uriList[i], conf);
+ log.info("[" + actionId + "]::ActionInputCheck:: File:" + uriList[i] + ", Exists? :" + exists);
+ if (exists) {
+ if (existList.length() > 0) {
+ existList.append(CoordELFunctions.INSTANCE_SEPARATOR);
+ }
+ existList.append(uriList[i]);
+ }
+ else {
+ allExists = false;
+ if (nonExistList.length() > 0) {
+ nonExistList.append(CoordELFunctions.INSTANCE_SEPARATOR);
+ }
+ nonExistList.append(uriList[i]);
+ }
+ }
+ return allExists;
+ }
+
+ private boolean pathExists(String sPath, Configuration actionConf) throws IOException {
+ log.debug("checking for the file " + sPath);
+ Path path = new Path(sPath);
+ String user = ParamChecker.notEmpty(actionConf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
+ String group = ParamChecker.notEmpty(actionConf.get(OozieClient.GROUP_NAME), OozieClient.GROUP_NAME);
+ return Services.get().get(HadoopAccessorService.class).
+ createFileSystem(user, group, path.toUri(), new Configuration()).exists(path);
+ }
+
+ /**
+ * The function create a list of URIs separated by "," using the instances time stamp and URI-template
+ *
+ * @param event : event
+ * @param instances : List of time stanmp seprated by ","
+ * @param unresolvedInstances : list of instance with latest function
+ * @return : list of URIs separated by ",".
+ * @throws Exception
+ */
+ private String createURIs(Element event, String instances, StringBuilder unresolvedInstances) throws Exception {
+ if (instances == null || instances.length() == 0) {
+ return "";
+ }
+ String[] instanceList = instances.split(CoordELFunctions.INSTANCE_SEPARATOR);
+ StringBuilder uris = new StringBuilder();
+
+ for (int i = 0; i < instanceList.length; i++) {
+ if (instanceList[i].indexOf("latest") >= 0) {
+ if (unresolvedInstances.length() > 0) {
+ unresolvedInstances.append(CoordELFunctions.INSTANCE_SEPARATOR);
+ }
+ unresolvedInstances.append(instanceList[i]);
+ continue;
+ }
+ ELEvaluator eval = CoordELEvaluator.createURIELEvaluator(instanceList[i]);
+ // uris.append(eval.evaluate(event.getChild("dataset",
+ // event.getNamespace()).getChild("uri-template",
+ // event.getNamespace()).getTextTrim(), String.class));
+ if (uris.length() > 0) {
+ uris.append(CoordELFunctions.INSTANCE_SEPARATOR);
+ }
+ uris.append(CoordELFunctions.evalAndWrap(eval, event.getChild("dataset", event.getNamespace()).getChild(
+ "uri-template", event.getNamespace()).getTextTrim()));
+ }
+ return uris.toString();
+ }
+
+ @Override
+ protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+ log.info("STARTED CoordActionInputCheckCommand for actionid=" + actionId);
+ try {
+ coordAction = store.getEntityManager().find(CoordinatorActionBean.class, actionId);
+ setLogInfo(coordAction);
+ if (lock(coordAction.getJobId())) {
+ call(store);
+ }
+ else {
+ queueCallable(new CoordActionInputCheckCommand(actionId), LOCK_FAILURE_REQUEUE_INTERVAL);
+ log.warn("CoordActionInputCheckCommand lock was not acquired - failed jobId=" + coordAction.getJobId()
+ + ", actionId=" + actionId + ". Requeing the same.");
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new CoordActionInputCheckCommand(actionId), LOCK_FAILURE_REQUEUE_INTERVAL);
+ log.warn("CoordActionInputCheckCommand lock acquiring failed with exception " + e.getMessage() + " for jobId="
+ + coordAction.getJobId() + ", actionId=" + actionId + " Requeing the same.");
+ }
+ finally {
+ log.info("ENDED CoordActionInputCheckCommand for actionid=" + actionId);
+ }
+ return null;
+ }
+
+ /**
+ * @param args
+ * @throws Exception
+ */
+ public static void main(String[] args) throws Exception {
+ new Services().init();
+ String actionId = "0000000-091221141623042-oozie-dani-C@4";
+ try {
+ new CoordActionInputCheckCommand(actionId).call();
+ Thread.sleep(10000);
+ }
+ finally {
+ new Services().destroy();
+ }
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionMaterializeCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionMaterializeCommand.java
new file mode 100644
index 000000000..b081c9e1d
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionMaterializeCommand.java
@@ -0,0 +1,672 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.TimeZone;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.ErrorCode;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.coord.CoordELEvaluator;
+import org.apache.oozie.coord.CoordELFunctions;
+import org.apache.oozie.coord.CoordUtils;
+import org.apache.oozie.coord.CoordinatorJobException;
+import org.apache.oozie.coord.SyncCoordAction;
+import org.apache.oozie.coord.TimeUnit;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.UUIDService;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.ELEvaluator;
+import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XmlUtils;
+import org.apache.oozie.util.db.SLADbOperations;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+
+public class CoordActionMaterializeCommand extends CoordinatorCommand {
+ public static final String RESOLVED_UNRESOLVED_SEPARATOR = ";";
+ private String jobId;
+ private Date startTime;
+ private Date endTime;
+ private int lastActionNumber = 1; // over-ride by DB value
+ private final XLog log = XLog.getLog(getClass());
+ private String user;
+ private String group;
+
+    /**
+     * @param jobId coordinator job whose actions are to be materialized
+     * @param startTime start of the materialization window
+     * @param endTime end of the materialization window
+     */
+    public CoordActionMaterializeCommand(String jobId, Date startTime, Date endTime) {
+        super("coord_action_mater", "coord_action_mater", 0, XLog.STD);
+        this.jobId = jobId;
+        this.startTime = startTime;
+        this.endTime = endTime;
+    }
+
+    /**
+     * Materializes the actions of the coordinator job in [startTime, endTime),
+     * but only when the job is in PREMATER state and the window has not
+     * already been materialized (lastActionTime >= endTime).  On a
+     * CommandException from materialization the job is marked FAILED.
+     *
+     * @param store coordinator store used to load and persist the job
+     * @return always null
+     */
+    @Override
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        //CoordinatorJobBean job = store.getCoordinatorJob(jobId, true);
+        CoordinatorJobBean job = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+        setLogInfo(job);
+        // Skip when this window was already materialized in a previous run.
+        if (job.getLastActionTime() != null && job.getLastActionTime().compareTo(endTime) >= 0) {
+            log.info("ENDED Coordinator materialization for jobId=" + jobId
+                    + " Action is *already* materialized for time " + startTime + " : " + endTime);
+            return null;
+        }
+
+        this.user = job.getUser();
+        this.group = job.getGroup();
+
+        if (job.getStatus().equals(CoordinatorJobBean.Status.PREMATER)) {
+            Configuration jobConf = null;
+            log.debug("start job :" + jobId + " Materialization ");
+            try {
+                jobConf = new XConfiguration(new StringReader(job.getConf()));
+            }
+            catch (IOException e1) {
+                log.warn("Configuration parse error. read from DB :" + job.getConf(), e1);
+                throw new CommandException(ErrorCode.E1005, e1.getMessage(), e1);
+            }
+
+            Instrumentation.Cron cron = new Instrumentation.Cron();
+            cron.start();
+            try {
+                materializeJobs(false, job, jobConf, store);
+                // NOTE(review): updateJobTable is defined elsewhere in this class;
+                // presumably it advances lastActionTime/status -- confirm there.
+                updateJobTable(job, store);
+            }
+            catch (CommandException ex) {
+                // A materialization failure fails the whole job (persisted here).
+                log.warn("Exception occurs:" + ex + " Making the job failed ");
+                job.setStatus(CoordinatorJobBean.Status.FAILED);
+                store.updateCoordinatorJob(job);
+            }
+            catch (Exception e) {
+                log.error("Excepion thrown :", e);
+                throw new CommandException(ErrorCode.E1001, e.getMessage(), e);
+            }
+            cron.stop();
+        }
+        else {
+            log.info("WARN: action is not in PREMATER state! It's in state=" + job.getStatus());
+        }
+        return null;
+    }
+
+    /**
+     * Creates action instances starting from "start-time" up to "end-time"
+     * and stores them into the action table (or, for a dryrun, returns their
+     * XML instead of persisting).
+     *
+     * @param dryrun when true nothing is stored; the generated action XML is returned
+     * @param jobBean coordinator job being materialized
+     * @param conf job configuration
+     * @param store coordinator store used to persist the actions
+     * @return the last materialized action XML, or (dryrun) all generated XML
+     * @throws Exception on parse or materialization failure
+     */
+    protected String materializeJobs(boolean dryrun, CoordinatorJobBean jobBean, Configuration conf,
+            CoordinatorStore store) throws Exception {
+        String jobXml = jobBean.getJobXml();
+        Element eJob = XmlUtils.parseXml(jobXml);
+        // TODO: always UTC?
+        TimeZone appTz = DateUtils.getTimeZone(jobBean.getTimeZone());
+        // TimeZone appTz = DateUtils.getTimeZone("UTC");
+        int frequency = jobBean.getFrequency();
+        TimeUnit freqTU = TimeUnit.valueOf(eJob.getAttributeValue("freq_timeunit"));
+        TimeUnit endOfFlag = TimeUnit.valueOf(eJob.getAttributeValue("end_of_duration"));
+        Calendar start = Calendar.getInstance(appTz);
+        start.setTime(startTime);
+        DateUtils.moveToEnd(start, endOfFlag);
+        Calendar end = Calendar.getInstance(appTz);
+        end.setTime(endTime);
+        lastActionNumber = jobBean.getLastActionNumber();
+        // DateUtils.moveToEnd(end, endOfFlag);
+        log.info(" *** materialize Actions for tz=" + appTz.getDisplayName() + ",\n start=" + start.getTime()
+                + ", end=" + end.getTime() + "\n TimeUNIT " + freqTU.getCalendarUnit() + " Frequency :" + frequency
+                + ":" + freqTU + " lastActionNumber " + lastActionNumber);
+        // Keep the actual start time
+        Calendar origStart = Calendar.getInstance(appTz);
+        origStart.setTime(jobBean.getStartTimestamp());
+        // Move to the End of duration, if needed.
+        DateUtils.moveToEnd(origStart, endOfFlag);
+        // Cloning the start time to be used in loop iteration
+        Calendar effStart = (Calendar) origStart.clone();
+        // Move the time when the previous action finished
+        effStart.add(freqTU.getCalendarUnit(), lastActionNumber * frequency);
+
+        String action = null;
+        StringBuilder actionStrings = new StringBuilder();
+        // One iteration per action: nominal time = job start + n * frequency.
+        while (effStart.compareTo(end) < 0) {
+            CoordinatorActionBean actionBean = new CoordinatorActionBean();
+            lastActionNumber++;
+
+            actionBean.setTimeOut(jobBean.getTimeout());
+
+            log.info(origStart.getTime() + " Materializing action for time=" + effStart.getTime()
+                    + ", lastactionnumber=" + lastActionNumber);
+            action = materializeOneInstance(dryrun, (Element) eJob.clone(), effStart.getTime(), lastActionNumber, conf,
+                    actionBean);
+            // Catch-up actions (nominal time before job creation) never time out.
+            if (actionBean.getNominalTimestamp().before(jobBean.getCreatedTimestamp())) {
+                actionBean.setTimeOut(-1);
+            }
+
+            if (!dryrun) {
+                storeToDB(actionBean, action, store); // Storing to table
+            }
+            else {
+                actionStrings.append("action for new instance");
+                actionStrings.append(action);
+            }
+            // Restore the original start time
+            effStart = (Calendar) origStart.clone();
+            effStart.add(freqTU.getCalendarUnit(), lastActionNumber * frequency);
+        }
+
+        // Record where materialization actually stopped for this invocation.
+        endTime = new Date(effStart.getTimeInMillis());
+        if (!dryrun) {
+            return action;
+        }
+        else {
+            return actionStrings.toString();
+        }
+    }
+
+    /**
+     * Materializes one action instance for a specific nominal time.  It:
+     * 1. materializes the data events (input-events / output-events),
+     * 2. materializes data properties (dataIn() / dataOut()),
+     * 3. removes the 'start' and 'end' attributes,
+     * 4. adds 'instance-number' and 'action-nominal-time' attributes.
+     *
+     * @param dryrun when true the input check is run immediately instead of persisting
+     * @param eAction frequency unexploded job element (a clone; mutated here)
+     * @param nominalTime materialization (nominal) time of this instance
+     * @param instanceCount instance number of this action within the job
+     * @param conf job configuration
+     * @param actionBean bean populated with the new action's state (out parameter)
+     * @return one materialized action XML for the given nominal time
+     * @throws Exception on EL or XML failure
+     */
+    private String materializeOneInstance(boolean dryrun, Element eAction, Date nominalTime, int instanceCount,
+            Configuration conf, CoordinatorActionBean actionBean) throws Exception {
+        String actionId = Services.get().get(UUIDService.class).generateChildId(jobId, instanceCount + "");
+        SyncCoordAction appInst = new SyncCoordAction();
+        appInst.setActionId(actionId);
+        appInst.setName(eAction.getAttributeValue("name"));
+        appInst.setNominalTime(nominalTime);
+        int frequency = Integer.parseInt(eAction.getAttributeValue("frequency"));
+        appInst.setFrequency(frequency);
+        appInst.setTimeUnit(TimeUnit.valueOf(eAction.getAttributeValue("freq_timeunit"))); // TODO:
+        appInst.setTimeZone(DateUtils.getTimeZone(eAction.getAttributeValue("timezone")));
+        appInst.setEndOfDuration(TimeUnit.valueOf(eAction.getAttributeValue("end_of_duration")));
+
+        // Accumulates the resolved dependency URIs of the input events.
+        StringBuffer dependencyList = new StringBuffer();
+
+        Element inputList = eAction.getChild("input-events", eAction.getNamespace());
+        List dataInList = null;
+        if (inputList != null) {
+            dataInList = (List) inputList.getChildren("data-in", eAction.getNamespace());
+            materializeDataEvents(dataInList, appInst, conf, dependencyList);
+        }
+
+        Element outputList = eAction.getChild("output-events", eAction.getNamespace());
+        List dataOutList = null;
+        if (outputList != null) {
+            dataOutList = (List) outputList.getChildren("data-out", eAction.getNamespace());
+            StringBuffer tmp = new StringBuffer();
+            materializeDataEvents(dataOutList, appInst, conf, tmp);// no dependency checks
+        }
+
+        eAction.removeAttribute("start");
+        eAction.removeAttribute("end");
+        eAction.setAttribute("instance-number", Integer.toString(instanceCount));
+        eAction.setAttribute("action-nominal-time", DateUtils.formatDateUTC(nominalTime));
+
+        boolean isSla = materializeSLA(eAction.getChild("action", eAction.getNamespace()).getChild("info",
+                eAction.getNamespace("sla")), nominalTime, conf);
+
+        // Setting up action bean
+        actionBean.setCreatedConf(XmlUtils.prettyPrint(conf).toString());
+        actionBean.setRunConf(XmlUtils.prettyPrint(conf).toString()); // TODO:
+        actionBean.setCreatedTime(new Date());
+        actionBean.setJobId(jobId);
+        // actionBean.setId(jobId + "_" + instanceCount);
+        actionBean.setId(actionId);
+        actionBean.setLastModifiedTime(new Date());
+        actionBean.setStatus(CoordinatorAction.Status.WAITING);
+        actionBean.setActionNumber(instanceCount);
+        actionBean.setMissingDependencies(dependencyList.toString());
+        actionBean.setNominalTime(nominalTime);
+        if (isSla == true) {
+            actionBean.setSlaXml(XmlUtils.prettyPrint(
+                    eAction.getChild("action", eAction.getNamespace()).getChild("info", eAction.getNamespace("sla")))
+                    .toString());
+        }
+
+        // actionBean.setTrackerUri(trackerUri);//TOOD:
+        // actionBean.setConsoleUrl(consoleUrl); //TODO:
+        // actionBean.setType(type);//TODO:
+        // actionBean.setErrorInfo(errorCode, errorMessage); //TODO:
+        // actionBean.setExternalStatus(externalStatus);//TODO
+        if (!dryrun) {
+            return XmlUtils.prettyPrint(eAction).toString();
+        }
+        else {
+            // Dryrun: run the input check right away so the caller sees the
+            // action XML with resolvable dependencies already materialized.
+            String action = XmlUtils.prettyPrint(eAction).toString();
+            CoordActionInputCheckCommand coordActionInput = new CoordActionInputCheckCommand(actionBean.getId());
+            StringBuilder actionXml = new StringBuilder(action);
+            StringBuilder existList = new StringBuilder();
+            StringBuilder nonExistList = new StringBuilder();
+            StringBuilder nonResolvedList = new StringBuilder();
+            getResolvedList(actionBean.getMissingDependencies(), nonExistList, nonResolvedList);
+            Date actualTime = new Date();
+            Configuration actionConf = new XConfiguration(new StringReader(actionBean.getRunConf()));
+            coordActionInput.checkInput(actionXml, existList, nonExistList, actionConf, actualTime);
+            return actionXml.toString();
+        }
+
+        // return XmlUtils.prettyPrint(eAction).toString();
+    }
+
+    /**
+     * Materializes all data-in / data-out event tags: creates uris for
+     * resolved instances and keeps unresolved instances (latest()) separate.
+     * Unresolved instances are appended to dependencyList after a
+     * RESOLVED_UNRESOLVED_SEPARATOR.
+     *
+     * @param events list of data event elements to materialize (may be null)
+     * @param appInst action instance used for EL evaluation
+     * @param conf job configuration
+     * @param dependencyList receives resolved URIs then unresolved instances
+     * @throws Exception on EL evaluation failure
+     */
+    private void materializeDataEvents(List events, SyncCoordAction appInst, Configuration conf,
+            StringBuffer dependencyList) throws Exception {
+
+        if (events == null) {
+            return;
+        }
+        StringBuffer unresolvedList = new StringBuffer();
+        for (Element event : events) {
+            StringBuilder instances = new StringBuilder();
+            ELEvaluator eval = CoordELEvaluator.createInstancesELEvaluator(event, appInst, conf);
+            // Handle list of instance tag
+            resolveInstances(event, instances, appInst, conf, eval);
+            // Handle start-instance and end-instance
+            resolveInstanceRange(event, instances, appInst, conf, eval);
+            // Separate out the unresolved instances
+            separateResolvedAndUnresolved(event, instances, dependencyList);
+            String tmpUnresolved = event.getChildTextTrim("unresolved-instances", event.getNamespace());
+            if (tmpUnresolved != null) {
+                if (unresolvedList.length() > 0) {
+                    unresolvedList.append(CoordELFunctions.INSTANCE_SEPARATOR);
+                }
+                unresolvedList.append(tmpUnresolved);
+            }
+        }
+        // Unresolved part goes after the separator so consumers can split it off.
+        if (unresolvedList.length() > 0) {
+            dependencyList.append(RESOLVED_UNRESOLVED_SEPARATOR);
+            dependencyList.append(unresolvedList);
+        }
+        return;
+    }
+
+    /**
+     * Resolves the list of explicit instance tags of a data event, appending
+     * each materialized instance to the accumulator; the instance tags are
+     * removed from the event afterwards.
+     *
+     * @param event data event element
+     * @param instances accumulator for materialized instances ("," separated)
+     * @param actionInst action instance used for EL evaluation
+     * @param conf job configuration
+     * @param eval EL evaluator
+     * @throws Exception on EL evaluation failure
+     */
+    private void resolveInstances(Element event, StringBuilder instances, SyncCoordAction actionInst,
+            Configuration conf, ELEvaluator eval) throws Exception {
+        for (Element eInstance : (List) event.getChildren("instance", event.getNamespace())) {
+            if (instances.length() > 0) {
+                instances.append(CoordELFunctions.INSTANCE_SEPARATOR);
+            }
+            instances.append(materializeInstance(event, eInstance.getTextTrim(), actionInst, conf, eval));
+        }
+        event.removeChildren("instance", event.getNamespace());
+    }
+
+    /**
+     * Resolves the start-instance / end-instance range tags of a data event.
+     * current() ranges are fully resolved now; latest() ranges are expanded
+     * into per-index latest(n) expressions that are resolved later.  Mixed
+     * current/latest ranges and inverted ranges are rejected.  The range tags
+     * are removed from the event afterwards.
+     *
+     * @param event data event element
+     * @param instances accumulator for materialized instances ("," separated)
+     * @param appInst action instance used for EL evaluation
+     * @param conf job configuration
+     * @throws Exception on validation or EL evaluation failure
+     */
+    private void resolveInstanceRange(Element event, StringBuilder instances, SyncCoordAction appInst,
+            Configuration conf, ELEvaluator eval) throws Exception {
+        Element eStartInst = event.getChild("start-instance", event.getNamespace());
+        Element eEndInst = event.getChild("end-instance", event.getNamespace());
+        if (eStartInst != null && eEndInst != null) {
+            String strStart = eStartInst.getTextTrim();
+            String strEnd = eEndInst.getTextTrim();
+            checkIfBothSameType(strStart, strEnd);
+            int startIndex = getInstanceNumber(strStart, event, appInst, conf);
+            int endIndex = getInstanceNumber(strEnd, event, appInst, conf);
+            if (startIndex > endIndex) {
+                throw new CommandException(ErrorCode.E1010,
+                        " start-instance should be equal or earlier than the end-instance \n"
+                                + XmlUtils.prettyPrint(event));
+            }
+            if (strStart.indexOf("latest") < 0 && strEnd.indexOf("latest") < 0) {
+                // Everything could be resolved NOW. no latest() ELs
+                // Walk from end to start so instances come out newest-first.
+                for (int i = endIndex; i >= startIndex; i--) {
+                    String matInstance = materializeInstance(event, "${coord:current(" + i + ")}", appInst, conf, eval);
+                    if (matInstance == null || matInstance.length() == 0) {
+                        // Earlier than dataset's initial instance
+                        break;
+                    }
+                    if (instances.length() > 0) {
+                        instances.append(CoordELFunctions.INSTANCE_SEPARATOR);
+                    }
+                    instances.append(matInstance);
+                }
+            }
+            else { // latest(n) EL is present
+                for (; startIndex <= endIndex; startIndex++) {
+                    if (instances.length() > 0) {
+                        instances.append(CoordELFunctions.INSTANCE_SEPARATOR);
+                    }
+                    instances.append("${coord:latest(" + startIndex + ")}");
+                }
+            }
+            // Remove start-instance and end-instances
+            event.removeChild("start-instance", event.getNamespace());
+            event.removeChild("end-instance", event.getNamespace());
+        }
+    }
+
+    /**
+     * Ensure the start and end instance expressions use the same EL family:
+     * mixing current() with latest() in one range is rejected with E1010.
+     *
+     * @param startInst start-instance EL text
+     * @param endInst end-instance EL text
+     * @throws CommandException if one bound uses current and the other latest
+     */
+    private void checkIfBothSameType(String startInst, String endInst) throws CommandException {
+        if ((startInst.indexOf("current") >= 0 && endInst.indexOf("latest") >= 0)
+                || (startInst.indexOf("latest") >= 0 && endInst.indexOf("current") >= 0)) {
+            throw new CommandException(ErrorCode.E1010,
+                    " start-instance and end-instance both should be either latest or current\n" + " start "
+                            + startInst + " and end " + endInst);
+        }
+    }
+
+    /**
+     * Create two new tags: {@code <uris>} (the resolved URIs) and
+     * {@code <unresolved-instances>} (instances still containing latest()),
+     * and add them to the event. Resolved URIs (with done-flag appended) are
+     * also accumulated into {@code dependencyList}.
+     *
+     * @param event data event element; receives the new child tags
+     * @param instances resolved/unresolved instance values produced earlier
+     * @param dependencyList accumulator of URIs-with-done-flag across events
+     * @throws Exception if URI EL evaluation fails
+     */
+    private void separateResolvedAndUnresolved(Element event, StringBuilder instances, StringBuffer dependencyList)
+            throws Exception {
+        StringBuilder unresolvedInstances = new StringBuilder();
+        StringBuilder urisWithDoneFlag = new StringBuilder();
+        String uris = createURIs(event, instances.toString(), unresolvedInstances, urisWithDoneFlag);
+        if (uris.length() > 0) {
+            // Insert the resolved URI list as a <uris> child of the event.
+            Element uriInstance = new Element("uris", event.getNamespace());
+            uriInstance.addContent(uris);
+            event.getContent().add(1, uriInstance);
+            if (dependencyList.length() > 0) {
+                dependencyList.append(CoordELFunctions.INSTANCE_SEPARATOR);
+            }
+            dependencyList.append(urisWithDoneFlag);
+        }
+        if (unresolvedInstances.length() > 0) {
+            // Keep latest() instances in <unresolved-instances> for later resolution.
+            Element elemInstance = new Element("unresolved-instances", event.getNamespace());
+            elemInstance.addContent(unresolvedInstances.toString());
+            event.getContent().add(1, elemInstance);
+        }
+    }
+
+    /**
+     * The function creates a list of URIs separated by "," using the instance time stamps
+     * and the dataset's URI-template.
+     *
+     * @param event : event element (must contain a {@code <dataset>} child)
+     * @param instances : list of time stamps separated by ","
+     * @param unresolvedInstances : out-param collecting instances with a latest() function
+     * @param urisWithDoneFlag : out-param collecting URIs with the done flag appended
+     * @return : list of URIs separated by ",". Empty string when no instances given.
+     * @throws Exception if URI EL evaluation fails
+     */
+    private String createURIs(Element event, String instances, StringBuilder unresolvedInstances,
+            StringBuilder urisWithDoneFlag) throws Exception {
+        if (instances == null || instances.length() == 0) {
+            return "";
+        }
+        String[] instanceList = instances.split(CoordELFunctions.INSTANCE_SEPARATOR);
+        StringBuilder uris = new StringBuilder();
+
+        // Done-flag is defined (or defaulted) on the event's dataset definition.
+        Element doneFlagElement = event.getChild("dataset", event.getNamespace()).getChild("done-flag",
+                event.getNamespace());
+        String doneFlag = CoordUtils.getDoneFlag(doneFlagElement);
+
+        for (int i = 0; i < instanceList.length; i++) {
+            // latest() entries cannot be turned into URIs yet — collect them separately.
+            if (instanceList[i].indexOf("latest") >= 0) {
+                if (unresolvedInstances.length() > 0) {
+                    unresolvedInstances.append(CoordELFunctions.INSTANCE_SEPARATOR);
+                }
+                unresolvedInstances.append(instanceList[i]);
+                continue;
+            }
+            // Evaluator bound to this instance's timestamp for URI-template expansion.
+            ELEvaluator eval = CoordELEvaluator.createURIELEvaluator(instanceList[i]);
+            if (uris.length() > 0) {
+                uris.append(CoordELFunctions.INSTANCE_SEPARATOR);
+                urisWithDoneFlag.append(CoordELFunctions.INSTANCE_SEPARATOR);
+            }
+
+            String uriPath = CoordELFunctions.evalAndWrap(eval, event.getChild("dataset", event.getNamespace())
+                    .getChild("uri-template", event.getNamespace()).getTextTrim());
+            uris.append(uriPath);
+            if (doneFlag.length() > 0) {
+                // The done-flag file marks dataset availability; only the
+                // dependency list carries it, not the <uris> tag.
+                uriPath += "/" + doneFlag;
+            }
+            urisWithDoneFlag.append(uriPath);
+        }
+        return uris.toString();
+    }
+
+    /**
+     * Materialize one instance EL expression like current(-2) into its value.
+     *
+     * @param event : data event element; a null event short-circuits to null
+     * @param expr : instance EL expression like current(-1)
+     * @param appInst : application-specific instance info (unused here; kept for signature parity)
+     * @param conf : job configuration (unused here; kept for signature parity)
+     * @param evalInst : pre-built EL evaluator used to evaluate the expression
+     * @return materialized date string, or null when event is null
+     * @throws Exception if EL evaluation fails
+     */
+    private String materializeInstance(Element event, String expr, SyncCoordAction appInst, Configuration conf,
+            ELEvaluator evalInst) throws Exception {
+        if (event == null) {
+            return null;
+        }
+        // Evaluator is created once by the caller and reused across instances.
+        // ELEvaluator eval = CoordELEvaluator.createInstancesELEvaluator(event,
+        // appInst, conf);
+        return CoordELFunctions.evalAndWrap(evalInst, expr);
+    }
+
+    /**
+     * Parse a function like coord:latest(n) or coord:current(n) and return the 'n'.
+     * The function text is first EL-evaluated, then the integer between the
+     * outermost parentheses is extracted.
+     *
+     * @param function EL function text, e.g. "${coord:latest(-2)}"
+     * @param event data event element used to build the evaluator
+     * @param appInst coordinator action instance info
+     * @param conf job configuration
+     * @return the integer parameter of the function
+     * @throws Exception if evaluation fails or the text has no well-formed "(n)"
+     */
+    private int getInstanceNumber(String function, Element event, SyncCoordAction appInst, Configuration conf)
+            throws Exception {
+        ELEvaluator eval = CoordELEvaluator
+                .createInstancesELEvaluator("coord-action-create-inst", event, appInst, conf);
+        String newFunc = CoordELFunctions.evalAndWrap(eval, function);
+        int firstPos = newFunc.indexOf("(");
+        int lastPos = newFunc.lastIndexOf(")");
+        if (firstPos >= 0 && lastPos > firstPos) {
+            String tmp = newFunc.substring(firstPos + 1, lastPos).trim();
+            if (tmp.length() > 0) {
+                return Integer.parseInt(tmp);
+            }
+        }
+        // Error handling
+        throw new RuntimeException("Unformatted function :" + newFunc);
+    }
+
+    /**
+     * Store an action into the database table, register its SLA event, and
+     * queue the notification and input-check commands for it.
+     *
+     * @param actionBean action to persist; its XML is set from actionXml
+     * @param actionXml fully materialized action XML
+     * @param store coordinator store used for the insert
+     * @throws Exception if the insert, SLA registration, or XML parse fails
+     */
+    private void storeToDB(CoordinatorActionBean actionBean, String actionXml, CoordinatorStore store) throws Exception {
+        log.debug("In storeToDB() action Id " + actionBean.getId() + " Size of actionXml " + actionXml.length());
+        actionBean.setActionXml(actionXml);
+        store.insertCoordinatorAction(actionBean);
+        writeActionRegistration(actionXml, actionBean, store);
+
+        // TODO: time 100s should be configurable
+        queueCallable(new CoordActionNotification(actionBean), 100);
+        queueCallable(new CoordActionInputCheckCommand(actionBean.getId()), 100);
+    }
+
+    /**
+     * Extract the SLA info element from the action XML and write an SLA
+     * registration event for this coordinator action.
+     *
+     * @param actionXml materialized action XML containing the sla-namespaced info
+     * @param actionBean action whose id the SLA event is registered under
+     * @param store coordinator store the event is written through
+     * @throws Exception if the XML cannot be parsed or the event cannot be written
+     */
+    private void writeActionRegistration(String actionXml, CoordinatorActionBean actionBean, CoordinatorStore store)
+            throws Exception {
+        Element eAction = XmlUtils.parseXml(actionXml);
+        // <info> lives under the "sla" namespace within <action>.
+        Element eSla = eAction.getChild("action", eAction.getNamespace()).getChild("info", eAction.getNamespace("sla"));
+        SLADbOperations.writeSlaRegistrationEvent(eSla, store, actionBean.getId(), SlaAppType.COORDINATOR_ACTION, user,
+                group);
+    }
+
+    /**
+     * Update the coordinator job row after materialization: record the last
+     * action time/number, advance nextMaterializedTime, and flip status to
+     * SUCCEEDED when the materialization window has reached the job end time,
+     * or back to RUNNING otherwise.
+     *
+     * @param job job bean to update (fields mutated in place)
+     * @param store coordinator store used to persist the update
+     * @throws StoreException if the DB update fails
+     */
+    private void updateJobTable(CoordinatorJobBean job, CoordinatorStore store) throws StoreException {
+        // TODO: why do we need this? Isn't lastMatTime enough???
+        job.setLastActionTime(endTime);
+        job.setLastActionNumber(lastActionNumber);
+        // if the job endtime == action endtime, then set status of job to
+        // succeeded
+        // we dont need to materialize this job anymore
+        Date jobEndTime = job.getEndTime();
+        if (jobEndTime.compareTo(endTime) <= 0) {
+            job.setStatus(CoordinatorJob.Status.SUCCEEDED);
+            log.info("[" + job.getId() + "]: Update status from PREMATER to SUCCEEDED");
+        }
+        else {
+            job.setStatus(CoordinatorJob.Status.RUNNING);
+            log.info("[" + job.getId() + "]: Update status from PREMATER to RUNNING");
+        }
+        job.setNextMaterializedTime(endTime);
+        store.updateCoordinatorJob(job);
+    }
+
+    /**
+     * Materialize the SLA element in place: every child element's text is
+     * EL-evaluated against the action's nominal time and rewritten.
+     *
+     * @param eSla SLA element to materialize; may be null when no SLA is defined
+     * @param nominalTime nominal time the SLA evaluator is bound to
+     * @param conf job configuration
+     * @return true when an SLA was present and materialized, false when eSla is null
+     * @throws CoordinatorJobException wrapping any evaluation failure as E1004
+     */
+    private boolean materializeSLA(Element eSla, Date nominalTime, Configuration conf) throws CoordinatorJobException {
+        if (eSla == null) {
+            // No SLA defined for this action; nothing to materialize.
+            // System.out.println("NO SLA presents " +
+            // eAppXml.getNamespace("sla"));
+            return false;
+        }
+        try {
+            ELEvaluator evalSla = CoordELEvaluator.createSLAEvaluator(nominalTime, conf);
+            // System.out.println("SLA presents");
+            List elemList = eSla.getChildren();
+            for (Element elem : elemList) {
+                String updated;
+                try {
+                    updated = CoordELFunctions.evalAndWrap(evalSla, elem.getText().trim());
+                }
+                catch (Exception e) {
+                    throw new CoordinatorJobException(ErrorCode.E1004, e.getMessage(), e);
+                }
+                // Replace the element's text with its evaluated value.
+                elem.removeContent();
+                elem.addContent(updated);
+            }
+        }
+        catch (Exception e) {
+            throw new CoordinatorJobException(ErrorCode.E1004, e.getMessage(), e);
+        }
+        return true;
+    }
+
+    @Override
+    /**
+     * Entry point: acquire the per-job lock and run materialization via call().
+     * If the lock cannot be acquired (or acquiring is interrupted) the command
+     * requeues itself after LOCK_FAILURE_REQUEUE_INTERVAL rather than failing.
+     *
+     * @param store coordinator store for this execution
+     * @return always null
+     */
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordActionMaterializeCommand for jobId=" + jobId + ", startTime=" + startTime + ", endTime="
+                + endTime);
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                // Lock held elsewhere — requeue a fresh command for the same window.
+                queueCallable(new CoordActionMaterializeCommand(jobId, startTime, endTime), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordActionMaterializeCommand lock was not acquired - failed jobId=" + jobId
+                        + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordActionMaterializeCommand(jobId, startTime, endTime), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordActionMaterializeCommand lock acquiring failed with exception " + e.getMessage()
+                    + " for jobId=" + jobId + " Requeing the same.");
+        }
+        finally {
+            log.info(" ENDED CoordActionMaterializeCommand for jobId=" + jobId + ", startTime=" + startTime
+                    + ", endTime=" + endTime);
+        }
+        return null;
+    }
+
+    /**
+     * Split a missing-dependency list into its resolved and unresolved halves,
+     * using the first occurrence of RESOLVED_UNRESOLVED_SEPARATOR as the divider.
+     * When no separator is present the whole list is treated as resolved.
+     *
+     * @param missDepList combined dependency list; null leaves both outputs empty
+     * @param resolved out-param receiving the resolved portion
+     * @param unresolved out-param receiving the unresolved portion (if any)
+     * @return the resolved portion as a String (same content appended to 'resolved')
+     */
+    public static String getResolvedList(String missDepList, StringBuilder resolved, StringBuilder unresolved) {
+        if (missDepList != null) {
+            int index = missDepList.indexOf(RESOLVED_UNRESOLVED_SEPARATOR);
+            if (index < 0) {
+                resolved.append(missDepList);
+            }
+            else {
+                resolved.append(missDepList.substring(0, index));
+                unresolved.append(missDepList.substring(index + 1));
+            }
+        }
+        return resolved.toString();
+    }
+
+    /**
+     * For preliminary testing. Should be removed soon.
+     * Boots Services, runs one materialization for a hard-coded job id and
+     * date window, then sleeps a minute before tearing Services down.
+     *
+     * @param args unused
+     * @throws Exception on any setup or materialization failure
+     */
+    public static void main(String[] args) throws Exception {
+        new Services().init();
+        try {
+            Date startTime = DateUtils.parseDateUTC("2009-02-01T01:00Z");
+            Date endTime = DateUtils.parseDateUTC("2009-02-02T01:00Z");
+            String jobId = "0000000-091207151850551-oozie-dani-C";
+            CoordActionMaterializeCommand matCmd = new CoordActionMaterializeCommand(jobId, startTime, endTime);
+            matCmd.call();
+        }
+        finally {
+            try {
+                // Give queued async commands time to drain before shutdown.
+                Thread.sleep(60000);
+            }
+            catch (Exception ex) {
+                // best-effort sleep; interruption here is harmless
+            }
+            // NOTE(review): this destroys a NEW Services instance, not the one
+            // init()'d above — presumably Services is a singleton internally;
+            // confirm, otherwise the initialized instance is never destroyed.
+            new Services().destroy();
+        }
+    }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionNotification.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionNotification.java
new file mode 100644
index 000000000..38c600a28
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionNotification.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.ErrorCode;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Command that sends an HTTP notification for a coordinator action's status
+ * change. The notification URL comes from the action's run configuration
+ * (OozieClient.COORD_ACTION_NOTIFICATION_URL) with $actionId and $status
+ * placeholders substituted. Failed notifications are retried up to 3 times
+ * at one-minute intervals.
+ */
+public class CoordActionNotification extends CoordinatorCommand {
+
+    private CoordinatorActionBean actionBean;
+    // Regex patterns for placeholder substitution in the notification URL.
+    private static final String STATUS_PATTERN = "\\$status";
+    private static final String ACTION_ID_PATTERN = "\\$actionId";
+
+    // Number of notification attempts made so far (max 3).
+    private int retries = 0;
+    private final XLog log = XLog.getLog(getClass());
+
+    public CoordActionNotification(CoordinatorActionBean actionBean) {
+        super("coord_action_notification", "coord_action_notification", 0,
+                XLog.STD);
+        this.actionBean = actionBean;
+    }
+
+    /**
+     * Build the notification URL from the action's run conf and hit it with an
+     * HTTP GET; any non-200 response or IO failure triggers a retry.
+     *
+     * @param store coordinator store (unused by this command)
+     * @return always null
+     * @throws CommandException E1005 when the stored run conf cannot be parsed
+     */
+    @Override
+    protected Void call(CoordinatorStore store) throws StoreException,
+            CommandException {
+        setLogInfo(actionBean);
+        log.info("STARTED Coordinator Notification actionId="
+                + actionBean.getId() + " : " + actionBean.getStatus());
+        Configuration conf;
+        try {
+            conf = new XConfiguration(new StringReader(actionBean.getRunConf()));
+        }
+        catch (IOException e1) {
+            log.warn("Configuration parse error. read from DB :"
+                    + actionBean.getRunConf());
+            throw new CommandException(ErrorCode.E1005, e1.getMessage(), e1);
+        }
+        String url = conf.get(OozieClient.COORD_ACTION_NOTIFICATION_URL);
+        if (url != null) {
+            // Substitute placeholders with the action's id and current status.
+            url = url.replaceAll(ACTION_ID_PATTERN, actionBean.getId());
+            url = url.replaceAll(STATUS_PATTERN, actionBean.getStatus()
+                    .toString());
+            log.debug("Notification URL :" + url);
+            try {
+                URL urlObj = new URL(url);
+                HttpURLConnection urlConn = (HttpURLConnection) urlObj
+                        .openConnection();
+                if (urlConn.getResponseCode() != HttpURLConnection.HTTP_OK) {
+                    handleRetry(url);
+                }
+            }
+            catch (IOException ex) {
+                handleRetry(url);
+            }
+        }
+        else {
+            log
+                    .info("No Notification URL is defined. Therefore nothing to notify for job "
+                            + actionBean.getJobId()
+                            + " action ID "
+                            + actionBean.getId());
+            // System.out.println("No Notification URL is defined. Therefore nothing is notified");
+        }
+        log.info("ENDED Coordinator Notification actionId="
+                + actionBean.getId());
+        return null;
+    }
+
+    /**
+     * Requeue this command after 60s if fewer than 3 attempts have been made;
+     * otherwise log an OPS warning and give up.
+     *
+     * @param url the notification URL that failed (for logging)
+     */
+    private void handleRetry(String url) {
+        if (retries < 3) {
+            retries++;
+            queueCallable(this, 60 * 1000);
+        }
+        else {
+            XLog.getLog(getClass()).warn(XLog.OPS,
+                    "could not send notification [{0}]", url);
+        }
+    }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionReadyCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionReadyCommand.java
new file mode 100644
index 000000000..ba424e2eb
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionReadyCommand.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.util.List;
+
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Command that promotes READY coordinator actions to SUBMITTED and queues a
+ * CoordActionStartCommand for each, honoring the job's concurrency limit and
+ * execution order (FIFO, LIFO, LAST_ONLY).
+ */
+public class CoordActionReadyCommand extends CoordinatorCommand {
+    private String jobId;
+    private final XLog log = XLog.getLog(getClass());
+
+    public CoordActionReadyCommand(String id) {
+        super("coord_action_ready", "coord_action_ready", 0, XLog.STD);
+        this.jobId = id;
+    }
+
+    @Override
+    /**
+     * Check for READY actions and change state to SUBMITTED by a command to submit the job to WF engine.
+     * This method checks all the actions associated with a jobId to figure out which actions
+     * to start (based on concurrency and execution order [FIFO, LIFO, LAST_ONLY])
+     *
+     * @param store Coordinator Store
+     */
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        // number of actions to start (-1 means start ALL)
+        int numActionsToStart = -1;
+        // get CoordinatorJobBean for jobId
+        //CoordinatorJobBean coordJob = store.getCoordinatorJob(jobId, false);
+        CoordinatorJobBean coordJob = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+        setLogInfo(coordJob);
+        // get execution setting for this job (FIFO, LIFO, LAST_ONLY)
+        String jobExecution = coordJob.getExecution();
+        // get concurrency setting for this job
+        int jobConcurrency = coordJob.getConcurrency();
+        // if less than 0, then UNLIMITED concurrency
+        if (jobConcurrency >= 0) {
+            // count number of actions that are already RUNNING or SUBMITTED
+            // subtract from CONCURRENCY to calculate number of actions to start
+            // in WF engine
+            int numRunningJobs = store.getCoordinatorRunningActionsCount(jobId);
+            numActionsToStart = jobConcurrency - numRunningJobs;
+            if (numActionsToStart < 0) {
+                numActionsToStart = 0;
+            }
+            log.debug("concurrency=" + jobConcurrency + ", execution=" + jobExecution + ", numRunningJobs="
+                    + numRunningJobs + ", numLeftover=" + numActionsToStart);
+            // no actions to start
+            if (numActionsToStart == 0) {
+                log.warn("No actions to start! for jobId=" + jobId);
+                return null;
+            }
+        }
+        // get list of actions that are READY and fit in the concurrency and
+        // execution
+        List actions = store.getCoordinatorActionsForJob(jobId, numActionsToStart, jobExecution);
+        log.debug("Number of READY actions = " + actions.size());
+        String user = coordJob.getUser();
+        String authToken = coordJob.getAuthToken();
+        // make sure auth token is not null
+        // log.denug("user=" + user + ", token=" + authToken);
+        int counter = 0;
+        for (CoordinatorActionBean action : actions) {
+            // continue if numActionsToStart is negative (no limit on number of
+            // actions), or if the counter is less than numActionsToStart
+            if ((numActionsToStart < 0) || (counter < numActionsToStart)) {
+                log.debug("Set status to SUBMITTED for id: " + action.getId());
+                // change state of action to SUBMITTED
+                action.setStatus(CoordinatorAction.Status.SUBMITTED);
+                // queue action to start action
+                queueCallable(new CoordActionStartCommand(action.getId(), user, authToken), 100);
+                store.updateCoordinatorAction(action);
+            }
+            else {
+                break;
+            }
+            counter++;
+
+        }
+        return null;
+    }
+
+    /**
+     * Entry point: acquire the per-job lock and run call(); on lock failure or
+     * interruption, requeue this command after LOCK_FAILURE_REQUEUE_INTERVAL.
+     *
+     * @param store coordinator store for this execution
+     * @return always null
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordActionReadyCommand for jobId=" + jobId);
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordActionReadyCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordActionReadyCommand lock was not acquired - failed jobId=" + jobId
+                        + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordActionReadyCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordActionReadyCommand lock acquiring failed with exception " + e.getMessage()
+                    + " for jobId=" + jobId + " Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordActionReadyCommand for jobId=" + jobId);
+        }
+        return null;
+    }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionStartCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionStartCommand.java
new file mode 100644
index 000000000..e10213d75
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionStartCommand.java
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.hadoop.conf.Configuration;
+
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.DagEngine;
+import org.apache.oozie.DagEngineException;
+import org.apache.oozie.ErrorCode;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.service.DagEngineService;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XmlUtils;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.db.SLADbOperations;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.client.SLAEvent.Status;
+
+import org.jdom.Element;
+import org.jdom.JDOMException;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+/**
+ * Command that starts a SUBMITTED coordinator action: merges its created
+ * configuration with the materialized action XML into a run configuration,
+ * submits the underlying workflow to the DagEngine, and moves the action to
+ * RUNNING (or FAILED with an error code/message if submission fails).
+ */
+public class CoordActionStartCommand extends CoordinatorCommand {
+
+    public static final String EL_ERROR = "EL_ERROR";
+    public static final String EL_EVAL_ERROR = "EL_EVAL_ERROR";
+    public static final String COULD_NOT_START = "COULD_NOT_START";
+    public static final String START_DATA_MISSING = "START_DATA_MISSING";
+    public static final String EXEC_DATA_MISSING = "EXEC_DATA_MISSING";
+
+    private final XLog log = XLog.getLog(getClass());
+    private String actionId = null;
+    private String user = null;
+    private String authToken = null;
+    // Loaded from the store in execute() before call() runs.
+    private CoordinatorActionBean coordAction = null;
+
+    public CoordActionStartCommand(String id, String user, String token) {
+        super("coord_action_start", "coord_action_start", 0, XLog.OPS);
+        this.actionId = ParamChecker.notEmpty(id, "id");
+        this.user = ParamChecker.notEmpty(user, "user");
+        this.authToken = ParamChecker.notEmpty(token, "token");
+    }
+
+    /**
+     * Create config to pass to WF Engine 1. Get createdConf from coord_actions table 2. Get actionXml from
+     * coord_actions table. Extract all 'property' tags and merge createdConf (overwrite duplicate keys). 3. Extract
+     * 'app-path' from actionXML. Create a new property called 'oozie.wf.application.path' and merge with createdConf
+     * (overwrite duplicate keys) 4. Read contents of config-default.xml in workflow directory. 5. Merge createdConf
+     * with config-default.xml (overwrite duplicate keys). 6. Results is runConf which is saved in coord_actions table.
+     * Merge Action createdConf with actionXml to create new runConf with replaced variables
+     *
+     * @param action CoordinatorActionBean
+     * @return Configuration
+     * @throws CommandException
+     */
+    private Configuration mergeConfig(CoordinatorActionBean action) throws CommandException {
+        String createdConf = action.getCreatedConf();
+        String actionXml = action.getActionXml();
+        Element workflowProperties = null;
+        try {
+            workflowProperties = XmlUtils.parseXml(actionXml);
+        }
+        catch (JDOMException e1) {
+            log.warn("Configuration parse error in:" + actionXml);
+            throw new CommandException(ErrorCode.E1005, e1.getMessage(), e1);
+        }
+        // generate the 'runConf' for this action
+        // Step 1: runConf = createdConf
+        Configuration runConf = null;
+        try {
+            runConf = new XConfiguration(new StringReader(createdConf));
+        }
+        catch (IOException e1) {
+            log.warn("Configuration parse error in:" + createdConf);
+            throw new CommandException(ErrorCode.E1005, e1.getMessage(), e1);
+        }
+        // Step 2: Merge local properties into runConf
+        // extract 'property' tags under 'configuration' block in the
+        // coordinator.xml (saved in actionxml column)
+        // convert Element to XConfiguration
+        Element configElement = (Element) workflowProperties.getChild("action", workflowProperties.getNamespace())
+                .getChild("workflow", workflowProperties.getNamespace()).getChild("configuration",
+                        workflowProperties.getNamespace());
+        if (configElement != null) {
+            String strConfig = XmlUtils.prettyPrint(configElement).toString();
+            Configuration localConf;
+            try {
+                localConf = new XConfiguration(new StringReader(strConfig));
+            }
+            catch (IOException e1) {
+                log.warn("Configuration parse error in:" + strConfig);
+                throw new CommandException(ErrorCode.E1005, e1.getMessage(), e1);
+            }
+
+            // copy configuration properties in coordinator.xml to the runConf
+            XConfiguration.copy(localConf, runConf);
+        }
+
+        // Step 3: Extract value of 'app-path' in actionxml, and save it as a
+        // new property called 'oozie.wf.application.path'
+        // WF Engine requires the path to the workflow.xml to be saved under
+        // this property name
+        String appPath = workflowProperties.getChild("action", workflowProperties.getNamespace()).getChild("workflow",
+                workflowProperties.getNamespace()).getChild("app-path", workflowProperties.getNamespace()).getValue();
+        runConf.set("oozie.wf.application.path", appPath);
+        return runConf;
+    }
+
+    /**
+     * Submit the action's workflow when the action is SUBMITTED. Writes an SLA
+     * STARTED event, records the workflow id as externalId, and sets RUNNING.
+     * Any submission failure (except a StoreException, which is rethrown) marks
+     * the action FAILED with the captured error code/message and requeues a
+     * CoordActionReadyCommand so another action can take the concurrency slot.
+     *
+     * @param store coordinator store used for updates
+     * @return always null
+     */
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        boolean makeFail = true;
+        String errCode = "";
+        String errMsg = "";
+        ParamChecker.notEmpty(user, "user");
+        ParamChecker.notEmpty(authToken, "authToken");
+
+        // CoordinatorActionBean coordAction = store.getCoordinatorAction(id, true);
+        log.debug("actionid=" + actionId + ", status=" + coordAction.getStatus());
+        if (coordAction.getStatus() == CoordinatorAction.Status.SUBMITTED) {
+            // log.debug("getting.. job id: " + coordAction.getJobId());
+            // create merged runConf to pass to WF Engine
+            Configuration runConf = mergeConfig(coordAction);
+            coordAction.setRunConf(XmlUtils.prettyPrint(runConf).toString());
+            // log.debug("%%% merged runconf=" +
+            // XmlUtils.prettyPrint(runConf).toString());
+            DagEngine dagEngine = Services.get().get(DagEngineService.class).getDagEngine(user, authToken);
+            try {
+                boolean startJob = true;
+                Configuration conf = new XConfiguration(new StringReader(coordAction.getRunConf()));
+                SLADbOperations.writeStausEvent(coordAction.getSlaXml(), coordAction.getId(), store, Status.STARTED,
+                        SlaAppType.COORDINATOR_ACTION);
+                String wfId = dagEngine.submitJob(conf, startJob);
+                coordAction.setStatus(CoordinatorAction.Status.RUNNING);
+                coordAction.setExternalId(wfId);
+                store.updateCoordinatorAction(coordAction);
+                // Submission succeeded — do not mark the action FAILED below.
+                makeFail = false;
+            }
+            catch (StoreException se) {
+                // DB failure: rethrow without failing the action here.
+                makeFail = false;
+                throw se;
+            }
+            catch (DagEngineException dee) {
+                errMsg = dee.getMessage();
+                errCode = "E1005";
+                log.warn("can not create DagEngine for submitting jobs", dee);
+            }
+            catch (CommandException ce) {
+                errMsg = ce.getMessage();
+                errCode = ce.getErrorCode().toString();
+                log.warn("command exception occured ", ce);
+            }
+            catch (java.io.IOException ioe) {
+                errMsg = ioe.getMessage();
+                errCode = "E1005";
+                log.warn("Configuration parse error. read from DB :" + coordAction.getRunConf(), ioe);
+            }
+            catch (Exception ex) {
+                errMsg = ex.getMessage();
+                errCode = "E1005";
+                log.warn("can not create DagEngine for submitting jobs", ex);
+            }
+            finally {
+                if (makeFail == true) { // No DB exception occurs
+                    log.warn("Failing the action " + coordAction.getId() + ". Because " + errCode + " : " + errMsg);
+                    coordAction.setStatus(CoordinatorAction.Status.FAILED);
+                    if (errMsg.length() > 254) { // Because table column size is 255
+                        errMsg = errMsg.substring(0, 255);
+                    }
+                    coordAction.setErrorMessage(errMsg);
+                    coordAction.setErrorCode(errCode);
+                    store.updateCoordinatorAction(coordAction);
+                    // Free the concurrency slot for the next READY action.
+                    queueCallable(new CoordActionReadyCommand(coordAction.getJobId()));
+                }
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Entry point: load the action, acquire the per-job lock, and run call().
+     * On lock failure or interruption, requeue this command after
+     * LOCK_FAILURE_REQUEUE_INTERVAL.
+     *
+     * @param store coordinator store for this execution
+     * @return always null
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordActionStartCommand actionId=" + actionId);
+        try {
+            coordAction = store.getEntityManager().find(CoordinatorActionBean.class, actionId);
+            setLogInfo(coordAction);
+            if (lock(coordAction.getJobId())) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordActionStartCommand(actionId, user, authToken), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordActionStartCommand lock was not acquired - failed jobId=" + coordAction.getJobId()
+                        + ", actionId=" + actionId + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordActionStartCommand(actionId, user, authToken), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordActionStartCommand lock acquiring failed with exception " + e.getMessage() + " for jobId="
+                    + coordAction.getJobId() + ", actionId=" + actionId + " Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordActionStartCommand actionId=" + actionId);
+        }
+        return null;
+    }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionTimeOut.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionTimeOut.java
new file mode 100644
index 000000000..fd4e9daa1
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionTimeOut.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.util.Date;
+
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Command that times out a coordinator action: if the action is still in
+ * WAITING state it is moved to TIMEDOUT and a notification is queued.
+ */
+public class CoordActionTimeOut extends CoordinatorCommand {
+    private CoordinatorActionBean actionBean;
+    private final XLog log = XLog.getLog(getClass());
+
+    public CoordActionTimeOut(CoordinatorActionBean actionBean) {
+        super("coord_action_timeout", "coord_action_timeout", 0, XLog.STD);
+        this.actionBean = actionBean;
+    }
+
+    /**
+     * Reload the action and, only when it is still WAITING, flip it to
+     * TIMEDOUT, queue a CoordActionNotification, and persist the update.
+     *
+     * @param store coordinator store used to reload and update the action
+     * @return always null
+     */
+    @Override
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        // Re-read the action for the current status before deciding to time out.
+        // actionBean = store.getCoordinatorAction(actionBean.getId(), false);
+        actionBean = store.getEntityManager().find(CoordinatorActionBean.class, actionBean.getId());
+        if (actionBean.getStatus() == CoordinatorAction.Status.WAITING) {
+            actionBean.setStatus(CoordinatorAction.Status.TIMEDOUT);
+            queueCallable(new CoordActionNotification(actionBean), 100);
+            store.updateCoordinatorAction(actionBean);
+        }
+        return null;
+    }
+
+    /**
+     * Entry point: acquire the per-job lock and run call(); on lock failure or
+     * interruption, requeue this command after LOCK_FAILURE_REQUEUE_INTERVAL.
+     *
+     * @param store coordinator store for this execution
+     * @return always null
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        String jobId = actionBean.getJobId();
+        setLogInfo(actionBean);
+        log.info("STARTED CoordinatorActionTimeOut for Action Id " + actionBean.getId() + " of job Id :"
+                + actionBean.getJobId() + ". Timeout value is " + actionBean.getTimeOut() + " mins");
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordActionTimeOut(actionBean), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordinatorActionTimeOut lock was not acquired - " + " failed " + jobId
+                        + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordActionTimeOut(actionBean), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordinatorActionTimeOut lock acquiring failed " + " with exception " + e.getMessage()
+                    + " for job id " + jobId + ". Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordinatorActionTimeOut for Action Id " + actionBean.getId());
+        }
+        return null;
+    }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordActionUpdateCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordActionUpdateCommand.java
new file mode 100644
index 000000000..a03bbf6e1
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordActionUpdateCommand.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.XException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.db.SLADbOperations;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.client.SLAEvent.Status;
+import org.apache.oozie.command.CommandException;
+
+/**
+ * Propagates a workflow job's status change to the coordinator action that
+ * launched it, updating the action's status and writing the matching SLA event.
+ */
+public class CoordActionUpdateCommand extends CoordinatorCommand<Void> {
+    private final XLog log = XLog.getLog(getClass());
+    private WorkflowJobBean workflow;
+    private CoordinatorActionBean caction = null;
+
+    /**
+     * @param workflow the workflow job whose status change triggered this command
+     */
+    public CoordActionUpdateCommand(WorkflowJobBean workflow) {
+        super("coord-action-update", "coord-action-update", -1, XLog.OPS);
+        this.workflow = workflow;
+    }
+
+    /**
+     * Maps the workflow's terminal state (SUCCEEDED/FAILED/KILLED) onto the
+     * coordinator action and SLA status; for a still-active workflow only the
+     * action's lastModifiedTime is refreshed.
+     */
+    @Override
+    protected Void call(CoordinatorStore cstore) throws StoreException, CommandException {
+        try {
+            if (workflow.getStatus() == WorkflowJob.Status.RUNNING
+                    || workflow.getStatus() == WorkflowJob.Status.SUSPENDED) {
+                // Workflow still active: only refresh lastModifiedTime.
+                cstore.updateCoordinatorAction(caction);
+                return null;
+            }
+            Status slaStatus = null;
+            if (caction != null) {
+                // Flattened else-if ladder (was triply nested if/else blocks).
+                if (workflow.getStatus() == WorkflowJob.Status.SUCCEEDED) {
+                    caction.setStatus(CoordinatorAction.Status.SUCCEEDED);
+                    slaStatus = Status.SUCCEEDED;
+                }
+                else if (workflow.getStatus() == WorkflowJob.Status.FAILED) {
+                    caction.setStatus(CoordinatorAction.Status.FAILED);
+                    slaStatus = Status.FAILED;
+                }
+                else if (workflow.getStatus() == WorkflowJob.Status.KILLED) {
+                    caction.setStatus(CoordinatorAction.Status.KILLED);
+                    slaStatus = Status.KILLED;
+                }
+                else {
+                    log.warn("Unexpected workflow " + workflow.getId() + " STATUS " + workflow.getStatus());
+                    // Unknown state: leave the action status alone, refresh lastModifiedTime.
+                    cstore.updateCoordinatorAction(caction);
+                    return null;
+                }
+
+                log.info("Updating Coordinator action id :" + caction.getId() + " status to =" + caction.getStatus());
+                cstore.updateCoordinatorAction(caction);
+                if (slaStatus != null) {
+                    SLADbOperations.writeStausEvent(caction.getSlaXml(), caction.getId(), cstore, slaStatus,
+                            SlaAppType.COORDINATOR_ACTION);
+                }
+                // The finished action may free a concurrency slot for another action.
+                queueCallable(new CoordActionReadyCommand(caction.getJobId()));
+            }
+        }
+        catch (XException ex) {
+            // Original passed ex.getMessage() as an unused template parameter, so the
+            // message was dropped; concatenate it into the log line instead.
+            log.warn("CoordActionUpdate Failed " + ex.getMessage());
+            throw new CommandException(ex);
+        }
+        return null;
+    }
+
+    /**
+     * Looks up the coordinator action owning this workflow, then runs
+     * {@link #call(CoordinatorStore)} under the coordinator job lock, requeueing
+     * itself if the lock is unavailable or acquisition is interrupted.
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordActionUpdateCommand for wfId=" + workflow.getId());
+        caction = store.getCoordinatorActionForExternalId(workflow.getId());
+        if (caction == null) {
+            // Workflow was not launched by a coordinator action; nothing to update.
+            log.info("ENDED CoordActionUpdateCommand for wfId=" + workflow.getId() + ", coord action is null");
+            return null;
+        }
+        setLogInfo(caction);
+        String jobId = caction.getJobId();
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordActionUpdateCommand(workflow), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordActionUpdateCommand lock was not acquired - failed JobId=" + jobId + ", wfId="
+                        + workflow.getId() + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordActionUpdateCommand(workflow), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordActionUpdateCommand lock acquiring failed with exception " + e.getMessage() + " for jobId="
+                    + jobId + ", wfId=" + workflow.getId() + ". Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordActionUpdateCommand for wfId=" + workflow.getId() + ", jobId=" + jobId);
+        }
+        return null;
+    }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordCheckRunningActionCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordCheckRunningActionCommand.java
new file mode 100644
index 000000000..acd82012d
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordCheckRunningActionCommand.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Periodic check over running coordinator actions.
+ * Currently a placeholder: {@link #call(CoordinatorStore)} only logs that it ran.
+ */
+public class CoordCheckRunningActionCommand extends CoordinatorCommand<Void> {
+    private final XLog log = XLog.getLog(getClass());
+
+    /** Creates the command with priority -1 and STD log level. */
+    public CoordCheckRunningActionCommand() {
+        super("check_running_action", "check_running_action", -1, XLog.STD);
+    }
+
+    /**
+     * No-op placeholder; logs the invocation and returns.
+     */
+    @Override
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("IN CoordCheckRunningActionCommand:call(store) ");
+        return null;
+    }
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordJobCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordJobCommand.java
new file mode 100644
index 000000000..434714839
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordJobCommand.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Command for loading a coordinator job information
+ */
+/**
+ * Command for loading a coordinator job information
+ */
+public class CoordJobCommand extends CoordinatorCommand<CoordinatorJobBean> {
+    private String id;
+    private boolean getActionInfo;
+    private int start = 1;
+    private int len = Integer.MAX_VALUE;
+
+    /**
+     * @param id coord jobId
+     */
+    public CoordJobCommand(String id) {
+        // Load the full action list by default.
+        this(id, 1, Integer.MAX_VALUE);
+    }
+
+    /**
+     * @param id coord jobId
+     * @param start starting index in the list of actions belonging to the job
+     * @param length number of actions to be returned
+     */
+    public CoordJobCommand(String id, int start, int length) {
+        super("job.info", "job.info", 0, XLog.OPS);
+        this.id = ParamChecker.notEmpty(id, "id");
+        this.getActionInfo = true;
+        this.start = start;
+        this.len = length;
+    }
+
+    /**
+     * @param id coord jobId
+     * @param getActionInfo false to ignore loading actions for the job
+     */
+    public CoordJobCommand(String id, boolean getActionInfo) {
+        super("job.info", "job.info", 0, XLog.OPS);
+        this.id = ParamChecker.notEmpty(id, "id");
+        this.getActionInfo = getActionInfo;
+    }
+
+    /**
+     * Loads the coordinator job and, unless disabled, the requested window of
+     * its actions.
+     *
+     * @return the populated job bean (actions are null when getActionInfo is false)
+     */
+    @Override
+    protected CoordinatorJobBean call(CoordinatorStore store) throws StoreException, CommandException {
+        CoordinatorJobBean coord = store.getCoordinatorJob(id, false);
+        if (getActionInfo) {
+            coord.setActions(store.getActionsSubsetForCoordinatorJob(id, start, len));
+        }
+        else {
+            coord.setActions(null);
+        }
+        return coord;
+    }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordJobMatLookupCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordJobMatLookupCommand.java
new file mode 100644
index 000000000..657cfaeaf
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordJobMatLookupCommand.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.sql.Timestamp;
+
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Decides whether a coordinator job is due for action materialization and, if
+ * so, moves it to PREMATER and queues a {@link CoordActionMaterializeCommand}
+ * for the computed [start, end) window.
+ */
+public class CoordJobMatLookupCommand extends CoordinatorCommand<Void> {
+    private final XLog log = XLog.getLog(getClass());
+    // Materialization window in seconds (converted to millis below).
+    private int materializationWindow;
+    private String jobId;
+
+    /**
+     * @param id coordinator job id
+     * @param materializationWindow window size in seconds to materialize per run
+     */
+    public CoordJobMatLookupCommand(String id, int materializationWindow) {
+        super("materialization_lookup", "materialization_lookup", -1, XLog.STD);
+        this.jobId = id;
+        this.materializationWindow = materializationWindow;
+    }
+
+    @Override
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        CoordinatorJobBean coordJob = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+        setLogInfo(coordJob);
+
+        if (!(coordJob.getStatus() == CoordinatorJobBean.Status.PREP
+                || coordJob.getStatus() == CoordinatorJobBean.Status.RUNNING)) {
+            log.debug("CoordJobMatLookupCommand for jobId=" + jobId + " job is not in PREP or RUNNING but in "
+                    + coordJob.getStatus());
+            return null;
+        }
+
+        // Everything up to the job's end time has already been materialized.
+        if (coordJob.getNextMaterializedTimestamp() != null
+                && coordJob.getNextMaterializedTimestamp().compareTo(coordJob.getEndTimestamp()) >= 0) {
+            log.debug("CoordJobMatLookupCommand for jobId=" + jobId + " job is already materialized");
+            return null;
+        }
+
+        // Already materialized up to (at least) the current time; nothing due yet.
+        if (coordJob.getNextMaterializedTimestamp() != null
+                && coordJob.getNextMaterializedTimestamp().compareTo(new Timestamp(System.currentTimeMillis())) >= 0) {
+            log.debug("CoordJobMatLookupCommand for jobId=" + jobId + " job is already materialized");
+            return null;
+        }
+
+        Timestamp startTime = coordJob.getNextMaterializedTimestamp();
+        if (startTime == null) {
+            // First materialization starts at the job's start time.
+            startTime = coordJob.getStartTimestamp();
+        }
+        // Calculate end time by adding materializationWindow to start time.
+        // Use a long multiplier so seconds-to-millis conversion cannot overflow int.
+        long startTimeMilli = startTime.getTime();
+        long endTimeMilli = startTimeMilli + (materializationWindow * 1000L);
+        Timestamp endTime = new Timestamp(endTimeMilli);
+        // Clamp the window to the job's end time.
+        Timestamp jobEndTime = coordJob.getEndTimestamp();
+        if (endTime.compareTo(jobEndTime) > 0) {
+            endTime = jobEndTime;
+        }
+        // Update status of job from PREP or RUNNING to PREMATER in coordJob.
+        coordJob.setStatus(CoordinatorJob.Status.PREMATER);
+        store.updateCoordinatorJobStatus(coordJob);
+
+        log.debug("Materializing coord job id=" + jobId + ", start=" + DateUtils.toDate(startTime) + ", end="
+                + DateUtils.toDate(endTime) + ", window=" + materializationWindow + ", status=PREMATER");
+        queueCallable(new CoordActionMaterializeCommand(jobId, startTime, endTime), 100);
+        return null;
+    }
+
+    /**
+     * Runs {@link #call(CoordinatorStore)} under the job lock, requeueing this
+     * command if the lock cannot be acquired.
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordJobMatLookupCommand jobId=" + jobId + ", materializationWindow="
+                + materializationWindow);
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordJobMatLookupCommand(jobId, materializationWindow), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordJobMatLookupCommand lock was not acquired - failed jobId=" + jobId
+                        + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordJobMatLookupCommand(jobId, materializationWindow), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordJobMatLookupCommand lock acquiring failed with exception " + e.getMessage() + " for jobId="
+                    + jobId + " Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordJobMatLookupCommand jobId=" + jobId + ", materializationWindow="
+                    + materializationWindow);
+        }
+        return null;
+    }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordJobsCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordJobsCommand.java
new file mode 100644
index 000000000..b782120f2
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordJobsCommand.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.CoordinatorJobInfo;
+import org.apache.oozie.DagEngineException;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.command.wf.JobCommand;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Loads a filtered, paged listing of coordinator jobs.
+ */
+public class CoordJobsCommand extends CoordinatorCommand<CoordinatorJobInfo> {
+    // Filter name mapped to its list of accepted values
+    // (generic parameters reconstructed; the original text was garbled to "Map>").
+    private Map<String, List<String>> filter;
+    private int start;
+    private int len;
+
+    /**
+     * @param filter filter name to accepted values
+     * @param start 1-based index of the first job to return
+     * @param length maximum number of jobs to return
+     */
+    public CoordJobsCommand(Map<String, List<String>> filter, int start, int length) {
+        super("job.info", "job.info", 0, XLog.OPS);
+        this.filter = filter;
+        this.start = start;
+        this.len = length;
+    }
+
+    @Override
+    protected CoordinatorJobInfo call(CoordinatorStore store) throws StoreException, CommandException {
+        return store.getCoordinatorInfo(filter, start, len);
+    }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordKillCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordKillCommand.java
new file mode 100644
index 000000000..9d8741e31
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordKillCommand.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.XException;
+import org.apache.oozie.command.wf.KillCommand;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Kills a coordinator job and all of its non-terminal actions, queueing a
+ * workflow {@link KillCommand} for every action that has a running workflow.
+ */
+public class CoordKillCommand extends CoordinatorCommand<Void> {
+
+    private String jobId;
+    private final XLog log = XLog.getLog(getClass());
+
+    /**
+     * @param id coordinator job id to kill
+     */
+    public CoordKillCommand(String id) {
+        super("coord_kill", "coord_kill", 0, XLog.STD);
+        this.jobId = ParamChecker.notEmpty(id, "id");
+    }
+
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        try {
+            CoordinatorJobBean coordJob = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+            setLogInfo(coordJob);
+            // BUG FIX: original used '||', which is true for every status and would
+            // kill jobs that already finished; a job must be neither SUCCEEDED nor
+            // FAILED to be killable.
+            if (coordJob.getStatus() != CoordinatorJob.Status.SUCCEEDED
+                    && coordJob.getStatus() != CoordinatorJob.Status.FAILED) {
+                coordJob.setEndTime(new Date());
+                incrJobCounter(1);
+                coordJob.setStatus(CoordinatorJob.Status.KILLED);
+                List<CoordinatorActionBean> actionList = store.getActionsForCoordinatorJob(jobId, false);
+                for (CoordinatorActionBean action : actionList) {
+                    // Only touch actions that are not already in a terminal state.
+                    if (action.getStatus() != CoordinatorActionBean.Status.FAILED
+                            && action.getStatus() != CoordinatorActionBean.Status.TIMEDOUT
+                            && action.getStatus() != CoordinatorActionBean.Status.SUCCEEDED
+                            && action.getStatus() != CoordinatorActionBean.Status.KILLED) {
+                        // queue a KillCommand to delete the workflow job
+                        if (action.getExternalId() != null) {
+                            queueCallable(new KillCommand(action.getExternalId()));
+                        }
+                        action.setStatus(CoordinatorActionBean.Status.KILLED);
+                        store.updateCoordinatorAction(action);
+                    }
+                }
+                store.updateCoordinatorJob(coordJob);
+                // TODO queueCallable(new NotificationCommand(coordJob));
+            }
+            else {
+                log.info("CoordKillCommand not killed - job either " + "finished successfully or does not exist "
+                        + jobId);
+            }
+            return null;
+        }
+        catch (XException ex) {
+            throw new CommandException(ex);
+        }
+    }
+
+    /**
+     * Runs {@link #call(CoordinatorStore)} under the job lock, requeueing this
+     * command if the lock cannot be acquired.
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordKillCommand for jobId=" + jobId);
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordKillCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordKillCommand lock was not acquired - " + " failed " + jobId + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordKillCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordKillCommand lock acquiring failed " + " with exception " + e.getMessage() + " for job id "
+                    + jobId + ". Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordKillCommand for jobId=" + jobId);
+        }
+        return null;
+    }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordPurgeCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordPurgeCommand.java
new file mode 100644
index 000000000..4706c0372
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordPurgeCommand.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.store.Store;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.command.Command;
+import org.apache.oozie.command.CommandException;
+
+public class CoordPurgeCommand extends CoordinatorCommand {
+ private int olderThan;
+ private int limit;
+
+ public CoordPurgeCommand(int olderThan, int limit) {
+ super("coord_purge", "coord_purge", -1, XLog.OPS);
+ this.olderThan = olderThan;
+ this.limit = limit;
+ }
+
+ protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+ XLog.getLog(getClass()).debug("STARTED Coord Purge to purge Jobs older than [{0}] days.", olderThan);
+ int actionDeleted = store.purgeActions(this.olderThan, this.limit);
+ int jobsDeleted = store.purgeJobs(this.olderThan, this.limit);
+ XLog.getLog(getClass()).debug("ENDED Coord Purge deleted actions :" + actionDeleted + " and jobs " + jobsDeleted);
+ return null;
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordRecoveryCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordRecoveryCommand.java
new file mode 100644
index 000000000..6937dfb5b
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordRecoveryCommand.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+
+/**
+ * Moves a coordinator job from PREMATER back to RUNNING once materialization
+ * has completed.
+ */
+public class CoordRecoveryCommand extends CoordinatorCommand<Void> {
+    private final XLog log = XLog.getLog(getClass());
+    private String jobId;
+
+    /**
+     * @param id coordinator job id to recover
+     */
+    public CoordRecoveryCommand(String id) {
+        super("coord_recovery", "coord_recovery", 0, XLog.STD);
+        this.jobId = id;
+    }
+
+    /**
+     * Sets the job status to RUNNING and persists it.
+     * NOTE(review): the current status is overwritten without being checked --
+     * assumes callers only queue this for jobs in PREMATER; confirm.
+     */
+    @Override
+    protected Void call(CoordinatorStore store) throws StoreException {
+        CoordinatorJobBean coordJob = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+        setLogInfo(coordJob);
+        // Update status of job from PREMATER to RUNNING in coordJob.
+        coordJob.setStatus(CoordinatorJob.Status.RUNNING);
+        store.updateCoordinatorJob(coordJob);
+        log.debug("[" + jobId + "]: Recover status from PREMATER to RUNNING");
+        return null;
+    }
+
+    /**
+     * Runs {@link #call(CoordinatorStore)} under the job lock, requeueing this
+     * command if the lock cannot be acquired.
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordRecoveryCommand for jobId=" + jobId);
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordRecoveryCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordRecoveryCommand lock was not acquired - failed jobId=" + jobId
+                        + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordRecoveryCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordRecoveryCommand lock acquiring failed with exception " + e.getMessage()
+                    + " for jobId=" + jobId + " Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordRecoveryCommand for jobId=" + jobId);
+        }
+        return null;
+    }
+
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordResumeCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordResumeCommand.java
new file mode 100644
index 000000000..900287d0f
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordResumeCommand.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.XException;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+import org.apache.oozie.command.wf.ResumeCommand;
+
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Resumes a SUSPENDED coordinator job: moves the job back to PREP and queues a
+ * workflow {@link ResumeCommand} for every action with a running workflow.
+ */
+public class CoordResumeCommand extends CoordinatorCommand<Void> {
+
+    private String jobId;
+    private final XLog log = XLog.getLog(getClass());
+
+    /**
+     * @param id coordinator job id to resume
+     */
+    public CoordResumeCommand(String id) {
+        super("coord_resume", "coord_resume", 0, XLog.STD);
+        this.jobId = ParamChecker.notEmpty(id, "id");
+    }
+
+    protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+        try {
+            CoordinatorJobBean coordJob = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+            setLogInfo(coordJob);
+            if (coordJob.getStatus() == CoordinatorJob.Status.SUSPENDED) {
+                incrJobCounter(1);
+                coordJob.setStatus(CoordinatorJob.Status.PREP);
+                List<CoordinatorActionBean> actionList = store.getActionsForCoordinatorJob(jobId, false);
+                for (CoordinatorActionBean action : actionList) {
+                    // Queue a ResumeCommand for each action with a running workflow.
+                    if (action.getExternalId() != null) {
+                        queueCallable(new ResumeCommand(action.getExternalId()));
+                    }
+                }
+                store.updateCoordinatorJob(coordJob);
+                // TODO queueCallable(new NotificationCommand(coordJob));
+                // (comment moved inside the if-block; it sat between '}' and 'else',
+                // misleadingly detached from the resume path)
+            }
+            else {
+                log.info("CoordResumeCommand not Resumed - " + "job not in SUSPENDED state " + jobId);
+            }
+            return null;
+        }
+        catch (XException ex) {
+            throw new CommandException(ex);
+        }
+    }
+
+    /**
+     * Runs {@link #call(CoordinatorStore)} under the job lock, requeueing this
+     * command if the lock cannot be acquired.
+     */
+    @Override
+    protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+        log.info("STARTED CoordResumeCommand for jobId=" + jobId);
+        try {
+            if (lock(jobId)) {
+                call(store);
+            }
+            else {
+                queueCallable(new CoordResumeCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+                log.warn("CoordResumeCommand lock was not acquired - " + " failed " + jobId + ". Requeing the same.");
+            }
+        }
+        catch (InterruptedException e) {
+            queueCallable(new CoordResumeCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+            log.warn("CoordResumeCommand lock acquiring failed " + " with exception " + e.getMessage() + " for job id "
+                    + jobId + ". Requeing the same.");
+        }
+        finally {
+            log.info("ENDED CoordResumeCommand for jobId=" + jobId);
+        }
+        return null;
+    }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordSubmitCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordSubmitCommand.java
new file mode 100644
index 000000000..dcc6a92f5
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordSubmitCommand.java
@@ -0,0 +1,869 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+import javax.xml.transform.stream.StreamSource;
+import javax.xml.validation.Validator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.ErrorCode;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.CoordinatorJob.Execution;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.coord.CoordELEvaluator;
+import org.apache.oozie.coord.CoordELFunctions;
+import org.apache.oozie.coord.CoordUtils;
+import org.apache.oozie.coord.CoordinatorJobException;
+import org.apache.oozie.coord.TimeUnit;
+import org.apache.oozie.service.DagXLogInfoService;
+import org.apache.oozie.service.SchemaService;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.UUIDService;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.WorkflowAppService;
+import org.apache.oozie.service.SchemaService.SchemaName;
+import org.apache.oozie.service.UUIDService.ApplicationType;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.DateUtils;
+import org.apache.oozie.util.ELEvaluator;
+import org.apache.oozie.util.IOUtils;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.PropertiesUtils;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XmlUtils;
+import org.apache.oozie.workflow.WorkflowException;
+import org.jdom.Attribute;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.Namespace;
+import org.xml.sax.SAXException;
+
+/**
+ * This class provides the functionalities to resolve a coordinator job XML and write the job information into a DB
+ * table. Specifically it performs the following functions: 1. Resolve all the variables or properties using job
+ * configurations. 2. Insert all datasets definition as part of the <input-events> and <output-events> tags. 3. Validate the XML
+ * at runtime.
+ */
+public class CoordSubmitCommand extends CoordinatorCommand {
+
private Configuration conf;
private String authToken;
// when true: resolve and materialize in-memory, return the result, and skip DB writes
private boolean dryrun;

// default config file looked up under the coordinator app path
public static final String CONFIG_DEFAULT = "coord-config-default.xml";
public static final String COORDINATOR_XML_FILE = "coordinator.xml";

// property names users may not set in job conf / the default config file
private static final Set DISALLOWED_USER_PROPERTIES = new HashSet();
private static final Set DISALLOWED_DEFAULT_PROPERTIES = new HashSet();

private final XLog log = XLog.getLog(getClass());
// EL evaluators for the different submit-time resolution phases; evalData is
// created lazily in resolveInitial() once the data-event names are known.
private ELEvaluator evalFreq = null;
private ELEvaluator evalNofuncs = null;
private ELEvaluator evalData = null;
private ELEvaluator evalInst = null;
private ELEvaluator evalSla = null;

static {
    String[] badUserProps = {PropertiesUtils.YEAR, PropertiesUtils.MONTH, PropertiesUtils.DAY,
            PropertiesUtils.HOUR, PropertiesUtils.MINUTE, PropertiesUtils.DAYS, PropertiesUtils.HOURS,
            PropertiesUtils.MINUTES, PropertiesUtils.KB, PropertiesUtils.MB, PropertiesUtils.GB,
            PropertiesUtils.TB, PropertiesUtils.PB, PropertiesUtils.RECORDS, PropertiesUtils.MAP_IN,
            PropertiesUtils.MAP_OUT, PropertiesUtils.REDUCE_IN, PropertiesUtils.REDUCE_OUT, PropertiesUtils.GROUPS};
    PropertiesUtils.createPropertySet(badUserProps, DISALLOWED_USER_PROPERTIES);

    String[] badDefaultProps = {PropertiesUtils.HADOOP_USER, PropertiesUtils.HADOOP_UGI,
            WorkflowAppService.HADOOP_JT_KERBEROS_NAME, WorkflowAppService.HADOOP_NN_KERBEROS_NAME};
    // user-disallowed properties are also disallowed in the default config file
    PropertiesUtils.createPropertySet(badUserProps, DISALLOWED_DEFAULT_PROPERTIES);
    PropertiesUtils.createPropertySet(badDefaultProps, DISALLOWED_DEFAULT_PROPERTIES);
}
+
/**
 * Constructor to create the Coordinator Submit Command.
 *
 * @param conf : Configuration for Coordinator job
 * @param authToken : To be used for authentication
 */
public CoordSubmitCommand(Configuration conf, String authToken) {
    super("coord_submit", "coord_submit", 0, XLog.STD);
    this.conf = ParamChecker.notNull(conf, "conf");
    this.authToken = ParamChecker.notEmpty(authToken, "authToken");
}

/**
 * Constructor to create a (possibly dry-run) Coordinator Submit Command.
 *
 * @param dryrun : true to resolve and materialize without writing to the DB
 * @param conf : Configuration for Coordinator job
 * @param authToken : To be used for authentication
 */
public CoordSubmitCommand(boolean dryrun, Configuration conf, String authToken) {
    super("coord_submit", "coord_submit", 0, XLog.STD, dryrun);
    this.conf = ParamChecker.notNull(conf, "conf");
    this.authToken = ParamChecker.notEmpty(authToken, "authToken");
    this.dryrun = dryrun;
}
+
/**
 * Resolves and validates the coordinator job XML, persists it (unless dry-run)
 * and queues a materialization lookup; in dry-run mode the actions for the
 * first hour are materialized in-memory and returned as text instead.
 *
 * @param store coordinator store
 * @return new job id, or (dry-run) the resolved job XML plus materialized actions
 * @throws StoreException on store failure
 * @throws CommandException on validation or submission failure
 */
@Override
protected String call(CoordinatorStore store) throws StoreException, CommandException {
    String jobId = null;
    log.info("STARTED Coordinator Submit");
    incrJobCounter(1);
    CoordinatorJobBean coordJob = new CoordinatorJobBean();
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        mergeDefaultConfig();

        String appXml = readAndValidateXml();
        coordJob.setOrigJobXml(appXml);
        log.debug("jobXml after initial validation " + XmlUtils.prettyPrint(appXml).toString());
        appXml = XmlUtils.removeComments(appXml);
        initEvaluators();
        Element eJob = basicResolveAndIncludeDS(appXml, conf, coordJob);
        log.debug("jobXml after all validation " + XmlUtils.prettyPrint(eJob).toString());

        jobId = storeToDB(eJob, store, coordJob);

        // log JOB info for coordinator jobs
        setLogInfo(coordJob);

        if (!dryrun) {
            // submit a command to materialize jobs for the next 1 hour (3600 secs)
            // so we don't wait 10 mins for the Service to run.
            queueCallable(new CoordJobMatLookupCommand(jobId, 3600), 100);
        }
        else {
            // dry-run: materialize the first hour in-memory, capped at the job end time
            Date startTime = coordJob.getStartTime();
            long startTimeMilli = startTime.getTime();
            long endTimeMilli = startTimeMilli + (3600 * 1000);
            Date jobEndTime = coordJob.getEndTime();
            Date endTime = new Date(endTimeMilli);
            if (endTime.compareTo(jobEndTime) > 0) {
                endTime = jobEndTime;
            }
            jobId = coordJob.getId();
            log.info("[" + jobId + "]: Update status to PREMATER");
            coordJob.setStatus(CoordinatorJob.Status.PREMATER);
            CoordActionMaterializeCommand coordActionMatCom = new CoordActionMaterializeCommand(jobId, startTime,
                    endTime);
            Configuration jobConf = null;
            try {
                jobConf = new XConfiguration(new StringReader(coordJob.getConf()));
            }
            catch (IOException e1) {
                // best-effort: materializeJobs is still attempted with a null conf
                log.warn("Configuration parse error. read from DB :" + coordJob.getConf(), e1);
            }
            String action = coordActionMatCom.materializeJobs(true, coordJob, jobConf, null);
            String output = coordJob.getJobXml() + System.getProperty("line.separator")
                    + "***actions for instance***" + action;
            return output;
        }
    }
    catch (CoordinatorJobException ex) {
        log.warn("ERROR: ", ex);
        throw new CommandException(ex);
    }
    catch (IllegalArgumentException iex) {
        log.warn("ERROR: ", iex);
        throw new CommandException(ErrorCode.E1003, iex);
    }
    catch (Exception ex) {// TODO
        log.warn("ERROR: ", ex);
        throw new CommandException(ErrorCode.E0803, ex);
    }
    log.info("ENDED Coordinator Submit jobId=" + jobId);
    return jobId;
}
+
/**
 * Read the application XML and validate against coordinator Schema
 *
 * @return validated coordinator XML
 * @throws CoordinatorJobException if the app path cannot be read or the XML fails validation
 */
private String readAndValidateXml() throws CoordinatorJobException {
    String appPath = ParamChecker.notEmpty(conf.get(OozieClient.COORDINATOR_APP_PATH),
            OozieClient.COORDINATOR_APP_PATH);// TODO: COORDINATOR_APP_PATH
    String coordXml = readDefinition(appPath, COORDINATOR_XML_FILE);
    validateXml(coordXml);
    return coordXml;
}
+
+ /**
+ * Validate against Coordinator XSD file
+ *
+ * @param xmlContent : Input coordinator xml
+ * @throws CoordinatorJobException
+ */
+ private void validateXml(String xmlContent) throws CoordinatorJobException {
+ javax.xml.validation.Schema schema = Services.get().get(SchemaService.class).getSchema(SchemaName.COORDINATOR);
+ Validator validator = schema.newValidator();
+ // log.warn("XML " + xmlContent);
+ try {
+ validator.validate(new StreamSource(new StringReader(xmlContent)));
+ }
+ catch (SAXException ex) {
+ log.warn("SAXException :", ex);
+ throw new CoordinatorJobException(ErrorCode.E0701, ex.getMessage(), ex);
+ }
+ catch (IOException ex) {
+ // ex.printStackTrace();
+ log.warn("IOException :", ex);
+ throw new CoordinatorJobException(ErrorCode.E0702, ex.getMessage(), ex);
+ }
+ }
+
/**
 * Merge default configuration (coord-config-default.xml under the app path,
 * if it exists) into the user-supplied job configuration, then verify no
 * disallowed property was set.
 *
 * @throws CommandException E0702 when the default config cannot be read
 */
protected void mergeDefaultConfig() throws CommandException {
    Path configDefault = new Path(conf.get(OozieClient.COORDINATOR_APP_PATH), CONFIG_DEFAULT);
    // NOTE(review): fsConfig is built here but the FileSystem below is created
    // with a fresh Configuration — confirm whether fsConfig should be passed.
    Configuration fsConfig = CoordUtils.getHadoopConf(conf);
    FileSystem fs;
    // TODO: which conf?
    try {
        String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
        String group = ParamChecker.notEmpty(conf.get(OozieClient.GROUP_NAME), OozieClient.GROUP_NAME);
        fs = Services.get().get(HadoopAccessorService.class).createFileSystem(user, group, configDefault.toUri(),
                new Configuration());
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
            // defaults only fill in keys the user did not already set
            XConfiguration.injectDefaults(defaultConf, conf);
        }
        else {
            log.info("configDefault Doesn't exist " + configDefault);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
    }
    catch (IOException e) {
        throw new CommandException(ErrorCode.E0702, e.getMessage() + " : Problem reading default config "
                + configDefault, e);
    }
    log.debug("Merged CONF :" + XmlUtils.prettyPrint(conf).toString());
}
+
+ /**
+ * The method resolve all the variables that are defined in configuration. It also include the data set definition
+ * from dataset file into XML.
+ *
+ * @param appXml : Original job XML
+ * @param conf : Configuration of the job
+ * @param coordJob : Coordinator job bean to be populated.
+ * @return : Resolved and modified job XML element.
+ * @throws Exception
+ */
+ public Element basicResolveAndIncludeDS(String appXml, Configuration conf, CoordinatorJobBean coordJob)
+ throws CoordinatorJobException, Exception {
+ Element basicResolvedApp = resolveInitial(conf, appXml, coordJob);
+ includeDataSets(basicResolvedApp, conf);
+ return basicResolvedApp;
+ }
+
+ /**
+ * Insert data set into data-in and data-out tags.
+ *
+ * @param eAppXml : coordinator application XML
+ * @param eDatasets : DataSet XML
+ * @return updated application
+ */
+ private void insertDataSet(Element eAppXml, Element eDatasets) {
+ // Adding DS definition in the coordinator XML
+ Element inputList = eAppXml.getChild("input-events", eAppXml.getNamespace());
+ if (inputList != null) {
+ for (Element dataIn : (List) inputList.getChildren("data-in", eAppXml.getNamespace())) {
+ Element eDataset = findDataSet(eDatasets, dataIn.getAttributeValue("dataset"));
+ dataIn.getContent().add(0, eDataset);
+ }
+ }
+ Element outputList = eAppXml.getChild("output-events", eAppXml.getNamespace());
+ if (outputList != null) {
+ for (Element dataOut : (List) outputList.getChildren("data-out", eAppXml.getNamespace())) {
+ Element eDataset = findDataSet(eDatasets, dataOut.getAttributeValue("dataset"));
+ dataOut.getContent().add(0, eDataset);
+ }
+ }
+ }
+
+ /**
+ * Find a specific dataset from a list of Datasets.
+ *
+ * @param eDatasets : List of data sets
+ * @param name : queried data set name
+ * @return one Dataset element. otherwise throw Exception
+ */
+ private static Element findDataSet(Element eDatasets, String name) {
+ for (Element eDataset : (List) eDatasets.getChildren("dataset", eDatasets.getNamespace())) {
+ if (eDataset.getAttributeValue("name").equals(name)) {
+ eDataset = (Element) eDataset.clone();
+ eDataset.detach();
+ return eDataset;
+ }
+ }
+ throw new RuntimeException("undefined dataset: " + name);
+ }
+
+ /**
+ * Initialize all the required EL Evaluators.
+ */
+ protected void initEvaluators() {
+ evalFreq = CoordELEvaluator.createELEvaluatorForGroup(conf, "coord-job-submit-freq");
+ evalNofuncs = CoordELEvaluator.createELEvaluatorForGroup(conf, "coord-job-submit-nofuncs");
+ evalInst = CoordELEvaluator.createELEvaluatorForGroup(conf, "coord-job-submit-instances");
+ evalSla = CoordELEvaluator.createELEvaluatorForGroup(conf, "coord-sla-submit");
+ }
+
+ /**
+ * Resolve basic entities using job Configuration.
+ *
+ * @param conf :Job configuration
+ * @param appXml : Original job XML
+ * @param coordJob : Coordinator job bean to be populated.
+ * @return Resolved job XML element.
+ * @throws Exception
+ */
+ protected Element resolveInitial(Configuration conf, String appXml, CoordinatorJobBean coordJob)
+ throws CoordinatorJobException, Exception {
+ Element eAppXml = XmlUtils.parseXml(appXml);
+ // job's main attributes
+ // frequency
+ String val = resolveAttribute("frequency", eAppXml, evalFreq);
+ int ival = ParamChecker.checkInteger(val, "frequency");
+ ParamChecker.checkGTZero(ival, "frequency");
+ coordJob.setFrequency(ival);
+ TimeUnit tmp = (evalFreq.getVariable("timeunit") == null) ? TimeUnit.MINUTE : ((TimeUnit) evalFreq
+ .getVariable("timeunit"));
+ addAnAttribute("freq_timeunit", eAppXml, tmp.toString()); // TODO: Store
+ // TimeUnit
+ coordJob.setTimeUnit(CoordinatorJob.Timeunit.valueOf(tmp.toString()));
+ // End Of Duration
+ tmp = evalFreq.getVariable("endOfDuration") == null ? TimeUnit.NONE : ((TimeUnit) evalFreq
+ .getVariable("endOfDuration"));
+ addAnAttribute("end_of_duration", eAppXml, tmp.toString());
+ // coordJob.setEndOfDuration(tmp) // TODO: Add new attribute in Job bean
+
+ // start time
+ val = resolveAttribute("start", eAppXml, evalNofuncs);
+ ParamChecker.checkUTC(val, "start");
+ coordJob.setStartTime(DateUtils.parseDateUTC(val));
+ // end time
+ val = resolveAttribute("end", eAppXml, evalNofuncs);
+ ParamChecker.checkUTC(val, "end");
+ coordJob.setEndTime(DateUtils.parseDateUTC(val));
+ // Time zone
+ val = resolveAttribute("timezone", eAppXml, evalNofuncs);
+ ParamChecker.checkTimeZone(val, "timezone");
+ coordJob.setTimeZone(val);
+
+ // controls
+ val = resolveTagContents("timeout", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
+ if (val == "") {
+ val = "-1";
+ }
+ ival = ParamChecker.checkInteger(val, "timeout");
+ // ParamChecker.checkGEZero(ival, "timeout");
+ coordJob.setTimeout(ival);
+ val = resolveTagContents("concurrency", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
+ if (val == "") {
+ val = "-1";
+ }
+ ival = ParamChecker.checkInteger(val, "concurrency");
+ // ParamChecker.checkGEZero(ival, "concurrency");
+ coordJob.setConcurrency(ival);
+ val = resolveTagContents("execution", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
+ if (val == "") {
+ val = Execution.FIFO.toString();
+ }
+ coordJob.setExecution(Execution.valueOf(val));
+ String[] acceptedVals = {Execution.LIFO.toString(), Execution.FIFO.toString(), Execution.LAST_ONLY.toString()};
+ ParamChecker.isMember(val, acceptedVals, "execution");
+
+ // datasets
+ resolveTagContents("include", eAppXml.getChild("datasets", eAppXml.getNamespace()), evalNofuncs);
+ // for each data set
+ resolveDataSets(eAppXml);
+ HashMap dataNameList = new HashMap();
+ resolveIOEvents(eAppXml, dataNameList);
+
+ resolveTagContents("app-path", eAppXml.getChild("action", eAppXml.getNamespace()).getChild("workflow",
+ eAppXml.getNamespace()), evalNofuncs);
+ // TODO: If action or workflow tag is missing, NullPointerException will
+ // occur
+ Element configElem = eAppXml.getChild("action", eAppXml.getNamespace()).getChild("workflow",
+ eAppXml.getNamespace()).getChild("configuration", eAppXml.getNamespace());
+ evalData = CoordELEvaluator.createELEvaluatorForDataEcho(conf, "coord-job-submit-data", dataNameList);
+ if (configElem != null) {
+ for (Element propElem : (List) configElem.getChildren("property", configElem.getNamespace())) {
+ resolveTagContents("name", propElem, evalData);
+ // log.warn("Value :");
+ // Want to check the data-integrity but don't want to modify the
+ // XML
+ // for properties only
+ Element tmpProp = (Element) propElem.clone();
+ resolveTagContents("value", tmpProp, evalData);
+ // val = resolveTagContents("value", propElem, evalData);
+ // log.warn("Value OK :" + val);
+ }
+ }
+ resolveSLA(eAppXml, coordJob);
+ return eAppXml;
+ }
+
/**
 * EL-evaluate and schema-validate the optional SLA "info" section of the
 * coordinator action. The evaluated XML is only validated here; this method
 * does not store the result on the bean.
 *
 * @param eAppXml resolved job element
 * @param coordJob coordinator job bean (currently unused here — TODO confirm
 *        whether the SLA should be persisted on it)
 * @throws CommandException E1004 when EL evaluation or XSD validation fails
 */
private void resolveSLA(Element eAppXml, CoordinatorJobBean coordJob) throws CommandException {
    // String prefix = XmlUtils.getNamespacePrefix(eAppXml,
    // SchemaService.SLA_NAME_SPACE_URI);
    Element eSla = eAppXml.getChild("action", eAppXml.getNamespace()).getChild("info",
            Namespace.getNamespace(SchemaService.SLA_NAME_SPACE_URI));

    if (eSla != null) {
        String slaXml = XmlUtils.prettyPrint(eSla).toString();
        try {
            // EL evaluation
            slaXml = evalSla.evaluate(slaXml, String.class);
            // Validate against semantic XSD
            XmlUtils.validateData(slaXml, SchemaName.SLA_ORIGINAL);
        }
        catch (Exception e) {
            throw new CommandException(ErrorCode.E1004, "Validation ERROR :" + e.getMessage(), e);
        }
    }
}
+
+ /**
+ * Resolve input-events/data-in and output-events/data-out tags.
+ *
+ * @param eJob : Job element
+ * @throws CoordinatorJobException
+ */
+ private void resolveIOEvents(Element eJobOrg, HashMap dataNameList) throws CoordinatorJobException {
+ // Resolving input-events/data-in
+ // Clone the job and don't update anything in the original
+ Element eJob = (Element) eJobOrg.clone();
+ Element inputList = eJob.getChild("input-events", eJob.getNamespace());
+ if (inputList != null) {
+ TreeSet eventNameSet = new TreeSet();
+ for (Element dataIn : (List) inputList.getChildren("data-in", eJob.getNamespace())) {
+ String dataInName = dataIn.getAttributeValue("name");
+ dataNameList.put(dataInName, "data-in");
+ // check whether there is any duplicate data-in name
+ if (eventNameSet.contains(dataInName)) {
+ throw new RuntimeException("Duplicate dataIn name " + dataInName);
+ }
+ else {
+ eventNameSet.add(dataInName);
+ }
+ resolveTagContents("instance", dataIn, evalInst);
+ resolveTagContents("start-instance", dataIn, evalInst);
+ resolveTagContents("end-instance", dataIn, evalInst);
+ }
+ }
+ // Resolving output-events/data-out
+ Element outputList = eJob.getChild("output-events", eJob.getNamespace());
+ if (outputList != null) {
+ TreeSet eventNameSet = new TreeSet();
+ for (Element dataOut : (List) outputList.getChildren("data-out", eJob.getNamespace())) {
+ String dataOutName = dataOut.getAttributeValue("name");
+ dataNameList.put(dataOutName, "data-out");
+ // check whether there is any duplicate data-out name
+ if (eventNameSet.contains(dataOutName)) {
+ throw new RuntimeException("Duplicate dataIn name " + dataOutName);
+ }
+ else {
+ eventNameSet.add(dataOutName);
+ }
+ resolveTagContents("instance", dataOut, evalInst);
+ }
+ }
+
+ }
+
/**
 * Set (add or overwrite) an attribute on an XML element.
 *
 * @param attrName :attribute name
 * @param elem : Element to add attribute
 * @param value :Value of attribute
 */
private void addAnAttribute(String attrName, Element elem, String value) {
    elem.setAttribute(attrName, value);
}
+
/**
 * Resolve every dataset defined under the job's "datasets" section using the
 * job configuration.
 *
 * @param eAppXml : Job Element XML
 * @throws Exception
 */
private void resolveDataSets(Element eAppXml) throws Exception {
    Element datasetList = eAppXml.getChild("datasets", eAppXml.getNamespace());
    if (datasetList != null) {

        List dsElems = datasetList.getChildren("dataset", eAppXml.getNamespace());
        resolveDataSets(dsElems);
        // NOTE(review): app-path is resolved again in resolveInitial(); this
        // second resolution looks redundant — confirm before removing.
        resolveTagContents("app-path", eAppXml.getChild("action", eAppXml.getNamespace()).getChild("workflow",
                eAppXml.getNamespace()), evalNofuncs);
    }
}
+
/**
 * Resolve frequency, timezone, initial-instance, uri-template and done-flag
 * for each dataset element, normalizing the frequency time unit and
 * end-of-duration into "freq_timeunit" / "end_of_duration" attributes.
 *
 * @param dsElems : Data set XML elements.
 * @throws CoordinatorJobException on EL/validation failure
 */
private void resolveDataSets(List dsElems) throws CoordinatorJobException {
    for (Element dsElem : dsElems) {
        // Setting up default TimeUnit and EndOFDuraion
        evalFreq.setVariable("timeunit", TimeUnit.MINUTE);
        evalFreq.setVariable("endOfDuration", TimeUnit.NONE);

        String val = resolveAttribute("frequency", dsElem, evalFreq);
        int ival = ParamChecker.checkInteger(val, "frequency");
        ParamChecker.checkGTZero(ival, "frequency");
        // record the evaluator side-effect variables as element attributes
        addAnAttribute("freq_timeunit", dsElem, evalFreq.getVariable("timeunit") == null ? TimeUnit.MINUTE
                .toString() : ((TimeUnit) evalFreq.getVariable("timeunit")).toString());
        addAnAttribute("end_of_duration", dsElem, evalFreq.getVariable("endOfDuration") == null ? TimeUnit.NONE
                .toString() : ((TimeUnit) evalFreq.getVariable("endOfDuration")).toString());
        val = resolveAttribute("initial-instance", dsElem, evalNofuncs);
        ParamChecker.checkUTC(val, "initial-instance");
        val = resolveAttribute("timezone", dsElem, evalNofuncs);
        ParamChecker.checkTimeZone(val, "timezone");
        resolveTagContents("uri-template", dsElem, evalNofuncs);
        resolveTagContents("done-flag", dsElem, evalNofuncs);
    }
}
+
/**
 * EL-resolve the text content of every child tag named tagName (e.g.
 * {@code <timeout>10</timeout>}) in place, replacing the raw expression with
 * its resolved value.
 *
 * @param tagName : Tag name, e.g. "timeout"
 * @param elem : Element where the tag exists (may be null).
 * @param eval : evaluator used to resolve the content
 * @return concatenation of the resolved values ("" when elem is null or has no such child).
 * @throws CoordinatorJobException E1004 when EL evaluation fails
 */
private String resolveTagContents(String tagName, Element elem, ELEvaluator eval) throws CoordinatorJobException {
    String ret = "";
    if (elem != null) {
        for (Element tagElem : (List) elem.getChildren(tagName, elem.getNamespace())) {
            if (tagElem != null) {
                String updated;
                try {
                    updated = CoordELFunctions.evalAndWrap(eval, tagElem.getText().trim());

                }
                catch (Exception e) {
                    throw new CoordinatorJobException(ErrorCode.E1004, e.getMessage(), e);
                }
                // replace the raw expression with its resolved value
                tagElem.removeContent();
                tagElem.addContent(updated);
                ret += updated;
            }
        }
    }
    return ret;
}
+
/**
 * EL-resolve the value of an attribute in place.
 *
 * @param attrName : Attribute name.
 * @param elem : XML Element where the attribute is defined
 * @param eval : ELEvaluator used to resolve
 * @return Resolved attribute value, or null when the attribute is absent
 * @throws CoordinatorJobException E1004 when EL evaluation fails
 */
private String resolveAttribute(String attrName, Element elem, ELEvaluator eval) throws CoordinatorJobException {
    Attribute attr = elem.getAttribute(attrName);
    String val = null;
    if (attr != null) {
        try {
            val = CoordELFunctions.evalAndWrap(eval, attr.getValue().trim());

        }
        catch (Exception e) {
            throw new CoordinatorJobException(ErrorCode.E1004, e.getMessage(), e);
        }
        attr.setValue(val);
    }
    return val;
}
+
/**
 * Gather all dataset definitions — from included dataset files and from the
 * job's own "datasets" section (local definitions override included ones by
 * name) — splice the referenced definitions into each data-in/data-out event,
 * then drop the original "datasets" section from the document.
 *
 * @param resolvedXml : Job XML element.
 * @param conf : Job configuration
 * @throws CoordinatorJobException on include-file read/parse errors
 */
protected void includeDataSets(Element resolvedXml, Configuration conf) throws CoordinatorJobException {
    Element datasets = resolvedXml.getChild("datasets", resolvedXml.getNamespace());
    Element allDataSets = new Element("all_datasets", resolvedXml.getNamespace());
    List dsList = new ArrayList(); // dataset names seen so far, for duplicate detection
    if (datasets != null) {
        for (Element includeElem : (List) datasets.getChildren("include", datasets.getNamespace())) {
            String incDSFile = includeElem.getTextTrim();
            includeOneDSFile(incDSFile, dsList, allDataSets, datasets.getNamespace());
        }
        for (Element e : (List) datasets.getChildren("dataset", datasets.getNamespace())) {
            String dsName = (String) e.getAttributeValue("name");
            if (dsList.contains(dsName)) {// Override with this DS
                // Remove old DS
                removeDataSet(allDataSets, dsName);
            }
            else {
                dsList.add(dsName);
            }
            allDataSets.addContent((Element) e.clone());
        }
    }
    insertDataSet(resolvedXml, allDataSets);
    resolvedXml.removeChild("datasets", resolvedXml.getNamespace());
}
+
/**
 * Parse one included dataset file, resolve its dataset elements, and add them
 * (re-namespaced to the including document) to allDataSets; recurses into
 * nested includes. Unlike local definitions, duplicate names across include
 * files are an error.
 *
 * @param incDSFile : Include data set filename.
 * @param dsList :List of dataset names to verify the duplicate.
 * @param allDataSets : Element that includes all dataset definitions.
 * @param dsNameSpace : Data set name space
 * @throws CoordinatorJobException E0700 on parse failure; E1001/E1002 on read failure
 */
private void includeOneDSFile(String incDSFile, List dsList, Element allDataSets, Namespace dsNameSpace)
        throws CoordinatorJobException {
    Element tmpDataSets = null;
    try {
        String dsXml = readDefinition(incDSFile, "");
        log.debug("DSFILE :" + incDSFile + "\n" + dsXml);
        tmpDataSets = XmlUtils.parseXml(dsXml);
    }
    catch (JDOMException e) {
        log.warn("Error parsing included dataset [{0}]. Message [{1}]", incDSFile, e.getMessage());
        throw new CoordinatorJobException(ErrorCode.E0700, e.getMessage());
    }
    // NOTE(review): dataset children are fetched here without a namespace, while
    // the nested-include scan below uses tmpDataSets.getNamespace() — confirm
    // both match the include files actually in use.
    resolveDataSets((List) tmpDataSets.getChildren("dataset"));
    for (Element e : (List) tmpDataSets.getChildren("dataset")) {
        String dsName = (String) e.getAttributeValue("name");
        if (dsList.contains(dsName)) {
            throw new RuntimeException("Duplicate Dataset " + dsName);
        }
        dsList.add(dsName);
        Element tmp = (Element) e.clone();
        // TODO: Don't like to over-write the external/include DS's namespace
        tmp.setNamespace(dsNameSpace);// TODO:
        tmp.getChild("uri-template").setNamespace(dsNameSpace);
        if (e.getChild("done-flag") != null) {
            tmp.getChild("done-flag").setNamespace(dsNameSpace);
        }
        allDataSets.addContent(tmp);
    }
    // nested include
    for (Element includeElem : (List) tmpDataSets.getChildren("include", tmpDataSets.getNamespace())) {
        String incFile = includeElem.getTextTrim();
        includeOneDSFile(incFile, dsList, allDataSets, dsNameSpace);
    }
}
+
+ /**
+ * Remove a dataset from a list of dataset.
+ *
+ * @param eDatasets : List of dataset
+ * @param name : Dataset name to be removed.
+ */
+ private static void removeDataSet(Element eDatasets, String name) {
+ for (Element eDataset : (List) eDatasets.getChildren("dataset", eDatasets.getNamespace())) {
+ if (eDataset.getAttributeValue("name").equals(name)) {
+ eDataset.detach();
+ }
+ }
+ throw new RuntimeException("undefined dataset: " + name);
+ }
+
/**
 * Read a coordinator definition file from the application filesystem.
 *
 * @param appPath application path (URI).
 * @param fileName file to read under appPath; when null/empty, appPath itself is read.
 * @return file content as a string.
 * @throws CoordinatorJobException E1001 on read failure, E1002 on a bad URI.
 */
protected String readDefinition(String appPath, String fileName) throws CoordinatorJobException {// TODO:
    String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
    String group = ParamChecker.notEmpty(conf.get(OozieClient.GROUP_NAME), OozieClient.GROUP_NAME);
    Configuration confHadoop = CoordUtils.getHadoopConf(conf);
    try {
        URI uri = new URI(appPath);
        log.debug("user =" + user + " group =" + group);
        FileSystem fs = Services.get().get(HadoopAccessorService.class).createFileSystem(user, group, uri,
                new Configuration());
        Path p;
        if (fileName == null || fileName.length() == 0) {
            p = new Path(uri.getPath());
        }
        else {
            p = new Path(uri.getPath(), fileName);
        }
        Reader reader = new InputStreamReader(fs.open(p));// TODO
        StringWriter writer = new StringWriter();
        IOUtils.copyCharStream(reader, writer);
        return writer.toString();
    }
    catch (IOException ex) {
        log.warn("IOException :" + XmlUtils.prettyPrint(confHadoop), ex);
        throw new CoordinatorJobException(ErrorCode.E1001, ex.getMessage(), ex); // TODO:
    }
    catch (URISyntaxException ex) {
        log.warn("URISyException :" + ex.getMessage());
        throw new CoordinatorJobException(ErrorCode.E1002, appPath, ex.getMessage(), ex);// TODO:
    }
    catch (Exception ex) {
        log.warn("Exception :", ex);
        throw new CoordinatorJobException(ErrorCode.E1001, ex.getMessage(), ex);// TODO:
    }
}
+
/**
 * Populate the coordinator bean from the resolved job element and persist it
 * (the insert is skipped in dry-run mode).
 *
 * @param eJob : XML element of job
 * @param store : Coordinator Store to write.
 * @param coordJob : Coordinator job bean
 * @return generated job id.
 * @throws StoreException on insert failure
 */
private String storeToDB(Element eJob, CoordinatorStore store, CoordinatorJobBean coordJob) throws StoreException {
    String jobId = Services.get().get(UUIDService.class).generateId(ApplicationType.COORDINATOR);
    coordJob.setId(jobId);
    coordJob.setAuthToken(this.authToken);
    coordJob.setAppName(eJob.getAttributeValue("name"));
    coordJob.setAppPath(conf.get(OozieClient.COORDINATOR_APP_PATH));
    coordJob.setStatus(CoordinatorJob.Status.PREP);
    coordJob.setCreatedTime(new Date()); // TODO: Do we need that?
    coordJob.setUser(conf.get(OozieClient.USER_NAME));
    coordJob.setGroup(conf.get(OozieClient.GROUP_NAME));
    coordJob.setConf(XmlUtils.prettyPrint(conf).toString());
    coordJob.setJobXml(XmlUtils.prettyPrint(eJob).toString());
    coordJob.setLastActionNumber(0);
    coordJob.setLastModifiedTime(new Date());

    if (!dryrun) {
        store.insertCoordinatorJob(coordJob);
    }
    return jobId;
}
+
/**
 * For unit-testing only. Will ultimately go away. Submits a hard-coded
 * coordinator app path against a locally initialized Services instance.
 *
 * @param args unused
 * @throws Exception on any failure
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new XConfiguration();

    // Hard-coded developer path; adjust locally when running by hand.
    conf.set(OozieClient.COORDINATOR_APP_PATH,
            "file:///homes/test/workspace/sandbox_krishna/oozie-main/core/src/main/java/org/apache/oozie/coord/");
    conf.set(OozieClient.USER_NAME, "test");
    conf.set(OozieClient.GROUP_NAME, "other");
    new Services().init();
    try {
        CoordSubmitCommand sc = new CoordSubmitCommand(conf, "TESTING");
        String jobId = sc.call();
        System.out.println("Job Id " + jobId);
        // keep the JVM alive long enough for queued commands to run
        Thread.sleep(80000);
    }
    finally {
        Services.get().destroy();
    }
}
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordSuspendCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordSuspendCommand.java
new file mode 100644
index 000000000..bb873d619
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordSuspendCommand.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.XException;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+import org.apache.oozie.command.wf.SuspendCommand;
+
+import java.util.Date;
+import java.util.List;
+
+public class CoordSuspendCommand extends CoordinatorCommand<Void> {
+
+ private String jobId;
+ private final XLog log = XLog.getLog(getClass());
+
+ public CoordSuspendCommand(String id) {
+ super("coord_suspend", "coord_suspend", 0, XLog.STD);
+ this.jobId = ParamChecker.notEmpty(id, "id");
+ }
+
+ protected Void call(CoordinatorStore store) throws StoreException, CommandException {
+ try {
+ // CoordinatorJobBean coordJob = store.getCoordinatorJob(jobId,
+ // false);
+ CoordinatorJobBean coordJob = store.getEntityManager().find(CoordinatorJobBean.class, jobId);
+ setLogInfo(coordJob);
+ if (coordJob.getStatus() != CoordinatorJob.Status.SUCCEEDED
+ && coordJob.getStatus() != CoordinatorJob.Status.FAILED) {
+ incrJobCounter(1);
+ coordJob.setStatus(CoordinatorJob.Status.SUSPENDED);
+                List<CoordinatorActionBean> actionList = store.getActionsForCoordinatorJob(jobId, false);
+ for (CoordinatorActionBean action : actionList) {
+ if (action.getStatus() == CoordinatorActionBean.Status.RUNNING) {
+ // queue a SuspendCommand
+ if (action.getExternalId() != null) {
+ queueCallable(new SuspendCommand(action.getExternalId()));
+ }
+ }
+ }
+ store.updateCoordinatorJob(coordJob);
+ }
+ // TODO queueCallable(new NotificationCommand(coordJob));
+ else {
+ log.info("CoordSuspendCommand not suspended - " + "job finished or does not exist " + jobId);
+ }
+ return null;
+ }
+ catch (XException ex) {
+ throw new CommandException(ex);
+ }
+ }
+
+ @Override
+ protected Void execute(CoordinatorStore store) throws StoreException, CommandException {
+ log.info("STARTED CoordSuspendCommand for jobId=" + jobId);
+ try {
+ if (lock(jobId)) {
+ call(store);
+ }
+ else {
+ queueCallable(new CoordSuspendCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+ log.warn("CoordSuspendCommand lock was not acquired - " + " failed " + jobId + ". Requeing the same.");
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new CoordSuspendCommand(jobId), LOCK_FAILURE_REQUEUE_INTERVAL);
+ log.warn("CoordSuspendCommand lock acquiring failed " + " with exception " + e.getMessage()
+ + " for job id " + jobId + ". Requeing the same.");
+ }
+ finally {
+ log.info("ENDED CoordSuspendCommand for jobId=" + jobId);
+ }
+ return null;
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/CoordinatorCommand.java b/core/src/main/java/org/apache/oozie/command/coord/CoordinatorCommand.java
new file mode 100644
index 000000000..7be13ec54
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/CoordinatorCommand.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.command.Command;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.service.DagXLogInfoService;
+import org.apache.oozie.service.XLogService;
+import org.apache.oozie.store.CoordinatorStore;
+import org.apache.oozie.store.Store;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.util.XLog;
+
+public abstract class CoordinatorCommand<T> extends Command<T, CoordinatorStore> {
+
+ public CoordinatorCommand(String name, String type, int priority, int logMask) {
+ super(name, type, priority, logMask);
+ }
+
+ public CoordinatorCommand(String name, String type, int priority, int logMask,
+ boolean dryrun) {
+ super(name, type, priority, logMask, (dryrun) ? false : true, dryrun);
+ }
+
+ /**
+ * Return the public interface of the Coordinator Store.
+ *
+ * @return {@link WorkflowStore}
+ */
+    public Class<? extends Store> getStoreClass() {
+ return CoordinatorStore.class;
+ }
+}
diff --git a/core/src/main/java/org/apache/oozie/command/coord/SLAEventsCommand.java b/core/src/main/java/org/apache/oozie/command/coord/SLAEventsCommand.java
new file mode 100644
index 000000000..bebd8c0f9
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/command/coord/SLAEventsCommand.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.command.coord;
+
+import java.util.List;
+
+import org.apache.oozie.SLAEventBean;
+import org.apache.oozie.command.Command;
+import org.apache.oozie.command.CommandException;
+import org.apache.oozie.store.SLAStore;
+import org.apache.oozie.store.Store;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+
+public class SLAEventsCommand extends Command<List<SLAEventBean>, SLAStore> {
+
+ private long seqId;
+ private int maxNoEvents;
+ private long lastSeqId = -1;
+
+ public SLAEventsCommand(long seqId, int maxNoEvnts) {
+ super("SLAEventsCommand", "SLAEventsCommand", 0, XLog.OPS);
+ this.seqId = seqId;
+ this.maxNoEvents = maxNoEvnts;
+ }
+
+ @Override
+    protected List<SLAEventBean> call(SLAStore store) throws StoreException, CommandException {
+ long lsId[] = new long[1];
+        List<SLAEventBean> slaEvntList = store.getSLAEventListNewerSeqLimited(seqId, maxNoEvents, lsId);
+ store.getEntityManager().clear();
+ setLastSeqId(lsId[0]);
+ return slaEvntList;
+ }
+
+ public void setLastSeqId(long lastSeqId) {
+ this.lastSeqId = lastSeqId;
+ }
+
+ public long getLastSeqId() {
+ return lastSeqId;
+ }
+
+ @Override
+    public Class<? extends Store> getStoreClass() {
+ // TODO Auto-generated method stub
+ return SLAStore.class;
+ }
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionCheckCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionCheckCommand.java
index b5f717429..4137ce612 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionCheckCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionCheckCommand.java
@@ -17,50 +17,81 @@
*/
package org.apache.oozie.command.wf;
+import java.sql.Timestamp;
import java.util.Date;
+
+import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.client.WorkflowAction.Status;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.command.CommandException;
+import org.apache.oozie.command.coord.CoordActionInputCheckCommand;
+import org.apache.oozie.command.wf.ActionCommand.ActionExecutorContext;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.service.ActionService;
+import org.apache.oozie.service.UUIDService;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.service.Services;
-import org.apache.oozie.service.UUIDService;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.workflow.WorkflowException;
+import org.apache.oozie.workflow.WorkflowInstance;
+import org.apache.oozie.workflow.lite.LiteWorkflowInstance;
/**
- * Executes the check command for ActionHandlers.
- *
- * Ensures the action is in RUNNING state before executing
- * {@link ActionExecutor#check(org.apache.oozie.action.ActionExecutor.Context, org.apache.oozie.client.WorkflowAction)}
+ * Executes the check command for ActionHandlers. Ensures the action is in RUNNING state before executing {@link
+ * ActionExecutor#check(org.apache.oozie.action.ActionExecutor.Context, org.apache.oozie.client.WorkflowAction)}
*/
public class ActionCheckCommand extends ActionCommand<Void> {
public static final String EXEC_DATA_MISSING = "EXEC_DATA_MISSING";
private String id;
+ private String jobId;
+ private int actionCheckDelay;
+
+ public ActionCheckCommand(String id) {
+ this(id, -1);
+ }
- public ActionCheckCommand(String id, String type) {
- super("action.check", type, -1);
+ public ActionCheckCommand(String id, int priority, int checkDelay) {
+ super("action.check", "action.check", priority);
this.id = id;
+ this.actionCheckDelay = checkDelay;
+ }
+
+ public ActionCheckCommand(String id, int checkDelay) {
+ this(id, -1, checkDelay);
}
@Override
protected Void call(WorkflowStore store) throws StoreException, CommandException {
- String jobId = Services.get().get(UUIDService.class).getId(id);
- WorkflowJobBean workflow = store.getWorkflow(jobId, true);
+
+ // String jobId = Services.get().get(UUIDService.class).getId(id);
+ WorkflowJobBean workflow = store.getWorkflow(jobId, false);
setLogInfo(workflow);
- WorkflowActionBean action = store.getAction(id, true);
+ WorkflowActionBean action = store.getAction(id, false);
setLogInfo(action);
if (action.isPending() && action.getStatus() == WorkflowActionBean.Status.RUNNING) {
+ setLogInfo(workflow);
+ // if the action has been updated, quit this command
+ if (actionCheckDelay > 0) {
+ Timestamp actionCheckTs = new Timestamp(System.currentTimeMillis() - actionCheckDelay * 1000);
+ Timestamp actionLmt = action.getLastCheckTimestamp();
+ if (actionLmt.after(actionCheckTs)) {
+ XLog.getLog(getClass()).debug(
+ "The wf action :" + id + " has been udated recently. Ignoring ActionCheckCommand!");
+ return null;
+ }
+ }
if (workflow.getStatus() == WorkflowJob.Status.RUNNING) {
ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
if (executor != null) {
+ ActionExecutorContext context = null;
try {
boolean isRetry = false;
- ActionExecutorContext context = new ActionCommand.ActionExecutorContext(workflow, action, isRetry);
+ context = new ActionCommand.ActionExecutorContext(workflow, action, isRetry);
incrActionCounter(action.getType(), 1);
Instrumentation.Cron cron = new Instrumentation.Cron();
@@ -72,11 +103,12 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
if (action.isExecutionComplete()) {
if (!context.isExecuted()) {
XLog.getLog(getClass()).warn(XLog.OPS,
- "Action Completed, ActionExecutor [{0}] must call setExecutionData()",
- executor.getType());
+ "Action Completed, ActionExecutor [{0}] must call setExecutionData()",
+ executor.getType());
action.setErrorInfo(EXEC_DATA_MISSING,
- "Execution Complete, but Execution Data Missing from Action");
+ "Execution Complete, but Execution Data Missing from Action");
failJob(context);
+ action.setLastCheckTime(new Date());
store.updateAction(action);
store.updateWorkflow(workflow);
return null;
@@ -92,6 +124,15 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
XLog.getLog(getClass()).warn(
"Exception while executing check(). Error Code [{0}], Message[{1}]", ex.getErrorCode(),
ex.getMessage(), ex);
+
+ switch (ex.getErrorType()) {
+ case FAILED:
+ failAction(workflow, action);
+ break;
+ }
+ action.setLastCheckTime(new Date());
+ store.updateAction(action);
+ store.updateWorkflow(workflow);
return null;
}
}
@@ -99,4 +140,50 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
}
return null;
}
+
+ private void failAction(WorkflowJobBean workflow, WorkflowActionBean action) throws CommandException {
+ XLog.getLog(getClass()).warn("Failing Job [{0}] due to failed action [{1}]", workflow.getId(), action.getId());
+ action.resetPending();
+ action.setStatus(Status.FAILED);
+ workflow.setStatus(WorkflowJob.Status.FAILED);
+ incrJobCounter(INSTR_FAILED_JOBS_COUNTER, 1);
+ }
+
+ /**
+ * @param args
+ * @throws Exception
+ */
+ public static void main(String[] args) throws Exception {
+ new Services().init();
+
+ try {
+ new ActionCheckCommand("0000001-100122154231282-oozie-dani-W@pig1").call();
+ Thread.sleep(100000);
+ }
+ finally {
+ new Services().destroy();
+ }
+ }
+
+ @Override
+ protected Void execute(WorkflowStore store) throws CommandException, StoreException {
+ try {
+ XLog.getLog(getClass()).debug("STARTED ActionCheckCommand for wf actionId=" + id + " priority =" + getPriority());
+ jobId = Services.get().get(UUIDService.class).getId(id);
+ if (lock(jobId)) {
+ call(store);
+ }
+ else {
+ queueCallable(new ActionCheckCommand(id, actionCheckDelay), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionCheckCommand lock was not acquired - failed {0}", id);
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new ActionCheckCommand(id, actionCheckDelay), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionCheckCommand lock was not acquired - interrupted exception failed {0}",
+ id);
+ }
+ XLog.getLog(getClass()).debug("ENDED ActionCheckCommand for wf actionId=" + id + ", jobId=" + jobId);
+ return null;
+ }
}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionCommand.java
index 4d2e648e6..f7c865d70 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionCommand.java
@@ -26,6 +26,8 @@
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.DagELFunctions;
import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.store.Store;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.Instrumentation;
import org.apache.oozie.util.XConfiguration;
@@ -36,6 +38,7 @@
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.workflow.WorkflowException;
+import org.apache.oozie.workflow.lite.LiteWorkflowInstance;
import org.apache.oozie.service.ELService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.HadoopAccessorService;
@@ -48,10 +51,10 @@
import java.util.Properties;
/**
- * Base class for Action execution commands. Provides common functionality to
- * handle different types of errors while attempting to start or end an action.
+ * Base class for Action execution commands. Provides common functionality to handle different types of errors while
+ * attempting to start or end an action.
*/
-public abstract class ActionCommand extends Command<Void, WorkflowStore> {
+public abstract class ActionCommand<T> extends WorkflowCommand<T> {
private static final String INSTRUMENTATION_GROUP = "action.executors";
protected static final String INSTR_FAILED_JOBS_COUNTER = "failed";
@@ -63,16 +66,14 @@ public ActionCommand(String name, String type, int priority) {
}
/**
- * Takes care of Transient failures. Sets the action status to retry and
- * increments the retry count if not enough attempts have been made.
- * Otherwise returns false.
- *
+ * Takes care of Transient failures. Sets the action status to retry and increments the retry count if not enough
+ * attempts have been made. Otherwise returns false.
+ *
* @param context the execution context.
* @param executor the executor instance being used.
* @param status the status to be set for the action.
- * @return true if the action is scheduled for another retry. false if the
- * number of retries has exceeded the maximum number of configured
- * retries.
+ * @return true if the action is scheduled for another retry. false if the number of retries has exceeded the
+ * maximum number of configured retries.
* @throws StoreException
* @throws org.apache.oozie.command.CommandException
*/
@@ -84,7 +85,6 @@ protected boolean handleTransient(ActionExecutor.Context context, ActionExecutor
incrActionErrorCounter(action.getType(), "transient", 1);
int actionRetryCount = action.getRetries();
-
if (actionRetryCount >= executor.getMaxRetries()) {
XLog.getLog(getClass()).warn("Exceeded max retry count [{0}]. Suspending Job", executor.getMaxRetries());
return false;
@@ -96,16 +96,15 @@ protected boolean handleTransient(ActionExecutor.Context context, ActionExecutor
long retryDelayMillis = executor.getRetryInterval() * 1000;
action.setPendingAge(new Date(System.currentTimeMillis() + retryDelayMillis));
XLog.getLog(getClass()).info("Next Retry, Attempt Number [{0}] in [{1}] milliseconds",
- actionRetryCount + 1, retryDelayMillis);
+ actionRetryCount + 1, retryDelayMillis);
queueCallable(this, retryDelayMillis);
return true;
}
}
/**
- * Takes care of non transient failures. The job is suspended, and the state
- * of the action is changed to *MANUAL
- *
+ * Takes care of non transient failures. The job is suspended, and the state of the action is changed to *MANUAL
+ *
* @param context the execution context.
* @param executor the executor instance being used.
* @param status the status to be set for the action.
@@ -130,24 +129,19 @@ protected void handleNonTransient(ActionExecutor.Context context, ActionExecutor
}
/**
- * Takes care of errors.
- *
- * For errors while attempting to start the action, the job state is updated
- * and an {@link ActionEndCommand} is queued.
+ * Takes care of errors. For errors while attempting to start the action, the job state is updated and an
+ * {@link ActionEndCommand} is queued. For errors while attempting to end the action, the job state is updated.
*
- * For errors while attempting to end the action, the job state is updated.
- *
- *
+ *
* @param context the execution context.
* @param executor the executor instance being used.
* @param message
- * @param isStart whether the error was generated while starting or ending
- * an action.
+ * @param isStart whether the error was generated while starting or ending an action.
* @param status the status to be set for the action.
* @throws org.apache.oozie.command.CommandException
*/
protected void handleError(ActionExecutor.Context context, ActionExecutor executor, String message,
- boolean isStart, WorkflowAction.Status status) throws CommandException {
+ boolean isStart, WorkflowAction.Status status) throws CommandException {
XLog.getLog(getClass()).warn("Setting Action Status to [{0}]", status);
ActionExecutorContext aContext = (ActionExecutorContext) context;
WorkflowActionBean action = (WorkflowActionBean) aContext.getAction();
@@ -170,12 +164,17 @@ public void failJob(ActionExecutor.Context context) throws CommandException {
XLog.getLog(getClass()).warn("Failing Job due to failed action [{0}]", action.getName());
try {
workflow.getWorkflowInstance().fail(action.getName());
+ WorkflowInstance wfInstance = workflow.getWorkflowInstance();
+ ((LiteWorkflowInstance) wfInstance).setStatus(WorkflowInstance.Status.FAILED);
+ workflow.setWorkflowInstance(wfInstance);
workflow.setStatus(WorkflowJob.Status.FAILED);
action.setStatus(WorkflowAction.Status.FAILED);
+ action.resetPending();
queueCallable(new NotificationCommand(workflow, action));
queueCallable(new KillCommand(workflow.getId()));
incrJobCounter(INSTR_FAILED_JOBS_COUNTER, 1);
- } catch (WorkflowException ex) {
+ }
+ catch (WorkflowException ex) {
throw new CommandException(ex);
}
}
@@ -230,14 +229,17 @@ public WorkflowAction getAction() {
}
public ELEvaluator getELEvaluator() {
- ELEvaluator evaluator = Services.get().get(ELService.class).createEvaluator();
+ ELEvaluator evaluator = Services.get().get(ELService.class).createEvaluator("workflow");
DagELFunctions.configureEvaluator(evaluator, workflow, action);
return evaluator;
}
public void setVar(String name, String value) {
name = action.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + name;
- workflow.getWorkflowInstance().setVar(name, value);
+ WorkflowInstance wfInstance = workflow.getWorkflowInstance();
+ wfInstance.setVar(name, value);
+ //workflow.getWorkflowInstance().setVar(name, value);
+ workflow.setWorkflowInstance(wfInstance);
}
public String getVar(String name) {
@@ -301,7 +303,7 @@ public String getRecoveryId() {
return action.getId() + RECOVERY_ID_SEPARATOR + workflow.getRun();
}
- public Path getActionDir() throws URISyntaxException, IOException{
+ public Path getActionDir() throws URISyntaxException, IOException {
String name = getWorkflow().getId() + "/" + action.getName() + "--" + action.getType();
FileSystem fs = getAppFileSystem();
String actionDirPath = Services.get().getSystemId() + "/" + name;
@@ -309,7 +311,7 @@ public Path getActionDir() throws URISyntaxException, IOException{
return fqActionDir;
}
- public FileSystem getAppFileSystem() throws IOException, URISyntaxException{
+ public FileSystem getAppFileSystem() throws IOException, URISyntaxException {
WorkflowJob workflow = getWorkflow();
XConfiguration jobConf = new XConfiguration(new StringReader(workflow.getConf()));
Configuration fsConf = new Configuration();
@@ -318,5 +320,11 @@ public FileSystem getAppFileSystem() throws IOException, URISyntaxException{
createFileSystem(workflow.getUser(), workflow.getGroup(), new URI(getWorkflow().getAppPath()),
fsConf);
}
+
+ @Override
+ public void setErrorInfo(String str, String exMsg) {
+ action.setErrorInfo(str, exMsg);
+ }
}
-}
\ No newline at end of file
+
+}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionEndCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionEndCommand.java
index bff012da7..ee83ff49c 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionEndCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionEndCommand.java
@@ -24,6 +24,7 @@
import org.apache.oozie.ErrorCode;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.service.ActionService;
+import org.apache.oozie.service.UUIDService;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.store.StoreException;
@@ -31,10 +32,13 @@
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.client.SLAEvent.Status;
import org.apache.oozie.service.Services;
-import org.apache.oozie.service.UUIDService;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.db.SLADbOperations;
+import org.apache.oozie.workflow.WorkflowInstance;
import java.util.Date;
@@ -43,6 +47,7 @@ public class ActionEndCommand extends ActionCommand {
public static final String END_DATA_MISSING = "END_DATA_MISSING";
private String id;
+ private String jobId = null;
public ActionEndCommand(String id, String type) {
super("action.end", type, 0);
@@ -50,14 +55,13 @@ public ActionEndCommand(String id, String type) {
}
protected Void call(WorkflowStore store) throws StoreException, CommandException {
- String jobId = Services.get().get(UUIDService.class).getId(id);
- WorkflowJobBean workflow = store.getWorkflow(jobId, true);
+ WorkflowJobBean workflow = store.getWorkflow(jobId, false);
setLogInfo(workflow);
- WorkflowActionBean action = store.getAction(id, true);
+ WorkflowActionBean action = store.getAction(id, false);
setLogInfo(action);
if (action.isPending()
- && (action.getStatus() == WorkflowActionBean.Status.DONE || action.getStatus() == WorkflowActionBean.Status.END_RETRY || action
- .getStatus() == WorkflowActionBean.Status.END_MANUAL)) {
+ && (action.getStatus() == WorkflowActionBean.Status.DONE
+ || action.getStatus() == WorkflowActionBean.Status.END_RETRY || action.getStatus() == WorkflowActionBean.Status.END_MANUAL)) {
if (workflow.getStatus() == WorkflowJob.Status.RUNNING) {
ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
@@ -80,8 +84,9 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
"End, name [{0}] type [{1}] status[{2}] external status [{3}] signal value [{4}]",
action.getName(), action.getType(), action.getStatus(), action.getExternalStatus(),
action.getSignalValue());
-
- DagELFunctions.setActionInfo(workflow.getWorkflowInstance(), action);
+ WorkflowInstance wfInstance = workflow.getWorkflowInstance();
+ DagELFunctions.setActionInfo(wfInstance, action);
+ workflow.setWorkflowInstance(wfInstance);
incrActionCounter(action.getType(), 1);
Instrumentation.Cron cron = new Instrumentation.Cron();
@@ -92,7 +97,7 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
if (!context.isEnded()) {
XLog.getLog(getClass()).warn(XLog.OPS,
- "Action Ended, ActionExecutor [{0}] must call setEndData()", executor.getType());
+ "Action Ended, ActionExecutor [{0}] must call setEndData()", executor.getType());
action.setErrorInfo(END_DATA_MISSING, "Execution Ended, but End Data Missing from Action");
failJob(context);
store.updateAction(action);
@@ -103,7 +108,32 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
action.setEndTime(new Date());
store.updateAction(action);
store.updateWorkflow(workflow);
+ Status slaStatus = null;
+ switch (action.getStatus()) {
+ case OK:
+ slaStatus = Status.SUCCEEDED;
+ break;
+ case KILLED:
+ slaStatus = Status.KILLED;
+ break;
+ case FAILED:
+ slaStatus = Status.FAILED;
+ break;
+ case ERROR:
+ XLog.getLog(getClass()).info("ERROR is considered as FAILED for SLA");
+ slaStatus = Status.KILLED;
+ break;
+ default: // TODO: What will happen for other Action
+ // status
+ slaStatus = Status.FAILED;
+ break;
+ }
+ SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, slaStatus,
+ SlaAppType.WORKFLOW_ACTION);
queueCallable(new NotificationCommand(workflow, action));
+ XLog.getLog(getClass()).debug(
+ "Queuing commands for action " + id + " status " + action.getStatus()
+ + ", Set pending=" + action.getPending());
queueCallable(new SignalCommand(workflow.getId(), id));
}
catch (ActionExecutorException ex) {
@@ -140,11 +170,39 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
else {
throw new CommandException(ErrorCode.E0802, action.getType());
}
- } else {
- XLog.getLog(getClass()).warn("Job state is not {0}. Skipping Action Execution",
- WorkflowJob.Status.RUNNING.toString());
}
+ else {
+ XLog.getLog(getClass()).warn("Job state is not {0}. Skipping ActionEnd Execution",
+ WorkflowJob.Status.RUNNING.toString());
+ }
+ }
+ else {
+ XLog.getLog(getClass()).debug("Action pending={0}, status={1}. Skipping ActionEnd Execution",
+ action.getPending(), action.getStatusStr());
+ }
+ return null;
+ }
+
+ @Override
+ protected Void execute(WorkflowStore store) throws CommandException, StoreException {
+ XLog.getLog(getClass()).debug("STARTED ActionEndCommand for action " + id);
+ try {
+ jobId = Services.get().get(UUIDService.class).getId(id);
+ if (lock(jobId)) {
+ call(store);
+ }
+ else {
+ queueCallable(new ActionEndCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionEnd lock was not acquired - failed {0}", id);
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new ActionEndCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionEnd lock was not acquired - interrupted exception failed {0}", id);
+ }
+ finally {
+ XLog.getLog(getClass()).debug("ENDED ActionEndCommand for action " + id);
}
return null;
}
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionKillCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionKillCommand.java
index 28e7c684d..f30897c9d 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionKillCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionKillCommand.java
@@ -19,19 +19,23 @@
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.client.SLAEvent.Status;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.service.ActionService;
+import org.apache.oozie.service.UUIDService;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.service.Services;
-import org.apache.oozie.service.UUIDService;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.db.SLADbOperations;
public class ActionKillCommand extends ActionCommand<Void> {
private String id;
+ private String jobId;
public ActionKillCommand(String id, String type) {
super("action.kill", type, 0);
@@ -39,13 +43,12 @@ public ActionKillCommand(String id, String type) {
}
protected Void call(WorkflowStore store) throws StoreException, CommandException {
- String jobId = Services.get().get(UUIDService.class).getId(id);
- WorkflowJobBean workflow = store.getWorkflow(jobId, true);
+ // String jobId = Services.get().get(UUIDService.class).getId(id);
+ WorkflowJobBean workflow = store.getWorkflow(jobId, false);
setLogInfo(workflow);
- WorkflowActionBean action = store.getAction(id, true);
+ WorkflowActionBean action = store.getAction(id, false);
setLogInfo(action);
- if (action.isPending()
- && (action.getStatus() == WorkflowActionBean.Status.KILLED)) {
+ if (action.isPending() && (action.getStatus() == WorkflowActionBean.Status.KILLED)) {
ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
if (executor != null) {
try {
@@ -59,19 +62,55 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
cron.stop();
addActionCron(action.getType(), cron);
- action.setStatus(WorkflowActionBean.Status.KILLED);
action.resetPending();
+ action.setStatus(WorkflowActionBean.Status.KILLED);
+
store.updateAction(action);
store.updateWorkflow(workflow);
+ // Add SLA status event (KILLED) for WF_ACTION
+ SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.KILLED,
+ SlaAppType.WORKFLOW_ACTION);
queueCallable(new NotificationCommand(workflow, action));
}
catch (ActionExecutorException ex) {
- XLog.getLog(getClass()).warn(
- "Exception while executing kill(). Error Code [{0}], Message[{1}]", ex.getErrorCode(),
- ex.getMessage(), ex);
+ action.resetPending();
+ action.setStatus(WorkflowActionBean.Status.FAILED);
+ action.setErrorInfo(ex.getErrorCode().toString(),
+ "KILL COMMAND FAILED - exception while executing job kill");
+ workflow.setStatus(WorkflowJobBean.Status.KILLED);
+ store.updateAction(action);
+ store.updateWorkflow(workflow);
+ // What will happen to WF and COORD_ACTION, NOTIFICATION?
+ SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.FAILED,
+ SlaAppType.WORKFLOW_ACTION);
+ XLog.getLog(getClass()).warn("Exception while executing kill(). Error Code [{0}], Message[{1}]",
+ ex.getErrorCode(), ex.getMessage(), ex);
}
}
}
return null;
}
+
+ @Override
+ protected Void execute(WorkflowStore store) throws CommandException, StoreException {
+ XLog.getLog(getClass()).debug("STARTED ActionKillCommand for action " + id);
+ try {
+ jobId = Services.get().get(UUIDService.class).getId(id);
+ if (lock(jobId)) {
+ call(store);
+ }
+ else {
+ queueCallable(new ActionKillCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionKill lock was not acquired - failed {0}", id);
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new ActionKillCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionKill lock was not acquired - interrupted exception failed {0}", id);
+ }
+ finally {
+ XLog.getLog(getClass()).debug("ENDED ActionKillCommand for action " + id);
+ }
+ return null;
+ }
}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionStartCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionStartCommand.java
index 46748dc43..801031e37 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionStartCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionStartCommand.java
@@ -25,14 +25,19 @@
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.SLAEvent.SlaAppType;
+import org.apache.oozie.client.SLAEvent.Status;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.FaultInjection;
+import org.apache.oozie.XException;
import org.apache.oozie.command.CommandException;
+import org.apache.oozie.command.coord.CoordActionUpdateCommand;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.service.ActionService;
+import org.apache.oozie.service.UUIDService;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.service.Services;
@@ -40,6 +45,8 @@
import org.apache.oozie.util.ELEvaluationException;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.XmlUtils;
+import org.apache.oozie.util.db.SLADbOperations;
import org.apache.oozie.util.XConfiguration;
import javax.servlet.jsp.el.ELException;
@@ -52,6 +59,7 @@ public class ActionStartCommand extends ActionCommand {
public static final String EXEC_DATA_MISSING = "EXEC_DATA_MISSING";
private String id;
+ private String jobId;
public ActionStartCommand(String id, String type) {
super("action.start", type, 0);
@@ -59,18 +67,20 @@ public ActionStartCommand(String id, String type) {
}
protected Void call(WorkflowStore store) throws StoreException, CommandException {
- String jobId = Services.get().get(UUIDService.class).getId(id);
- WorkflowJobBean workflow = store.getWorkflow(jobId, true);
+ WorkflowJobBean workflow = store.getWorkflow(jobId, false);
setLogInfo(workflow);
- WorkflowActionBean action = store.getAction(id, true);
+ WorkflowActionBean action = store.getAction(id, false);
+ XLog.getLog(getClass()).warn(XLog.STD,
+ "[***" + action.getId() + "***]" + "In call()....status=" + action.getStatusStr());
setLogInfo(action);
- if (action.isPending() && (action.getStatus() == WorkflowActionBean.Status.PREP ||
- action.getStatus() == WorkflowActionBean.Status.START_RETRY ||
- action.getStatus() == WorkflowActionBean.Status.START_MANUAL)) {
+ if (action.isPending()
+ && (action.getStatus() == WorkflowActionBean.Status.PREP
+ || action.getStatus() == WorkflowActionBean.Status.START_RETRY || action.getStatus() == WorkflowActionBean.Status.START_MANUAL)) {
if (workflow.getStatus() == WorkflowJob.Status.RUNNING) {
ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
Configuration conf = workflow.getWorkflowInstance().getConf();
+
int maxRetries = conf.getInt(OozieClient.ACTION_MAX_RETRIES, executor.getMaxRetries());
long retryInterval = conf.getLong(OozieClient.ACTION_RETRY_INTERVAL, executor.getRetryInterval());
executor.setMaxRetries(maxRetries);
@@ -86,19 +96,35 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
}
context = new ActionCommand.ActionExecutorContext(workflow, action, isRetry);
try {
- String actionConf = context.getELEvaluator().evaluate(action.getConf(), String.class);
+ String tmpActionConf = XmlUtils.removeComments(action.getConf());
+ String actionConf = context.getELEvaluator().evaluate(tmpActionConf, String.class);
action.setConf(actionConf);
XLog.getLog(getClass()).debug("Start, name [{0}] type [{1}] configuration{E}{E}{2}{E}",
- action.getName(), action.getType(), actionConf);
+ action.getName(), action.getType(), actionConf);
+
}
catch (ELEvaluationException ex) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT,
EL_EVAL_ERROR, ex.getMessage(), ex);
}
catch (ELException ex) {
- throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, EL_ERROR,
- ex.getMessage(), ex);
+ context.setErrorInfo(EL_ERROR, ex.getMessage());
+ XLog.getLog(getClass()).warn("ELException in ActionStartCommand ", ex.getMessage(), ex);
+ handleError(context, store, workflow, action);
+ return null;
+ }
+ catch (org.jdom.JDOMException je) {
+ context.setErrorInfo("ParsingError", je.getMessage());
+ XLog.getLog(getClass()).warn("JDOMException in ActionStartCommand ", je.getMessage(), je);
+ handleError(context, store, workflow, action);
+ return null;
+ }
+ catch (Exception ex) {
+ context.setErrorInfo(EL_ERROR, ex.getMessage());
+ XLog.getLog(getClass()).warn("Exception in ActionStartCommand ", ex.getMessage(), ex);
+ handleError(context, store, workflow, action);
+ return null;
}
action.setErrorInfo(null, null);
incrActionCounter(action.getType(), 1);
@@ -114,8 +140,8 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
if (action.isExecutionComplete()) {
if (!context.isExecuted()) {
XLog.getLog(getClass()).warn(XLog.OPS,
- "Action Completed, ActionExecutor [{0}] must call setExecutionData()",
- executor.getType());
+ "Action Completed, ActionExecutor [{0}] must call setExecutionData()",
+ executor.getType());
action.setErrorInfo(EXEC_DATA_MISSING,
"Execution Complete, but Execution Data Missing from Action");
failJob(context);
@@ -129,7 +155,8 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
else {
if (!context.isStarted()) {
XLog.getLog(getClass()).warn(XLog.OPS,
- "Action Started, ActionExecutor [{0}] must call setStartData()", executor.getType());
+ "Action Started, ActionExecutor [{0}] must call setStartData()",
+ executor.getType());
action.setErrorInfo(START_DATA_MISSING,
"Execution Started, but Start Data Missing from Action");
failJob(context);
@@ -139,8 +166,20 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
}
queueCallable(new NotificationCommand(workflow, action));
}
+
+ XLog.getLog(getClass()).warn(XLog.STD,
+ "[***" + action.getId() + "***]" + "Action status=" + action.getStatusStr());
+
store.updateAction(action);
store.updateWorkflow(workflow);
+ // Add SLA status event (STARTED) for WF_ACTION
+ // SLADbOperations.writeSlaStatusEvent(eSla,
+ // action.getId(), Status.STARTED, store);
+ SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.STARTED,
+ SlaAppType.WORKFLOW_ACTION);
+ XLog.getLog(getClass()).warn(XLog.STD,
+ "[***" + action.getId() + "***]" + "Action updated in DB!");
+
}
catch (ActionExecutorException ex) {
XLog.getLog(getClass()).warn(
@@ -164,7 +203,17 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
WorkflowAction.Status.DONE);
break;
case FAILED:
- failJob(context);
+ try {
+ failJob(context);
+ queueCallable(new CoordActionUpdateCommand(workflow));
+ SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store,
+ Status.FAILED, SlaAppType.WORKFLOW_ACTION);
+ SLADbOperations.writeStausEvent(workflow.getSlaXml(), workflow.getId(), store,
+ Status.FAILED, SlaAppType.WORKFLOW_JOB);
+ }
+ catch (XException x) {
+ XLog.getLog(getClass()).warn("ActionStartCommand - case:FAILED ", x.getMessage());
+ }
break;
}
store.updateAction(action);
@@ -178,9 +227,45 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
}
else {
XLog.getLog(getClass()).warn("Job state is not {0}. Skipping Action Execution",
- WorkflowJob.Status.RUNNING.toString());
+ WorkflowJob.Status.RUNNING.toString());
}
}
return null;
}
+
+ private void handleError(ActionExecutorContext context, WorkflowStore store, WorkflowJobBean workflow,
+ WorkflowActionBean action) throws CommandException, StoreException {
+ failJob(context);
+ store.updateAction(action);
+ store.updateWorkflow(workflow);
+ SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.FAILED,
+ SlaAppType.WORKFLOW_ACTION);
+ SLADbOperations.writeStausEvent(workflow.getSlaXml(), workflow.getId(), store, Status.FAILED,
+ SlaAppType.WORKFLOW_JOB);
+ queueCallable(new CoordActionUpdateCommand(workflow));
+ return;
+ }
+
+ @Override
+ protected Void execute(WorkflowStore store) throws CommandException, StoreException {
+ try {
+ XLog.getLog(getClass()).debug("STARTED ActionStartCommand for wf actionId=" + id);
+ jobId = Services.get().get(UUIDService.class).getId(id);
+ if (lock(jobId)) {
+ call(store);
+ }
+ else {
+ queueCallable(new ActionStartCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionStartCommand lock was not acquired - failed {0}", id);
+ }
+ }
+ catch (InterruptedException e) {
+ queueCallable(new ActionStartCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
+ XLog.getLog(getClass()).warn("ActionStartCommand lock was not acquired - interrupted exception failed {0}",
+ id);
+ }
+ XLog.getLog(getClass()).debug("ENDED ActionStartCommand for wf actionId=" + id + ", jobId=" + jobId);
+ return null;
+ }
+
}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/CompletedActionCommand.java b/core/src/main/java/org/apache/oozie/command/wf/CompletedActionCommand.java
index 3eaf03f24..521cbd4ad 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/CompletedActionCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/CompletedActionCommand.java
@@ -25,34 +25,39 @@
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.store.Store;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XLog;
import org.apache.oozie.service.Services;
import java.util.Properties;
-public class CompletedActionCommand extends Command {
+public class CompletedActionCommand extends WorkflowCommand {
private String actionId;
private String externalStatus;
private Properties actionData;
-
- public CompletedActionCommand(String actionId, String externalStatus, Properties actionData) {
- super("callback", "callback", 0, XLog.STD);
+ public CompletedActionCommand(String actionId, String externalStatus, Properties actionData, int priority) {
+ super("callback", "callback", priority, XLog.STD);
this.actionId = ParamChecker.notEmpty(actionId, "actionId");
this.externalStatus = ParamChecker.notEmpty(externalStatus, "externalStatus");
this.actionData = actionData;
}
+ public CompletedActionCommand(String actionId, String externalStatus, Properties actionData) {
+ this(actionId, externalStatus, actionData, 0);
+ }
+
@Override
protected Void call(WorkflowStore store) throws StoreException, CommandException {
WorkflowActionBean action = store.getAction(actionId, false);
setLogInfo(action);
if (action.getStatus() == WorkflowActionBean.Status.RUNNING) {
ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
- //this is done because oozie notifications (of sub-wfs) is send every status change, not only on completion.
+ // this is done because oozie notifications (of sub-wfs) are sent on
+ // every status change, not only on completion.
if (executor.isCompleted(externalStatus)) {
- queueCallable(new ActionCheckCommand(action.getId(), action.getType()));
+ queueCallable(new ActionCheckCommand(action.getId(), getPriority(), -1));
}
}
else {
@@ -61,4 +66,4 @@ protected Void call(WorkflowStore store) throws StoreException, CommandException
return null;
}
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/DefinitionCommand.java b/core/src/main/java/org/apache/oozie/command/wf/DefinitionCommand.java
index 42b147c13..5552041d9 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/DefinitionCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/DefinitionCommand.java
@@ -21,10 +21,11 @@
import org.apache.oozie.command.Command;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.store.Store;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XLog;
-public class DefinitionCommand extends Command {
+public class DefinitionCommand extends WorkflowCommand {
private String id;
public DefinitionCommand(String id) {
@@ -39,4 +40,4 @@ protected String call(WorkflowStore store) throws StoreException {
return workflow.getWorkflowInstance().getApp().getDefinition();
}
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ExternalIdCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ExternalIdCommand.java
index 89cc73a08..8e971d365 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ExternalIdCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ExternalIdCommand.java
@@ -19,12 +19,13 @@
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
+import org.apache.oozie.store.Store;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XLog;
import org.apache.oozie.command.Command;
import org.apache.oozie.command.CommandException;
-public class ExternalIdCommand extends Command {
+public class ExternalIdCommand extends WorkflowCommand {
private String id;
public ExternalIdCommand(String id) {
@@ -37,4 +38,4 @@ protected String call(WorkflowStore store) throws StoreException, CommandExcepti
return store.getWorkflowIdForExternalId(id);
}
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/JobCommand.java b/core/src/main/java/org/apache/oozie/command/wf/JobCommand.java
index 257b9a304..f13e14df7 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/JobCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/JobCommand.java
@@ -18,29 +18,43 @@
package org.apache.oozie.command.wf;
import org.apache.oozie.WorkflowJobBean;
-import org.apache.oozie.command.Command;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XLog;
import org.apache.oozie.service.Services;
-import java.util.List;
-
-public class JobCommand extends Command {
+/**
+ * Command for loading a job's information
+ */
+public class JobCommand extends WorkflowCommand {
private String id;
+ private int start = 1;
+ private int len = Integer.MAX_VALUE;
+ /**
+ * @param id wf jobId
+ */
public JobCommand(String id) {
- super("job.info", "job.info", 0, XLog.OPS);
+ this(id, 1, Integer.MAX_VALUE);
+ }
+
+ /**
+ * @param id wf jobId
+ * @param start starting index in the list of actions belonging to the job
+ * @param length number of actions to be returned
+ */
+ public JobCommand(String id, int start, int length) {
+ super("job.info", "job.info", 0, XLog.OPS, true);
this.id = ParamChecker.notEmpty(id, "id");
+ this.start = start;
+ this.len = length;
}
@Override
- @SuppressWarnings("unchecked")
protected WorkflowJobBean call(WorkflowStore store) throws StoreException {
- WorkflowJobBean workflow = store.getWorkflowInfo(id);
+ WorkflowJobBean workflow = store.getWorkflowInfoWithActionsSubset(id, start, len);
workflow.setConsoleUrl(getJobConsoleUrl(id));
- workflow.setActions((List)store.getActionsForWorkflow(id, false));
return workflow;
}
diff --git a/core/src/main/java/org/apache/oozie/command/wf/JobsCommand.java b/core/src/main/java/org/apache/oozie/command/wf/JobsCommand.java
index 829ea0b5f..801e17e7f 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/JobsCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/JobsCommand.java
@@ -22,7 +22,6 @@
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.WorkflowsInfo;
-import org.apache.oozie.command.Command;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.util.XLog;
@@ -30,33 +29,33 @@
/**
* Command for loading the Workflows according to the given filter information
*/
-public class JobsCommand extends Command {
- private Map> filter;
- private int start;
- private int len;
+public class JobsCommand extends WorkflowCommand {
+ private Map> filter;
+ private int start;
+ private int len;
- /**
- * Constructor taking the filter information
- *
- * @param filter Can be name, status, user, group and combination of these
- * @param start starting from this index in the list of workflows matching the filter are returned
- * @param length number of workflows to be returned from the list of workflows matching the filter
- * and starting from index "start".
- */
- public JobsCommand(Map