diff --git a/ambari-infra/.gitignore b/ambari-infra/.gitignore
deleted file mode 100644
index a7d91c4d714..00000000000
--- a/ambari-infra/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-target
-.settings
-.classpath
-.project
-/bin/
-job-repository.db
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/pom.xml b/ambari-infra/ambari-infra-assembly/pom.xml
deleted file mode 100644
index 5badd26e465..00000000000
--- a/ambari-infra/ambari-infra-assembly/pom.xml
+++ /dev/null
@@ -1,429 +0,0 @@
-
-
-
-
- ambari-infra
- org.apache.ambari
- 2.0.0.0-SNAPSHOT
-
- 4.0.0
- ambari-infra-assembly
- Ambari Infra Assembly
- http://maven.apache.org
-
-
- /usr/lib
-
- http://public-repo-1.hortonworks.com/ARTIFACTS/dist/lucene/solr/${solr.version}/solr-${solr.version}.tgz
- ${mapping.base.path}/ambari-infra-solr
- ambari-infra-solr
- ambari-infra-solr-client
- ${mapping.base.path}/${solr.client.package.name}
- ${project.basedir}/../ambari-infra-solr-client
- ${project.basedir}/../ambari-infra-solr-plugin
- ambari-infra-manager
- ${project.basedir}/../ambari-infra-manager
- ${mapping.base.path}/${infra-manager.package.name}
- ${infra-manager.mapping.path}/conf
- http://central.maven.org/maven2/commons-fileupload/commons-fileupload/1.3.3/commons-fileupload-1.3.3.jar
- commons-fileupload-1.3.3.jar
- commons-fileupload-1.3.2.jar
-
-
-
-
-
- rpm
-
- true
-
-
-
- build-rpm
-
-
-
-
-
- org.codehaus.mojo
- rpm-maven-plugin
- 2.1.4
-
- 2012, Apache Software Foundation
- Development
- Maven Recipe: RPM Package.
- false
- /
- noarch
- linux
-
- ${package-version}
- ${package-release}
-
- root
- root
-
-
-
- infra-solr
- package
-
- rpm
-
-
- Development
- ${solr.package.name}
-
-
- ${solr.mapping.path}
-
-
- ${project.build.directory}/solr
-
-
-
-
-
-
-
- infra-solr-client
- package
-
- rpm
-
-
- Development
- ${solr.client.package.name}
- noarch
- linux
-
- ${project.basedir}/src/main/package/rpm/solr-client/postinstall.sh
- utf-8
-
-
-
- ${solr.client.mapping.path}
-
-
- ${solr.client.dir}/target/package
-
- libs/checkstyle*.jar
-
-
-
-
-
-
-
-
- infra-manager
- package
-
- rpm
-
-
- Development
- ${infra-manager.package.name}
-
- ${project.basedir}/src/main/package/rpm/manager/postinstall.sh
- utf-8
-
-
- ${project.basedir}/src/main/package/rpm/manager/postremove.sh
- utf-8
-
-
-
- ${infra-manager.mapping.path}
-
-
- ${infra-manager.dir}/target/package
-
-
-
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-antrun-plugin
- 1.7
-
-
- generate-resources
-
- run
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- deb
-
-
-
- build-deb
-
-
-
-
-
-
- maven-resources-plugin
- 2.7
-
-
-
- copy-resources
- package
-
- copy-resources
-
-
- ${project.build.directory}/resources/deb
-
-
- ${project.basedir}/src/main/package/deb
-
- solr/postinst
- solr-client/postinst
-
- false
-
-
- ${project.basedir}/src/main/package/deb
-
- solr/postinst
- solr-client/postinst
-
- true
-
-
-
-
-
-
-
- org.vafer
- jdeb
- 1.4
-
-
- package
- jdeb-solr
-
- jdeb
-
-
- ${basedir}/src/main/package/deb/solr
- ${basedir}/target/${solr.package.name}_${package-version}-${package-release}.deb
- false
- false
-
-
- ${project.build.directory}/solr
- directory
-
- perm
- root
- root
- ${solr.mapping.path}
-
-
- bin/**,server/scripts/**
-
-
-
- ${project.build.directory}/solr
- directory
-
- perm
- root
- root
- 755
- ${solr.mapping.path}
-
-
- bin/**,server/scripts/**
-
-
-
-
-
-
-
- package
- jdeb-solr-client
-
- jdeb
-
-
- ${basedir}/src/main/package/deb/solr-client
- ${basedir}/target/${solr.client.package.name}_${package-version}-${package-release}.deb
- false
- false
-
-
- ${solr.client.dir}/target/ambari-infra-solr-client.tar.gz
- archive
-
- perm
- root
- root
- ${solr.client.mapping.path}
-
- libs/checkstyle*.jar
-
-
-
-
-
-
- package
- jdeb-infra-manager
-
- jdeb
-
-
- ${basedir}/src/main/package/deb/manager
- ${basedir}/target/${infra-manager.package.name}_${package-version}-${package-release}.deb
- false
- false
-
-
- ${infra-manager.dir}/target/ambari-infra-manager.tar.gz
- archive
-
- perm
- root
- root
- ${infra-manager.mapping.path}
-
-
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-antrun-plugin
- 1.7
-
-
- generate-resources
-
- run
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.ambari
- ambari-infra-solr-client
- ${project.version}
-
-
- org.apache.ambari
- ambari-infra-solr-plugin
- ${project.version}
-
-
- org.apache.ambari
- ambari-infra-manager
- ${project.version}
-
-
-
-
-
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control
deleted file mode 100644
index 03663a06c61..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-Package: [[infra-manager.package.name]]
-Version: [[package-version]]-[[package-release]]
-Section: [[deb.section]]
-Priority: [[deb.priority]]
-Depends: [[deb.dependency.list]]
-Architecture: [[deb.architecture]]
-Description: [[description]]
-Maintainer: [[deb.publisher]]
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
deleted file mode 100644
index acce62dd2cb..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-INFRA_MANAGER_SOURCE="/usr/lib/ambari-infra-manager/bin/infraManager.sh"
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_CONF_SOURCE="/usr/lib/ambari-infra-manager/conf"
-
-rm -f $INFRA_MANAGER_LINK_NAME ; ln -s $INFRA_MANAGER_SOURCE $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
-mkdir -p $INFRA_MANAGER_CONF_LINK_DIR
-ln -s $INFRA_MANAGER_CONF_SOURCE $INFRA_MANAGER_CONF_LINK_NAME
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm
deleted file mode 100644
index e62abc6f2bc..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-
-rm -f $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/control
deleted file mode 100644
index 88bafcb590b..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-Package: [[solr.client.package.name]]
-Version: [[package-version]]-[[package-release]]
-Section: [[deb.section]]
-Priority: [[deb.priority]]
-Depends: [[deb.dependency.list]]
-Architecture: [[deb.architecture]]
-Description: [[description]]
-Maintainer: [[deb.publisher]]
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
deleted file mode 100644
index ccc377b7e3f..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-SOLR_CLOUD_CLI_LINK_NAME="/usr/bin/infra-solr-cloud-cli"
-SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
-
-SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
-SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
-
-SOLR_DATA_MANAGER_LINK_NAME="/usr/bin/infra-solr-data-manager"
-SOLR_DATA_MANAGER_SOURCE="/usr/lib/ambari-infra-solr-client/solrDataManager.py"
-
-rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
-rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
-rm -f $SOLR_DATA_MANAGER_LINK_NAME ; ln -s $SOLR_DATA_MANAGER_SOURCE $SOLR_DATA_MANAGER_LINK_NAME
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postrm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postrm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/preinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/preinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/prerm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/prerm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/control
deleted file mode 100644
index 5087cd0e8c8..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-Package: [[solr.package.name]]
-Version: [[package-version]]-[[package-release]]
-Section: [[deb.section]]
-Priority: [[deb.priority]]
-Depends: [[deb.dependency.list]]
-Architecture: [[deb.architecture]]
-Description: [[description]]
-Maintainer: [[deb.publisher]]
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postrm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postrm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/preinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/preinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/prerm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/prerm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
deleted file mode 100644
index acce62dd2cb..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-INFRA_MANAGER_SOURCE="/usr/lib/ambari-infra-manager/bin/infraManager.sh"
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_CONF_SOURCE="/usr/lib/ambari-infra-manager/conf"
-
-rm -f $INFRA_MANAGER_LINK_NAME ; ln -s $INFRA_MANAGER_SOURCE $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
-mkdir -p $INFRA_MANAGER_CONF_LINK_DIR
-ln -s $INFRA_MANAGER_CONF_SOURCE $INFRA_MANAGER_CONF_LINK_NAME
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postremove.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postremove.sh
deleted file mode 100644
index e62abc6f2bc..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postremove.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-
-rm -f $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
deleted file mode 100644
index ccc377b7e3f..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-SOLR_CLOUD_CLI_LINK_NAME="/usr/bin/infra-solr-cloud-cli"
-SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
-
-SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
-SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
-
-SOLR_DATA_MANAGER_LINK_NAME="/usr/bin/infra-solr-data-manager"
-SOLR_DATA_MANAGER_SOURCE="/usr/lib/ambari-infra-solr-client/solrDataManager.py"
-
-rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
-rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
-rm -f $SOLR_DATA_MANAGER_LINK_NAME ; ln -s $SOLR_DATA_MANAGER_SOURCE $SOLR_DATA_MANAGER_LINK_NAME
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager-it/pom.xml b/ambari-infra/ambari-infra-manager-it/pom.xml
deleted file mode 100644
index c9b8bad5019..00000000000
--- a/ambari-infra/ambari-infra-manager-it/pom.xml
+++ /dev/null
@@ -1,224 +0,0 @@
-
-
-
-
-
- ambari-infra
- org.apache.ambari
- 2.0.0.0-SNAPSHOT
-
-
- Ambari Infra Manager Integration Tests
- http://maven.apache.org
- 4.0.0
-
- ambari-infra-manager-it
-
-
- 4.0.5
- 2.20
- localhost
- NONE
-
-
-
-
- org.apache.solr
- solr-solrj
- ${solr.version}
-
-
- com.amazonaws
- aws-java-sdk-s3
- 1.11.5
-
-
- commons-io
- commons-io
- 2.5
-
-
-
- org.slf4j
- slf4j-api
- 1.7.20
-
-
- org.slf4j
- slf4j-log4j12
- 1.7.20
-
-
-
- org.jbehave
- jbehave-core
- ${jbehave.version}
- test
-
-
- junit
- junit
- test
-
-
- org.easymock
- easymock
- 3.4
- test
-
-
- org.hamcrest
- hamcrest-all
- 1.3
- test
-
-
- com.sparkjava
- spark-core
- 2.7.1
- test
-
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop.version}
- test
-
-
- javax.servlet
- servlet-api
-
-
- org.mortbay.jetty
- jetty
-
-
- org.mortbay.jetty
- jetty-util
-
-
- com.sun.jersey
- jetty-util
-
-
- com.sun.jersey
- jersey-core
-
-
- com.sun.jersey
- jersey-json
-
-
- com.sun.jersey
- jersey-server
-
-
- org.slf4j
- slf4j-log4j12
-
-
- org.eclipse.jetty
- jetty-server
-
-
- org.eclipse.jetty
- jetty-util
-
-
- org.eclipse.jetty
- jetty-servlet
-
-
- org.eclipse.jetty
- jetty-security
-
-
-
-
- org.apache.hadoop
- hadoop-hdfs-client
- ${hadoop.version}
-
-
-
-
- target/classes
-
-
- src/test/java/
-
- **/*.story
-
-
-
- src/test/resources
-
-
-
-
-
-
- it
-
-
- it
-
-
-
-
-
- org.apache.maven.plugins
- maven-failsafe-plugin
- ${failsafe-plugin.version}
-
-
- run-integration-tests
- integration-test
-
- integration-test
-
-
-
- **/*Stories.java
- **/*IT.java
-
-
- file:${project.build.testOutputDirectory}/log4j.properties
- ${docker.host}
- ${stories.location}
-
-
-
-
- verify-integration-tests
- verify
-
- verify
-
-
-
-
-
-
-
-
-
-
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/HttpResponse.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/HttpResponse.java
deleted file mode 100644
index 3d8711b3eb5..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/HttpResponse.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-public class HttpResponse {
- private final int code;
- private final String body;
-
- public HttpResponse(int code, String body) {
- this.code = code;
- this.body = body;
- }
-
- public int getCode() {
- return code;
- }
-
- public String getBody() {
- return body;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
deleted file mode 100644
index 0118c769574..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.IOUtils;
-import org.apache.http.client.ClientProtocolException;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-// TODO: use swagger
-public class InfraClient implements AutoCloseable {
- private static final Logger LOG = LoggerFactory.getLogger(InfraClient.class);
-
- private final CloseableHttpClient httpClient;
- private final URI baseUrl;
-
- public InfraClient(String baseUrl) {
- try {
- this.baseUrl = new URI(baseUrl);
- } catch (URISyntaxException e) {
- throw new RuntimeException(e);
- }
- httpClient = HttpClientBuilder.create().setRetryHandler(new DefaultHttpRequestRetryHandler(0, false)).build();
- }
-
- @Override
- public void close() throws Exception {
- httpClient.close();
- }
-
- // TODO: return job data
- public void getJobs() {
- execute(new HttpGet(baseUrl));
- }
-
- private HttpResponse execute(HttpRequestBase post) {
- try (CloseableHttpResponse response = httpClient.execute(post)) {
- String responseBodyText = IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset());
- int statusCode = response.getStatusLine().getStatusCode();
- LOG.info("Response code {} body {} ", statusCode, responseBodyText);
- if (!(200 <= statusCode && statusCode <= 299))
- throw new RuntimeException("Error while executing http request: " + responseBodyText);
- return new HttpResponse(statusCode, responseBodyText);
- } catch (ClientProtocolException e) {
- throw new RuntimeException(e);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- public JobExecutionInfo startJob(String jobName, String parameters) {
- URIBuilder uriBuilder = new URIBuilder(baseUrl);
- uriBuilder.setScheme("http");
- uriBuilder.setPath(uriBuilder.getPath() + "/" + jobName);
- if (!isBlank(parameters))
- uriBuilder.addParameter("params", parameters);
- try {
- String responseText = execute(new HttpPost(uriBuilder.build())).getBody();
- Map responseContent = new ObjectMapper().readValue(responseText, new TypeReference>() {});
- return new JobExecutionInfo(responseContent.get("jobId").toString(), ((Map)responseContent.get("jobExecutionData")).get("id").toString());
- } catch (URISyntaxException | JsonParseException | JsonMappingException e) {
- throw new RuntimeException(e);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- public void restartJob(String jobName, String jobId) {
- URIBuilder uriBuilder = new URIBuilder(baseUrl);
- uriBuilder.setScheme("http");
- uriBuilder.setPath(String.format("%s/%s/%s/executions", uriBuilder.getPath(), jobName, jobId));
- uriBuilder.addParameter("operation", "RESTART");
- try {
- HttpResponse httpResponse = execute(new HttpPost(uriBuilder.build()));
- if (httpResponse.getCode() != 200)
- throw new RuntimeException(httpResponse.getBody());
- } catch (URISyntaxException e) {
- throw new RuntimeException(e);
- }
- }
-
- public void stopJob(String jobExecutionId) {
- URIBuilder uriBuilder = new URIBuilder(baseUrl);
- uriBuilder.setScheme("http");
- uriBuilder.setPath(String.format("%s/executions/%s", uriBuilder.getPath(), jobExecutionId));
- uriBuilder.addParameter("operation", "STOP");
- try {
- execute(new HttpDelete(uriBuilder.build()));
- } catch (URISyntaxException e) {
- throw new RuntimeException(e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraManagerStories.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraManagerStories.java
deleted file mode 100644
index 564de9a3a5f..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraManagerStories.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-import org.apache.ambari.infra.steps.ExportJobsSteps;
-import org.apache.commons.lang.StringUtils;
-import org.jbehave.core.configuration.Configuration;
-import org.jbehave.core.configuration.MostUsefulConfiguration;
-import org.jbehave.core.io.LoadFromClasspath;
-import org.jbehave.core.io.LoadFromRelativeFile;
-import org.jbehave.core.io.StoryFinder;
-import org.jbehave.core.io.StoryLoader;
-import org.jbehave.core.junit.JUnitStories;
-import org.jbehave.core.reporters.Format;
-import org.jbehave.core.reporters.StoryReporterBuilder;
-import org.jbehave.core.steps.InjectableStepsFactory;
-import org.jbehave.core.steps.InstanceStepsFactory;
-import org.jbehave.core.steps.ParameterConverters;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-import static java.util.Collections.singletonList;
-import static org.jbehave.core.io.CodeLocations.codeLocationFromClass;
-
-public class InfraManagerStories extends JUnitStories {
- private static final String BACKEND_STORIES_LOCATION_PROPERTY = "backend.stories.location";
- private static final String STORY_SUFFIX = ".story";
-
- @Override
- public Configuration configuration() {
- return new MostUsefulConfiguration()
- .useStoryLoader(getStoryLoader(BACKEND_STORIES_LOCATION_PROPERTY, this.getClass()))
- .useParameterConverters(new ParameterConverters().addConverters(new OffsetDateTimeConverter()))
- .useStoryReporterBuilder(
- new StoryReporterBuilder().withFailureTrace(true).withDefaultFormats().withFormats(Format.CONSOLE, Format.TXT));
- }
-
- private static StoryLoader getStoryLoader(String property, Class clazz) {
- boolean useExternalStoryLocation = useExternalStoryLocation(property);
- if (useExternalStoryLocation) {
- try {
- return new LoadFromRelativeFile(new URL("file://" + System.getProperty(property)));
- } catch (Exception e) {
- throw new RuntimeException("Cannot load story files from url: file://" + System.getProperty(property));
- }
- } else {
- return new LoadFromClasspath(clazz);
- }
- }
-
- @Override
- public InjectableStepsFactory stepsFactory() {
- return new InstanceStepsFactory(configuration(), new ExportJobsSteps());
- }
-
- @Override
- protected List storyPaths() {
- return findStories(BACKEND_STORIES_LOCATION_PROPERTY, STORY_SUFFIX, this.getClass());
- }
-
- private static List findStories(String property, String suffix, Class clazz) {
- if (useExternalStoryLocation(property)) {
- return findStoriesInFolder(System.getProperty(property), suffix);
- } else {
- return new StoryFinder()
- .findPaths(codeLocationFromClass(clazz).getFile(), singletonList(String.format("**/*%s", suffix)), null);
- }
- }
-
- private static List findStoriesInFolder(String folderAbsolutePath, String suffix) {
- List results = new ArrayList<>();
- File folder = new File(folderAbsolutePath);
- File[] listOfFiles = folder.listFiles();
- if (listOfFiles != null) {
- for (File file : listOfFiles) {
- if (file.getName().endsWith(suffix)) {
- results.add(file.getName());
- }
- }
- }
- return results;
- }
-
- private static boolean useExternalStoryLocation(String property) {
- String storyLocationProp = System.getProperty(property);
- return StringUtils.isNotEmpty(storyLocationProp) && !"NONE".equals(storyLocationProp);
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/JobExecutionInfo.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/JobExecutionInfo.java
deleted file mode 100644
index 92b783476f7..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/JobExecutionInfo.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-public class JobExecutionInfo {
- private final String jobId;
- private final String executionId;
-
- public JobExecutionInfo(String jobId, String executionId) {
- this.jobId = jobId;
- this.executionId = executionId;
- }
-
- public String getJobId() {
- return jobId;
- }
-
- public String getExecutionId() {
- return executionId;
- }
-
- @Override
- public String toString() {
- return "JobExecutionInfo{" +
- "jobId='" + jobId + '\'' +
- ", executionId='" + executionId + '\'' +
- '}';
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/OffsetDateTimeConverter.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/OffsetDateTimeConverter.java
deleted file mode 100644
index ef469a48bba..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/OffsetDateTimeConverter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-import org.jbehave.core.steps.ParameterConverters;
-
-import java.lang.reflect.Type;
-import java.time.OffsetDateTime;
-import java.time.format.DateTimeFormatter;
-
-public class OffsetDateTimeConverter implements ParameterConverters.ParameterConverter {
- public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX");
-
- @Override
- public boolean accept(Type type) {
- return type instanceof Class> && OffsetDateTime.class.isAssignableFrom((Class>) type);
- }
-
- @Override
- public Object convertValue(String value, Type type) {
- return OffsetDateTime.parse(value, SOLR_DATETIME_FORMATTER);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
deleted file mode 100644
index 1ffdb2a2da4..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-import static org.apache.ambari.infra.TestUtil.doWithin;
-import static org.apache.ambari.infra.TestUtil.getDockerHost;
-import static org.apache.ambari.infra.TestUtil.runCommand;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.nio.file.Paths;
-
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrInputDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Solr {
- private static final Logger LOG = LoggerFactory.getLogger(Solr.class);
- public static final String AUDIT_LOGS_COLLECTION = "audit_logs";
- public static final String HADOOP_LOGS_COLLECTION = "hadoop_logs";
- private static final int SOLR_PORT = 8983;
-
- private final SolrClient solrClient;
- private final String configSetPath;
-
- public Solr() {
- this("");
- }
-
- public Solr(String configSetPath) {
- this.configSetPath = configSetPath;
- this.solrClient = new LBHttpSolrClient.Builder().withBaseSolrUrls(String.format("http://%s:%d/solr/%s_shard1_replica1",
- getDockerHost(),
- SOLR_PORT,
- AUDIT_LOGS_COLLECTION)).build();
- }
-
- public void waitUntilSolrIsUp() throws Exception {
- try (CloseableHttpClient httpClient = HttpClientBuilder.create().setRetryHandler(new DefaultHttpRequestRetryHandler(0, false)).build()) {
- doWithin(60, "Check Solr running", () -> pingSolr(httpClient));
- }
- }
-
- private boolean pingSolr(CloseableHttpClient httpClient) {
- try (CloseableHttpResponse response = httpClient.execute(new HttpGet(String.format("http://%s:%d/solr/admin/collections?action=LIST", getDockerHost(), SOLR_PORT)))) {
- return response.getStatusLine().getStatusCode() == 200;
- }
- catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- public void add(SolrInputDocument solrInputDocument) {
- try {
- solrClient.add(solrInputDocument);
- } catch (SolrServerException | IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public void createSolrCollection(String collectionName) {
- LOG.info("Creating collection");
- runCommand(new String[]{"docker", "exec", "docker_solr_1", "solr", "create_collection", "-force", "-c", collectionName, "-d", Paths.get(configSetPath, "configsets", collectionName, "conf").toString(), "-n", collectionName + "_conf"});
- }
-
- public QueryResponse query(SolrQuery query) {
- try {
- return solrClient.query(query);
- } catch (SolrServerException | IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public void commit() {
- try {
- solrClient.commit();
- } catch (SolrServerException | IOException e) {
- throw new RuntimeException(e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
deleted file mode 100644
index f48e10734a7..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-import static java.lang.System.currentTimeMillis;
-
-import java.nio.charset.StandardCharsets;
-import java.util.function.BooleanSupplier;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestUtil {
- private static final Logger LOG = LoggerFactory.getLogger(TestUtil.class);
-
- public static void doWithin(int sec, String actionName, BooleanSupplier predicate) {
- doWithin(sec, actionName, () -> {
- if (!predicate.getAsBoolean())
- throw new RuntimeException("Predicate was false!");
- });
- }
-
- public static void doWithin(int sec, String actionName, Runnable runnable) {
- long start = currentTimeMillis();
- Exception exception;
- while (true) {
- try {
- runnable.run();
- return;
- }
- catch (Exception e) {
- exception = e;
- }
-
- if (currentTimeMillis() - start > sec * 1000) {
- throw new AssertionError(String.format("Unable to perform action '%s' within %d seconds", actionName, sec), exception);
- }
- else {
- LOG.info("Performing action '{}' failed. retrying...", actionName);
- }
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- throw new RuntimeException(e);
- }
- }
- }
-
- public static String getDockerHost() {
- return System.getProperty("docker.host") != null ? System.getProperty("docker.host") : "localhost";
- }
-
- public static void runCommand(String[] command) {
- try {
- LOG.info("Exec command: {}", StringUtils.join(command, " "));
- Process process = Runtime.getRuntime().exec(command);
- String stdout = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8);
- LOG.info("Exec command result {}", stdout);
- } catch (Exception e) {
- throw new RuntimeException("Error during execute shell command: ", e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
deleted file mode 100644
index 3016d67c0e8..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.metrics;
-
-import static java.lang.System.currentTimeMillis;
-import static org.apache.ambari.infra.Solr.HADOOP_LOGS_COLLECTION;
-import static org.apache.ambari.infra.TestUtil.runCommand;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.Is.is;
-
-import java.io.File;
-import java.net.URL;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.ambari.infra.Solr;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class MetricsIT {
- private static final Logger LOG = LoggerFactory.getLogger(MetricsIT.class);
-
- private static MockMetricsServer metricsServer;
- private static String shellScriptLocation;
-
- @BeforeClass
- public static void setupMetricsServer() throws Exception {
- URL location = MetricsIT.class.getProtectionDomain().getCodeSource().getLocation();
- String ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent();
-
- // TODO: use the same containers as ambari-infra-manager-it
- shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh";
- LOG.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ...");
- runCommand(new String[]{shellScriptLocation, "start"});
-
- Solr solr = new Solr("/usr/lib/ambari-infra-solr/server/solr");
- solr.waitUntilSolrIsUp();
- solr.createSolrCollection(HADOOP_LOGS_COLLECTION);
-
- metricsServer = new MockMetricsServer();
- metricsServer.init();
- }
-
- @AfterClass
- public static void tearDown() throws Exception {
- LOG.info("shutdown containers");
- runCommand(new String[]{shellScriptLocation, "stop"});
- }
-
- @Test
- public void testAllMetricsArrived() throws Exception {
- metricsServer.addExpectedMetrics(EXPECTED_METRICS);
- long start = currentTimeMillis();
- while (!metricsServer.getNotReceivedMetrics().isEmpty()) {
- Thread.sleep(1000);
- if (currentTimeMillis() - start > 30 * 1000)
- break;
- LOG.info("Checking any metrics arrived...");
- }
-
- metricsServer.getNotReceivedMetrics().forEach(metric -> LOG.info("Metric not received: {}", metric));
- assertThat(metricsServer.getNotReceivedMetrics().isEmpty(), is(true));
- }
-
- private static final Set EXPECTED_METRICS = new HashSet() {{
- add("infra.solr.jvm.threads.count");
- add("infra.solr.jvm.threads.deadlock.count");
- add("infra.solr.jvm.memory.heap.used");
- add("infra.solr.jvm.memory.heap.max");
- add("infra.solr.jvm.memory.non-heap.used");
- add("infra.solr.jvm.memory.non-heap.max");
- add("infra.solr.jvm.memory.pools.CMS-Old-Gen.used");
- add("infra.solr.jvm.memory.pools.CMS-Old-Gen.max");
- add("infra.solr.jvm.gc.ConcurrentMarkSweep.count");
- add("infra.solr.jvm.gc.ConcurrentMarkSweep.time");
- add("infra.solr.jvm.gc.ParNew.count");
- add("infra.solr.jvm.gc.ParNew.time");
- add("infra.solr.jvm.memory.pools.Metaspace.used");
- add("infra.solr.jvm.memory.pools.Metaspace.max");
- add("infra.solr.jvm.memory.pools.Par-Eden-Space.used");
- add("infra.solr.jvm.memory.pools.Par-Eden-Space.max");
- add("infra.solr.jvm.memory.pools.Par-Survivor-Space.used");
- add("infra.solr.jvm.memory.pools.Par-Survivor-Space.max");
- add("infra.solr.jvm.os.processCpuLoad");
- add("infra.solr.jvm.os.systemCpuLoad");
- add("infra.solr.jvm.os.openFileDescriptorCount");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.adds");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.deletesById");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.errors");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.docsPending");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requests");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requestTimes.avgRequestsPerSecond");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requestTimes.avgTimePerRequest");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requestTimes.medianRequestTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requests");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requestTimes.avgRequestsPerSecond");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requestTimes.avgTimePerRequest");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requestTimes.medianRequestTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requests");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requestTimes.avgRequestsPerSecond");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requestTimes.avgTimePerRequest");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requestTimes.medianRequestTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requests");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requestTimes.avgRequestsPerSecond");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requestTimes.avgTimePerRequest");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requestTimes.medianRequestTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requests");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requestTimes.avgRequestsPerSecond");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requestTimes.avgTimePerRequest");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requestTimes.medianRequestTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.INDEX.sizeInBytes");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.filterCache.hitratio");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.filterCache.size");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.filterCache.warmupTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.queryResultCache.hitratio");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.queryResultCache.size");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.queryResultCache.warmupTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.documentCache.hitratio");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.documentCache.size");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.documentCache.warmupTime");
- add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.core.fieldCache.entries_count");
- }};
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
deleted file mode 100644
index 9d2734fcb60..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package org.apache.ambari.infra.solr.metrics;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import static java.util.Collections.singletonList;
-import static spark.Spark.get;
-import static spark.Spark.port;
-import static spark.Spark.post;
-
-import java.util.Set;
-import java.util.concurrent.ConcurrentSkipListSet;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-
-import spark.Request;
-import spark.Response;
-import spark.servlet.SparkApplication;
-
-public class MockMetricsServer implements SparkApplication {
- private static final Logger LOG = LoggerFactory.getLogger(MockMetricsServer.class);
- private static final String HOST_NAME = "metrics_collector";
-
- private Set expectedMetrics;
-
- @Override
- public void init() {
- port(6188);
- get("/ping", (req, resp) -> "pong");
- get("/ws/v1/timeline/metrics/livenodes", this::queryState);
- post("/ws/v1/timeline/metrics", this::logBody);
- }
-
- private Object queryState(Request request, Response response) {
- LOG.info("Sending hostname {}", HOST_NAME);
- response.type("application/json");
- return new Gson().toJson(singletonList(HOST_NAME));
- }
-
- private Object logBody(Request req, Response resp) {
- String body = req.body();
- LOG.info("Incoming metrics {}", body);
-
- expectedMetrics.removeIf(body::contains);
-
- return "OK";
- }
-
- public void addExpectedMetrics(Set expectedMetrics) {
- this.expectedMetrics = new ConcurrentSkipListSet<>(expectedMetrics);
- }
-
- public Set getNotReceivedMetrics() {
- return expectedMetrics;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
deleted file mode 100644
index f219ce5cfab..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.steps;
-
-import static org.apache.ambari.infra.Solr.AUDIT_LOGS_COLLECTION;
-import static org.apache.ambari.infra.Solr.HADOOP_LOGS_COLLECTION;
-import static org.apache.ambari.infra.TestUtil.doWithin;
-import static org.apache.ambari.infra.TestUtil.getDockerHost;
-import static org.apache.ambari.infra.TestUtil.runCommand;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.time.OffsetDateTime;
-import java.util.Date;
-
-import org.apache.ambari.infra.InfraClient;
-import org.apache.ambari.infra.Solr;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.solr.common.SolrInputDocument;
-import org.jbehave.core.annotations.AfterStories;
-import org.jbehave.core.annotations.BeforeStories;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.ListObjectsRequest;
-import com.amazonaws.services.s3.model.ObjectListing;
-
-public abstract class AbstractInfraSteps {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractInfraSteps.class);
-
- private static final int INFRA_MANAGER_PORT = 61890;
- private static final int FAKE_S3_PORT = 4569;
- private static final int HDFS_PORT = 9000;
- protected static final String S3_BUCKET_NAME = "testbucket";
- private String ambariFolder;
- private String shellScriptLocation;
- private String dockerHost;
- private AmazonS3Client s3client;
- private int documentId = 0;
- private Solr solr;
-
- public InfraClient getInfraClient() {
- return new InfraClient(String.format("http://%s:%d/api/v1/jobs", dockerHost, INFRA_MANAGER_PORT));
- }
-
- public Solr getSolr() {
- return solr;
- }
-
- public AmazonS3Client getS3client() {
- return s3client;
- }
-
- public String getLocalDataFolder() {
- return ambariFolder + "/ambari-infra/ambari-infra-manager/docker/test-out";
- }
-
- @BeforeStories
- public void initDockerContainer() throws Exception {
- System.setProperty("HADOOP_USER_NAME", "root");
-
- URL location = AbstractInfraSteps.class.getProtectionDomain().getCodeSource().getLocation();
- ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent();
-
- LOG.info("Clean local data folder {}", getLocalDataFolder());
- FileUtils.cleanDirectory(new File(getLocalDataFolder()));
-
- shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh";
- LOG.info("Create new docker container for testing Ambari Infra Manager ...");
- runCommand(new String[]{shellScriptLocation, "start"});
-
- dockerHost = getDockerHost();
-
- solr = new Solr();
- solr.waitUntilSolrIsUp();
-
- solr.createSolrCollection(AUDIT_LOGS_COLLECTION);
- solr.createSolrCollection(HADOOP_LOGS_COLLECTION);
-
- LOG.info("Initializing s3 client");
- s3client = new AmazonS3Client(new BasicAWSCredentials("remote-identity", "remote-credential"));
- s3client.setEndpoint(String.format("http://%s:%d", dockerHost, FAKE_S3_PORT));
- s3client.createBucket(S3_BUCKET_NAME);
-
- checkInfraManagerReachable();
- }
-
- private void checkInfraManagerReachable() throws Exception {
- try (InfraClient httpClient = getInfraClient()) {
- doWithin(30, "Start Ambari Infra Manager", httpClient::getJobs);
- LOG.info("Ambari Infra Manager is up and running");
- }
- }
-
- protected void addDocument(OffsetDateTime logtime) {
- SolrInputDocument solrInputDocument = new SolrInputDocument();
- solrInputDocument.addField("logType", "HDFSAudit");
- solrInputDocument.addField("cluster", "cl1");
- solrInputDocument.addField("event_count", 1);
- solrInputDocument.addField("repo", "hdfs");
- solrInputDocument.addField("reqUser", "ambari-qa");
- solrInputDocument.addField("type", "hdfs_audit");
- solrInputDocument.addField("seq_num", 9);
- solrInputDocument.addField("result", 1);
- solrInputDocument.addField("path", "/root/test-logs/hdfs-audit/hdfs-audit.log");
- solrInputDocument.addField("ugi", "ambari-qa (auth:SIMPLE)");
- solrInputDocument.addField("host", "logfeeder.apache.org");
- solrInputDocument.addField("action", "getfileinfo");
- solrInputDocument.addField("log_message", "allowed=true\tugi=ambari-qa (auth:SIMPLE)\tip=/192.168.64.102\tcmd=getfileinfo\tsrc=/ats/active\tdst=null\tperm=null\tproto=rpc\tcallerContext=HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f");
- solrInputDocument.addField("logger_name", "FSNamesystem.audit");
- solrInputDocument.addField("id", Integer.toString(documentId++));
- solrInputDocument.addField("authType", "SIMPLE");
- solrInputDocument.addField("logfile_line_number", 1);
- solrInputDocument.addField("cliIP", "/192.168.64.102");
- solrInputDocument.addField("level", "INFO");
- solrInputDocument.addField("resource", "/ats/active");
- solrInputDocument.addField("ip", "172.18.0.2");
- solrInputDocument.addField("evtTime", "2017-12-08T10:23:16.452Z");
- solrInputDocument.addField("req_caller_id", "HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f");
- solrInputDocument.addField("repoType", 1);
- solrInputDocument.addField("enforcer", "hadoop-acl");
- solrInputDocument.addField("cliType", "rpc");
- solrInputDocument.addField("message_md5", "-6778765776916226588");
- solrInputDocument.addField("event_md5", "5627261521757462732");
- solrInputDocument.addField("logtime", new Date(logtime.toInstant().toEpochMilli()));
- solrInputDocument.addField("_ttl_", "+7DAYS");
- solrInputDocument.addField("_expire_at_", "2017-12-15T10:23:19.106Z");
- solr.add(solrInputDocument);
- }
-
- @AfterStories
- public void shutdownContainers() throws Exception {
- Thread.sleep(2000); // sync with s3 server
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
- ObjectListing objectListing = getS3client().listObjects(listObjectsRequest);
- LOG.info("Found {} files on s3.", objectListing.getObjectSummaries().size());
- objectListing.getObjectSummaries().forEach(s3ObjectSummary -> LOG.info("Found file on s3 with key {}", s3ObjectSummary.getKey()));
-
- LOG.info("Listing files on hdfs.");
- try (FileSystem fileSystem = getHdfs()) {
- int count = 0;
- RemoteIterator it = fileSystem.listFiles(new Path("/test_audit_logs"), true);
- while (it.hasNext()) {
- LOG.info("Found file on hdfs with name {}", it.next().getPath().getName());
- ++count;
- }
- LOG.info("{} files found on hfds", count);
- }
-
- LOG.info("shutdown containers");
- runCommand(new String[]{shellScriptLocation, "stop"});
- }
-
- protected FileSystem getHdfs() throws IOException {
- Configuration conf = new Configuration();
- conf.set("fs.defaultFS", String.format("hdfs://%s:%d/", dockerHost, HDFS_PORT));
- return FileSystem.get(conf);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
deleted file mode 100644
index d84c23fb69f..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.steps;
-
-import static java.util.Objects.requireNonNull;
-import static org.apache.ambari.infra.OffsetDateTimeConverter.SOLR_DATETIME_FORMATTER;
-import static org.apache.ambari.infra.TestUtil.doWithin;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.hasProperty;
-import static org.hamcrest.core.IsCollectionContaining.hasItem;
-import static org.junit.Assert.assertThat;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.time.Duration;
-import java.time.OffsetDateTime;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.ambari.infra.InfraClient;
-import org.apache.ambari.infra.JobExecutionInfo;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.jbehave.core.annotations.Given;
-import org.jbehave.core.annotations.Then;
-import org.jbehave.core.annotations.When;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.ListObjectsRequest;
-import com.amazonaws.services.s3.model.ObjectListing;
-import com.amazonaws.services.s3.model.ObjectMetadata;
-
-public class ExportJobsSteps extends AbstractInfraSteps {
- private static final Logger LOG = LoggerFactory.getLogger(ExportJobsSteps.class);
-
- private Map launchedJobs = new HashMap<>();
-
- @Given("$count documents in solr")
- public void addDocuments(int count) {
- OffsetDateTime intervalEnd = OffsetDateTime.now();
- for (int i = 0; i < count; ++i) {
- addDocument(intervalEnd.minusMinutes(i % (count / 10)));
- }
- getSolr().commit();
- }
-
- @Given("$count documents in solr with logtime from $startLogtime to $endLogtime")
- public void addDocuments(long count, OffsetDateTime startLogtime, OffsetDateTime endLogtime) {
- Duration duration = Duration.between(startLogtime, endLogtime);
- long increment = duration.toNanos() / count;
- for (int i = 0; i < count; ++i)
- addDocument(startLogtime.plusNanos(increment * i));
- getSolr().commit();
- }
-
- @Given("a file on s3 with key $key")
- public void addFileToS3(String key) throws Exception {
- try (ByteArrayInputStream inputStream = new ByteArrayInputStream("anything".getBytes())) {
- getS3client().putObject(S3_BUCKET_NAME, key, inputStream, new ObjectMetadata());
- }
- }
-
- @When("start $jobName job")
- public void startJob(String jobName) throws Exception {
- startJob(jobName, null, 0);
- }
-
- @When("start $jobName job with parameters $parameters after $waitSec seconds")
- public void startJob(String jobName, String parameters, int waitSec) throws Exception {
- Thread.sleep(waitSec * 1000);
- try (InfraClient httpClient = getInfraClient()) {
- JobExecutionInfo jobExecutionInfo = httpClient.startJob(jobName, parameters);
- LOG.info("Job {} started: {}", jobName, jobExecutionInfo);
- launchedJobs.put(jobName, jobExecutionInfo);
- }
- }
-
- @When("restart $jobName job within $waitSec seconds")
- public void restartJob(String jobName, int waitSec) {
- doWithin(waitSec, "Restarting job " + jobName, () -> {
- try (InfraClient httpClient = getInfraClient()) {
- httpClient.restartJob(jobName, launchedJobs.get(jobName).getJobId());
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- });
- }
-
- @When("stop job $jobName after at least $count file exists in s3 with filename containing text $text within $waitSec seconds")
- public void stopJob(String jobName, int count, String text, int waitSec) throws Exception {
- AmazonS3Client s3Client = getS3client();
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
- doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME)
- && fileCountOnS3(text, s3Client, listObjectsRequest) > count);
-
- try (InfraClient httpClient = getInfraClient()) {
- httpClient.stopJob(launchedJobs.get(jobName).getExecutionId());
- }
- }
-
- @When("delete file with key $key from s3")
- public void deleteFileFromS3(String key) {
- getS3client().deleteObject(S3_BUCKET_NAME, key);
- }
-
- @Then("Check filenames contains the text $text on s3 server after $waitSec seconds")
- public void checkS3After(String text, int waitSec) {
- AmazonS3Client s3Client = getS3client();
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
- doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME)
- && !s3Client.listObjects(listObjectsRequest).getObjectSummaries().isEmpty());
-
- ObjectListing objectListing = s3Client.listObjects(listObjectsRequest);
- assertThat(objectListing.getObjectSummaries(), hasItem(hasProperty("key", containsString(text))));
- }
-
- @Then("Check $count files exists on s3 server with filenames containing the text $text after $waitSec seconds")
- public void checkNumberOfFilesOnS3(long count, String text, int waitSec) {
- AmazonS3Client s3Client = getS3client();
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
- doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME)
- && fileCountOnS3(text, s3Client, listObjectsRequest) == count);
- }
-
- private long fileCountOnS3(String text, AmazonS3Client s3Client, ListObjectsRequest listObjectsRequest) {
- return s3Client.listObjects(listObjectsRequest).getObjectSummaries().stream()
- .filter(s3ObjectSummary -> s3ObjectSummary.getKey().contains(text))
- .count();
- }
-
- @Then("Less than $count files exists on s3 server with filenames containing the text $text after $waitSec seconds")
- public void checkLessThanFileExistsOnS3(long count, String text, int waitSec) {
- AmazonS3Client s3Client = getS3client();
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
- doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME) && between(
- fileCountOnS3(text, s3Client, listObjectsRequest), 1L, count - 1L));
- }
-
- private boolean between(long count, long from, long to) {
- return from <= count && count <= to;
- }
-
- @Then("No file exists on s3 server with filenames containing the text $text")
- public void fileNotExistOnS3(String text) {
- AmazonS3Client s3Client = getS3client();
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME);
- assertThat(s3Client.listObjects(listObjectsRequest).getObjectSummaries().stream()
- .anyMatch(s3ObjectSummary -> s3ObjectSummary.getKey().contains(text)), is(false));
- }
-
- @Then("solr contains $count documents between $startLogtime and $endLogtime")
- public void documentCount(int count, OffsetDateTime startLogTime, OffsetDateTime endLogTime) {
- SolrQuery query = new SolrQuery();
- query.setRows(count * 2);
- query.setQuery(String.format("logtime:[\"%s\" TO \"%s\"]", SOLR_DATETIME_FORMATTER.format(startLogTime), SOLR_DATETIME_FORMATTER.format(endLogTime)));
- assertThat(getSolr().query(query).getResults().size(), is(count));
- }
-
- @Then("solr does not contain documents between $startLogtime and $endLogtime after $waitSec seconds")
- public void isSolrEmpty(OffsetDateTime startLogTime, OffsetDateTime endLogTime, int waitSec) {
- SolrQuery query = new SolrQuery();
- query.setRows(1);
- query.setQuery(String.format("logtime:[\"%s\" TO \"%s\"]", SOLR_DATETIME_FORMATTER.format(startLogTime), SOLR_DATETIME_FORMATTER.format(endLogTime)));
- doWithin(waitSec, "check solr is empty", () -> isSolrEmpty(query));
- }
-
- private boolean isSolrEmpty(SolrQuery query) {
- return getSolr().query(query).getResults().isEmpty();
- }
-
- @Then("Check $count files exists on hdfs with filenames containing the text $text in the folder $path after $waitSec seconds")
- public void checkNumberOfFilesOnHdfs(int count, String text, String path, int waitSec) throws Exception {
- try (FileSystem fileSystem = getHdfs()) {
- doWithin(waitSec, "check uploaded files to hdfs", () -> {
- try {
- int fileCount = 0;
- RemoteIterator it = fileSystem.listFiles(new Path(path), true);
- while (it.hasNext()) {
- if (it.next().getPath().getName().contains(text))
- ++fileCount;
- }
- return fileCount == count;
- }
- catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- });
- }
- }
-
- @Then("Check $count files exists on local filesystem with filenames containing the text $text in the folder $path for job $jobName")
- public void checkNumberOfFilesOnLocalFilesystem(long count, String text, String path, String jobName) {
- File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", launchedJobs.get(jobName).getJobId()));
- LOG.info("Destination directory path: {}", destinationDirectory.getAbsolutePath());
- doWithin(5, "Destination directory exists", destinationDirectory::exists);
-
- File[] files = requireNonNull(destinationDirectory.listFiles(),
- String.format("Path %s is not a directory or an I/O error occurred!", destinationDirectory.getAbsolutePath()));
- assertThat(Arrays.stream(files)
- .filter(file -> file.getName().contains(text))
- .count(), is(count));
- }
-}
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties b/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties
deleted file mode 100644
index 956bc6364e9..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.Target=System.out
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story b/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
deleted file mode 100644
index 122a634dcb5..00000000000
--- a/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
+++ /dev/null
@@ -1,67 +0,0 @@
-Scenario: Exporting documents form solr and upload them to s3 using defult configuration
-
-Given 1000 documents in solr
-When start archive_audit_logs job
-Then Check filenames contains the text audit_logs on s3 server after 20 seconds
-
-
-Scenario: Exporting 10 documents using writeBlockSize=3 produces 4 files
-
-Given 10 documents in solr with logtime from 2010-10-09T05:00:00.000Z to 2010-10-09T20:00:00.000Z
-When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-10-09T00:00:00.000Z,end=2010-10-11T00:00:00.000Z after 2 seconds
-Then Check 4 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-10-09 after 20 seconds
-And solr does not contain documents between 2010-10-09T05:00:00.000Z and 2010-10-09T20:00:00.000Z after 5 seconds
-
-
-Scenario: Running archiving job with a bigger start value than end value exports and deletes 0 documents
-
-Given 10 documents in solr with logtime from 2010-01-01T05:00:00.000Z to 2010-01-04T05:00:00.000Z
-When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-01-03T05:00:00.000Z,end=2010-01-02T05:00:00.000Z after 2 seconds
-Then No file exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-01-0
-And solr contains 10 documents between 2010-01-01T05:00:00.000Z and 2010-01-04T05:00:00.000Z
-
-
-Scenario: Archiving job fails when part of the data is exported. After resolving the issue and restarting the job exports the rest of the data.
-
-Given 200 documents in solr with logtime from 2011-10-09T05:00:00.000Z to 2011-10-09T20:00:00.000Z
-And a file on s3 with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz
-When start archive_audit_logs job with parameters writeBlockSize=20,start=2010-11-09T00:00:00.000Z,end=2011-10-11T00:00:00.000Z after 2 seconds
-Then Check 3 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds
-And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T07:59:59.999Z after 5 seconds
-When delete file with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz from s3
-And restart archive_audit_logs job within 2 seconds
-Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds
-And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T20:00:00.000Z after 5 seconds
-
-
-Scenario: After Deleting job deletes documents from solr no document found in the specified interval
-
-Given 10 documents in solr with logtime from 2012-10-09T05:00:00.000Z to 2012-10-09T20:00:00.000Z
-When start delete_audit_logs job with parameters start=2012-10-09T05:00:00.000Z,end=2012-10-09T20:00:00.000Z after 2 seconds
-Then solr does not contain documents between 2012-10-09T05:00:00.000Z and 2012-10-09T20:00:00.000Z after 5 seconds
-
-
-Scenario: Archiving documents to hdfs
-
-Given 1000 documents in solr with logtime from 2014-01-04T05:00:00.000Z to 2014-01-06T20:00:00.000Z
-When start archive_audit_logs job with parameters start=2014-01-04T05:00:00.000Z,end=2014-01-06T20:00:00.000Z,destination=HDFS after 2 seconds
-Then Check 7 files exists on hdfs with filenames containing the text audit_logs_-_2014-01-0 in the folder /test_audit_logs after 10 seconds
-And solr does not contain documents between 2014-01-04T05:00:00.000Z and 2014-01-06T20:00:00.000Z after 10 seconds
-
-
-Scenario: Archiving documents to local filesystem
-
-Given 200 documents in solr with logtime from 2014-02-04T05:00:00.000Z to 2014-02-06T20:00:00.000Z
-When start archive_audit_logs job with parameters start=2014-02-04T05:00:00.000Z,end=2014-02-06T20:00:00.000Z,destination=LOCAL,localDestinationDirectory=/root/archive after 2 seconds
-Then Check 2 files exists on local filesystem with filenames containing the text audit_logs_-_2014-02-0 in the folder audit_logs_${jobId}_2014-02-06T20-00-00.000Z for job archive_audit_logs
-And solr does not contain documents between 2014-02-04T05:00:00.000Z and 2014-02-06T20:00:00.000Z after 10 seconds
-
-
-Scenario: Launch Archiving job. Initiate stop and check that part of the data is archived. After restart all data must be extracted.
-
-Given 200 documents in solr with logtime from 2014-03-09T05:00:00.000Z to 2014-03-09T20:00:00.000Z
-When start archive_audit_logs job with parameters writeBlockSize=20,start=2014-03-09T05:00:00.000Z,end=2014-03-09T20:00:00.000Z after 2 seconds
-And stop job archive_audit_logs after at least 1 file exists in s3 with filename containing text solr_archive_audit_logs_-_2014-03-09 within 10 seconds
-Then Less than 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds
-When restart archive_audit_logs job within 10 seconds
-Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds
diff --git a/ambari-infra/ambari-infra-manager/.gitignore b/ambari-infra/ambari-infra-manager/.gitignore
deleted file mode 100644
index 94b38299dda..00000000000
--- a/ambari-infra/ambari-infra-manager/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-out/*
-*.pid
-Profile
-.env
-test-out
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/README.md b/ambari-infra/ambari-infra-manager/README.md
deleted file mode 100644
index 4e38a69c61e..00000000000
--- a/ambari-infra/ambari-infra-manager/README.md
+++ /dev/null
@@ -1,117 +0,0 @@
-
-
-# Ambari Infra Manager
-
-## Overview
-
-Ambari Infra Manager is a REST based management application for Ambari Infra services (like Infra Solr). The API is built on top of [Spring Batch](http://docs.spring.io/spring-batch/reference/html/)
-
-### Architecture
-![batch-1](docs/images/batch-1.png)
-
-### Job execution overview
-![batch-2](docs/images/batch-2.png)
-
-### Job workflow
-![batch-3](docs/images/batch-3.png)
-
-### Step workflow
-![batch-4](docs/images/batch-4.png)
-
-(images originally from [here](http://docs.spring.io/spring-batch/reference/html/))
-
-## API documentation
-
-Infra Manager uses [Swagger](http://swagger.io/), generated yaml file can be downloaded from [here](docs/api/swagger.yaml)
-
-
-## Development guide
-
-### Adding a new custom job
-
-As Infra Manager is a Spring based application and using Java configurations, if it is needed to add a new custom Job, the Jobs/Steps/Configurations are need to be on the classpath. Spring beans are registered only in a specific package, so for writing a plugin, all the added Java classes needs to be added inside "org.apache.ambari.infra" package.
-
-For the plugin it will be needed to add all Spring & Spring batch dependencies. For adding a new Job you will need to define a new Configuration object. There you can define your own jobs/steps/writers/readers/processors, as you can see in that example:
-```java
-@Configuration
-@EnableBatchProcessing
-public class MyJobConfig {
-
- @Inject
- private StepBuilderFactory steps;
-
- @Inject
- private JobBuilderFactory jobs;
-
-
- @Bean(name = "dummyStep")
- protected Step dummyStep(ItemReader reader,
- ItemProcessor processor,
- ItemWriter writer) {
- return steps.get("dummyStep").listener(new DummyStepListener()). chunk(2)
- .reader(reader).processor(processor).writer(writer).build();
- }
-
- @Bean(name = "dummyJob")
- public Job job(@Qualifier("dummyStep") Step dummyStep) {
- return jobs.get("dummyJob").listener(new DummyJobListener()).start(dummyStep).build();
- }
-
-}
-```
-As you can see it will require to implement [ItemWriter](https://docs.spring.io/spring-batch/apidocs/org/springframework/batch/item/ItemWriter.html), [ItemReader](http://docs.spring.io/spring-batch/trunk/apidocs/org/springframework/batch/item/ItemReader.html) and [ItemProcessor](https://docs.spring.io/spring-batch/apidocs/org/springframework/batch/item/ItemProcessor.html)
-
-### Schedule custom jobs
-
-It can be needed based on business requirements to schedule jobs (e.g. daily) instead of run manually through the REST API. It can be done with adding a custom bean to "org.apache.ambari.infra" package with using [@Scheduled](http://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/scheduling/annotation/Scheduled.html):
-```java
-@Named
-public class MySchedulerObject {
-
- @Inject
- private JobService jobService; // or JobOperator jobOperator if spring-batch-admin manager dependecy is not included
-
- @Value("${infra-manager.batch.my.param:defaultString}")
- private String myParamFromLogSearchProperties;
-
- @Scheduled(cron = "*/5 * * * * MON-FRI")
- public void doSomething() {
- // setup job params
- jobService.launch(jobName, jobParameters, TimeZone.getDefault());
- }
-
- @Scheduled(cron = "${infra.manager.my.prop}")
- public void doSomethingBasedOnInfraProperty() {
- // do something ...
- }
-}
-```
-
-You can put your cron expression inside infra-manager.properties file just make it configuratble.
-### Build & Run Application
-```bash
-mvn clean package exec:java
-```
-
-### Build & Run Application in docker container
-```bash
-cd docker
-./infra-manager-docker.sh
-```
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/build.xml b/ambari-infra/ambari-infra-manager/build.xml
deleted file mode 100644
index 6df3767c821..00000000000
--- a/ambari-infra/ambari-infra-manager/build.xml
+++ /dev/null
@@ -1,57 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/docker/Dockerfile b/ambari-infra/ambari-infra-manager/docker/Dockerfile
deleted file mode 100644
index eaefe956953..00000000000
--- a/ambari-infra/ambari-infra-manager/docker/Dockerfile
+++ /dev/null
@@ -1,52 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FROM centos:centos6
-
-RUN echo root:changeme | chpasswd
-
-RUN yum clean all -y && yum update -y
-RUN yum -y install vim wget rpm-build sudo which telnet tar openssh-server openssh-clients ntp git httpd lsof
-RUN rpm -e --nodeps --justdb glibc-common
-RUN yum -y install glibc-common
-
-ENV HOME /root
-
-#Install JAVA
-ENV JAVA_VERSION 8u131
-ENV BUILD_VERSION b11
-RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/d54c1d3a095b4ff2b6607d096fa80163/jdk-$JAVA_VERSION-linux-x64.rpm -O jdk-8-linux-x64.rpm
-RUN rpm -ivh jdk-8-linux-x64.rpm
-ENV JAVA_HOME /usr/java/default/
-
-#Install Maven
-RUN mkdir -p /opt/maven
-WORKDIR /opt/maven
-RUN wget http://archive.apache.org/dist/maven/maven-3/3.3.1/binaries/apache-maven-3.3.1-bin.tar.gz
-RUN tar -xvzf /opt/maven/apache-maven-3.3.1-bin.tar.gz
-RUN rm -rf /opt/maven/apache-maven-3.3.1-bin.tar.gz
-
-ENV M2_HOME /opt/maven/apache-maven-3.3.1
-ENV MAVEN_OPTS -Xmx2048m
-ENV PATH $PATH:$JAVA_HOME/bin:$M2_HOME/bin
-
-# SSH key
-RUN ssh-keygen -f /root/.ssh/id_rsa -t rsa -N ''
-RUN cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys
-RUN chmod 600 /root/.ssh/authorized_keys
-RUN sed -ri 's/UsePAM yes/UsePAM no/g' /etc/ssh/sshd_config
-
-ADD bin/start.sh /root/start.sh
-RUN chmod +x /root/start.sh
-
-WORKDIR /root
-CMD /root/start.sh
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/docker/bin/start.sh b/ambari-infra/ambari-infra-manager/docker/bin/start.sh
deleted file mode 100755
index 8c33d32b7e2..00000000000
--- a/ambari-infra/ambari-infra-manager/docker/bin/start.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-export INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n"
-touch /root/infra-manager.log
-/root/ambari-infra-manager/bin/infraManager.sh start > /root/infra-manager.log
-tail -f /root/infra-manager.log
-
diff --git a/ambari-infra/ambari-infra-manager/docker/docker-compose.yml b/ambari-infra/ambari-infra-manager/docker/docker-compose.yml
deleted file mode 100644
index 2369d852b13..00000000000
--- a/ambari-infra/ambari-infra-manager/docker/docker-compose.yml
+++ /dev/null
@@ -1,103 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-version: '3.3'
-services:
- zookeeper:
- image: zookeeper:${ZOOKEEPER_VERSION:-3.4.10}
- restart: always
- hostname: zookeeper
- networks:
- - infra-network
- ports:
- - 2181:2181
- environment:
- ZOO_MY_ID: 1
- ZOO_SERVERS: server.1=zookeeper:2888:3888
- solr:
-# TODO: use infra-solr
- image: solr:${SOLR_VERSION:-7.4.0}
- restart: always
- hostname: solr
- ports:
- - "8983:8983"
- networks:
- - infra-network
- env_file:
- - Profile
- entrypoint:
- - docker-entrypoint.sh
- - solr
- - start
- - "-f"
- - "-c"
- - "-z"
- - ${ZOOKEEPER_CONNECTION_STRING}
- volumes:
- - $AMBARI_LOCATION/ambari-logsearch/ambari-logsearch-server/src/main/configsets:/opt/solr/configsets
- fakes3:
- image: localstack/localstack
- hostname: fakes3
- ports:
- - "4569:4569"
- environment:
- - SERVICES=s3:4569
- networks:
- infra-network:
- aliases:
- - testbucket.fakes3
- env_file:
- - Profile
- namenode:
- image: flokkr/hadoop-hdfs-namenode:${HADOOP_VERSION:-3.0.0}
- hostname: namenode
- ports:
- - 9870:9870
- - 9000:9000
- env_file:
- - Profile
- environment:
- ENSURE_NAMENODE_DIR: "/tmp/hadoop-hdfs/dfs/name"
- networks:
- - infra-network
- datanode:
- image: flokkr/hadoop-hdfs-datanode:${HADOOP_VERSION:-3.0.0}
- links:
- - namenode
- env_file:
- - Profile
- networks:
- - infra-network
- inframanager:
- image: ambari-infra-manager:v1.0
- restart: always
- hostname: infra-manager.apache.org
- networks:
- - infra-network
- env_file:
- - Profile
- ports:
- - 61890:61890
- - 5007:5007
- environment:
- COMPONENT: infra-manager
- COMPONENT_LOG: infra-manager
- ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING}
- DISPLAY: $DOCKERIP:0
- volumes:
- - $AMBARI_LOCATION/ambari-infra/ambari-infra-manager/target/package:/root/ambari-infra-manager
- - $AMBARI_LOCATION/ambari-infra/ambari-infra-manager/docker/test-out:/root/archive
-networks:
- infra-network:
- driver: bridge
diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh
deleted file mode 100755
index 52719095c04..00000000000
--- a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-sdir="`dirname \"$0\"`"
-: ${1:?"argument is missing: (start|stop)"}
-command="$1"
-
-function start_containers() {
- check_env_files
- kill_containers
- pushd $sdir/../
- local AMBARI_INFRA_MANAGER_LOCATION=$(pwd)
- echo $AMBARI_INFRA_MANAGER_LOCATION
- cd $AMBARI_INFRA_MANAGER_LOCATION/docker
- echo "Start containers ..."
- docker-compose up -d
- popd
- echo "Containers started"
-}
-
-function check_env_files() {
- local count=0;
-
- check_env_file .env setup_env
- count=$((count + $?));
- check_env_file Profile setup_profile
- count=$((count + $?));
-
- if [[ "$count" -gt 0 ]]
- then
- echo "Exit"
- exit;
- fi
-}
-
-function check_env_file() {
- if [ -f "$sdir/$1" ];
- then
- echo "$1 file exists"
- return 0;
- else
- echo "$1 file does not exist, Creating a new one..."
- $2
- echo "$1 file has been created. Check it out before starting Ambari Infra Manager. ($sdir/$1)"
- return 1;
- fi
-}
-
-function setup_env() {
- pushd $sdir/../../
- local AMBARI_LOCATION=$(pwd)
- popd
- local docker_ip=$(get_docker_ip)
- cat << EOF > $sdir/.env
-DOCKERIP=$docker_ip
-MAVEN_REPOSITORY_LOCATION=$HOME/.m2
-AMBARI_LOCATION=$AMBARI_LOCATION
-
-ZOOKEEPER_VERSION=3.4.10
-ZOOKEEPER_CONNECTION_STRING=zookeeper:2181
-
-SOLR_VERSION=7.4.0
-
-HADOOP_VERSION=3.0.0
-EOF
-}
-
-function get_docker_ip() {
- local ip=$(ifconfig en0 | grep inet | awk '$1=="inet" {print $2}')
- echo $ip
-}
-
-function setup_profile() {
- cat << EOF > $sdir/Profile
-AWS_ACCESS_KEY_ID=test
-AWS_SECRET_ACCESS_KEY=test
-HADOOP_USER_NAME=root
-
-CORE-SITE.XML_fs.default.name=hdfs://namenode:9000
-CORE-SITE.XML_fs.defaultFS=hdfs://namenode:9000
-HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
-HDFS-SITE.XML_dfs.replication=1
-EOF
-}
-
-function kill_containers() {
- pushd $sdir/../
- local AMBARI_INFRA_MANAGER_LOCATION=$(pwd)
- echo "Try to remove containers if exists ..."
- echo $AMBARI_INFRA_MANAGER_LOCATION
- cd $AMBARI_INFRA_MANAGER_LOCATION/docker
- docker-compose rm -f -s inframanager
- docker-compose rm -f -s solr
- docker-compose rm -f -s zookeeper
- docker-compose rm -f -s fakes3
- docker-compose rm -f -s namenode
- docker-compose rm -f -s datanode
- popd
-}
-
-case $command in
- "start")
- start_containers
- ;;
- "stop")
- kill_containers
- ;;
- *)
- echo "Available commands: (start|stop)"
- ;;
-esac
diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
deleted file mode 100755
index 87d6b8aa795..00000000000
--- a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-sdir="`dirname \"$0\"`"
-: ${1:?"argument is missing: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)"}
-command="$1"
-
-function build_infra_manager_container() {
- pushd $sdir
- docker build -t ambari-infra-manager:v1.0 .
- popd
-}
-
-function build_infra_manager_project() {
- pushd $sdir/../
- mvn clean package -DskipTests
- popd
-}
-
-function kill_infra_manager_container() {
- echo "Try to remove infra manager container if exists ..."
- docker rm -f infra-manager
-}
-
-function start_infra_manager_container() {
- echo "Start infra manager container ..."
- pushd $sdir/../
- local AMBARI_INFRA_MANAGER_LOCATION=$(pwd)
- popd
- kill_infra_manager_container
- docker run -d --name infra-manager --hostname infra-manager.apache.org \
- -v $AMBARI_INFRA_MANAGER_LOCATION/target/package:/root/ambari-infra-manager -p 61890:61890 -p 5007:5007 \
- ambari-infra-manager:v1.0
- ip_address=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' logsearch)
- echo "Ambari Infra Manager container started on $ip_address (for Mac OSX route to boot2docker/docker-machine VM address, e.g.: 'sudo route add -net 172.17.0.0/16 192.168.59.103')"
- echo "You can follow Log Search logs with 'docker logs -f infra-manager' command"
-}
-
-case $command in
- "build-and-run")
- build_infra_manager_project
- build_infra_manager_container
- start_infra_manager_container
- ;;
- "build")
- build_infra_manager_project
- start_infra_manager_container
- ;;
- "build-docker-and-run")
- build_infra_manager_container
- start_infra_manager_container
- ;;
- "build-mvn-and-run")
- build_infra_manager_project
- build_infra_manager_container
- ;;
- "build-docker-only")
- build_infra_manager_container
- ;;
- "build-mvn-only")
- build_infra_manager_project
- ;;
- "start")
- start_infra_manager_container
- ;;
- "stop")
- kill_infra_manager_container
- ;;
- *)
- echo "Available commands: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)"
- ;;
-esac
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml b/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml
deleted file mode 100644
index 6fad22df98e..00000000000
--- a/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml
+++ /dev/null
@@ -1,784 +0,0 @@
----
-swagger: "2.0"
-info:
- description: "Manager component for Ambari Infra"
- version: "1.0.0"
- title: "Infra Manager REST API"
- license:
- name: "Apache 2.0"
- url: "http://www.apache.org/licenses/LICENSE-2.0.html"
-basePath: "/api/v1"
-tags:
-- name: "jobs"
-schemes:
-- "http"
-- "https"
-paths:
- /jobs:
- get:
- tags:
- - "jobs"
- summary: "Get all jobs"
- description: ""
- operationId: "getAllJobs"
- produces:
- - "application/json"
- parameters:
- - name: "page"
- in: "query"
- required: false
- type: "integer"
- default: 0
- format: "int32"
- - name: "size"
- in: "query"
- required: false
- type: "integer"
- default: 20
- format: "int32"
- responses:
- 200:
- description: "successful operation"
- schema:
- type: "array"
- items:
- $ref: "#/definitions/JobInfo"
- /jobs/executions:
- delete:
- tags:
- - "jobs"
- summary: "Stop all job executions."
- description: ""
- operationId: "stopAll"
- produces:
- - "application/json"
- parameters: []
- responses:
- 200:
- description: "successful operation"
- schema:
- type: "integer"
- format: "int32"
- /jobs/executions/{jobExecutionId}:
- get:
- tags:
- - "jobs"
- summary: "Get job and step details for job execution instance."
- description: ""
- operationId: "getExecutionInfo"
- produces:
- - "application/json"
- parameters:
- - name: "jobExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/JobExecutionDetailsResponse"
- delete:
- tags:
- - "jobs"
- summary: "Stop or abandon a running job execution."
- description: ""
- operationId: "stopOrAbandonJobExecution"
- produces:
- - "application/json"
- parameters:
- - name: "jobExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- - name: "operation"
- in: "query"
- required: true
- type: "string"
- enum:
- - "STOP"
- - "ABANDON"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/JobExecutionInfoResponse"
- /jobs/executions/{jobExecutionId}/context:
- get:
- tags:
- - "jobs"
- summary: "Get execution context for specific job."
- description: ""
- operationId: "getExecutionContextByJobExecId"
- produces:
- - "application/json"
- parameters:
- - name: "jobExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/ExecutionContextResponse"
- /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}:
- get:
- tags:
- - "jobs"
- summary: "Get step execution details."
- description: ""
- operationId: "getStepExecution"
- produces:
- - "application/json"
- parameters:
- - name: "jobExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- - name: "stepExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/StepExecutionInfoResponse"
- /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}/execution-context:
- get:
- tags:
- - "jobs"
- summary: "Get the execution context of step execution."
- description: ""
- operationId: "getStepExecutionContext"
- produces:
- - "application/json"
- parameters:
- - name: "jobExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- - name: "stepExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/StepExecutionContextResponse"
- /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}/progress:
- get:
- tags:
- - "jobs"
- summary: "Get progress of step execution."
- description: ""
- operationId: "getStepExecutionProgress"
- produces:
- - "application/json"
- parameters:
- - name: "jobExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- - name: "stepExecutionId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/StepExecutionProgressResponse"
- /jobs/info/names:
- get:
- tags:
- - "jobs"
- summary: "Get all job names"
- description: ""
- operationId: "getAllJobNames"
- produces:
- - "application/json"
- parameters: []
- responses:
- 200:
- description: "successful operation"
- schema:
- type: "array"
- uniqueItems: true
- items:
- type: "string"
- /jobs/{jobName}:
- post:
- tags:
- - "jobs"
- summary: "Start a new job instance by job name."
- description: ""
- operationId: "startJob"
- produces:
- - "application/json"
- parameters:
- - name: "jobName"
- in: "path"
- required: true
- type: "string"
- - name: "params"
- in: "query"
- required: false
- type: "string"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/JobExecutionInfoResponse"
- /jobs/{jobName}/executions:
- get:
- tags:
- - "jobs"
- summary: "Get the id values of all the running job instances."
- description: ""
- operationId: "getExecutionIdsByJobName"
- produces:
- - "application/json"
- parameters:
- - name: "jobName"
- in: "path"
- required: true
- type: "string"
- responses:
- 200:
- description: "successful operation"
- schema:
- type: "array"
- uniqueItems: true
- items:
- type: "integer"
- format: "int64"
- /jobs/{jobName}/info:
- get:
- tags:
- - "jobs"
- summary: "Get job details by job name."
- description: ""
- operationId: "getJobDetails"
- produces:
- - "application/json"
- parameters:
- - name: "page"
- in: "query"
- required: false
- type: "integer"
- default: 0
- format: "int32"
- - name: "size"
- in: "query"
- required: false
- type: "integer"
- default: 20
- format: "int32"
- - name: "jobName"
- in: "path"
- required: true
- type: "string"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/JobDetailsResponse"
- /jobs/{jobName}/{jobInstanceId}/executions:
- get:
- tags:
- - "jobs"
- summary: "Get execution for job instance."
- description: ""
- operationId: "getExecutionsForInstance"
- produces:
- - "application/json"
- parameters:
- - name: "jobName"
- in: "path"
- required: true
- type: "string"
- - name: "jobInstanceId"
- in: "path"
- required: true
- type: "integer"
- format: "int64"
- responses:
- 200:
- description: "successful operation"
- schema:
- type: "array"
- items:
- $ref: "#/definitions/JobExecutionInfoResponse"
- post:
- tags:
- - "jobs"
- summary: "Restart job instance."
- description: ""
- operationId: "restartJobInstance"
- produces:
- - "application/json"
- parameters:
- - in: "body"
- name: "body"
- required: false
- schema:
- $ref: "#/definitions/JobExecutionRestartRequest"
- responses:
- 200:
- description: "successful operation"
- schema:
- $ref: "#/definitions/JobExecutionInfoResponse"
-definitions:
- JobExecutionData:
- type: "object"
- properties:
- id:
- type: "integer"
- format: "int64"
- executionContext:
- $ref: "#/definitions/ExecutionContext"
- jobInstance:
- $ref: "#/definitions/JobInstance"
- jobId:
- type: "integer"
- format: "int64"
- jobParameters:
- $ref: "#/definitions/JobParameters"
- failureExceptions:
- type: "array"
- items:
- $ref: "#/definitions/Throwable"
- endTime:
- type: "string"
- format: "date-time"
- exitStatus:
- $ref: "#/definitions/ExitStatus"
- createTime:
- type: "string"
- format: "date-time"
- lastUpdated:
- type: "string"
- format: "date-time"
- jobConfigurationName:
- type: "string"
- startTime:
- type: "string"
- format: "date-time"
- status:
- type: "string"
- enum:
- - "COMPLETED"
- - "STARTING"
- - "STARTED"
- - "STOPPING"
- - "STOPPED"
- - "FAILED"
- - "ABANDONED"
- - "UNKNOWN"
- stepExecutionDataList:
- type: "array"
- items:
- $ref: "#/definitions/StepExecutionData"
- JobInstance:
- type: "object"
- properties:
- id:
- type: "integer"
- format: "int64"
- version:
- type: "integer"
- format: "int32"
- jobName:
- type: "string"
- instanceId:
- type: "integer"
- format: "int64"
- StepExecutionData:
- type: "object"
- properties:
- id:
- type: "integer"
- format: "int64"
- jobExecutionId:
- type: "integer"
- format: "int64"
- executionContext:
- $ref: "#/definitions/ExecutionContext"
- stepName:
- type: "string"
- terminateOnly:
- type: "boolean"
- default: false
- failureExceptions:
- type: "array"
- items:
- $ref: "#/definitions/Throwable"
- endTime:
- type: "string"
- format: "date-time"
- exitStatus:
- $ref: "#/definitions/ExitStatus"
- lastUpdated:
- type: "string"
- format: "date-time"
- commitCount:
- type: "integer"
- format: "int32"
- readCount:
- type: "integer"
- format: "int32"
- filterCount:
- type: "integer"
- format: "int32"
- writeCount:
- type: "integer"
- format: "int32"
- readSkipCount:
- type: "integer"
- format: "int32"
- writeSkipCount:
- type: "integer"
- format: "int32"
- processSkipCount:
- type: "integer"
- format: "int32"
- rollbackCount:
- type: "integer"
- format: "int32"
- startTime:
- type: "string"
- format: "date-time"
- status:
- type: "string"
- enum:
- - "COMPLETED"
- - "STARTING"
- - "STARTED"
- - "STOPPING"
- - "STOPPED"
- - "FAILED"
- - "ABANDONED"
- - "UNKNOWN"
- StackTraceElement:
- type: "object"
- properties:
- methodName:
- type: "string"
- fileName:
- type: "string"
- lineNumber:
- type: "integer"
- format: "int32"
- className:
- type: "string"
- nativeMethod:
- type: "boolean"
- default: false
- JobExecutionDetailsResponse:
- type: "object"
- properties:
- jobExecutionInfoResponse:
- $ref: "#/definitions/JobExecutionInfoResponse"
- stepExecutionInfoList:
- type: "array"
- items:
- $ref: "#/definitions/StepExecutionInfoResponse"
- StepExecutionContextResponse:
- type: "object"
- properties:
- executionContextMap:
- type: "object"
- additionalProperties:
- type: "object"
- jobExecutionId:
- type: "integer"
- format: "int64"
- stepExecutionId:
- type: "integer"
- format: "int64"
- stepName:
- type: "string"
- StepExecutionProgress:
- type: "object"
- properties:
- estimatedPercentCompleteMessage:
- $ref: "#/definitions/MessageSourceResolvable"
- estimatedPercentComplete:
- type: "number"
- format: "double"
- ExitStatus:
- type: "object"
- properties:
- exitCode:
- type: "string"
- exitDescription:
- type: "string"
- running:
- type: "boolean"
- default: false
- ExecutionContextResponse:
- type: "object"
- properties:
- jobExecutionId:
- type: "integer"
- format: "int64"
- executionContextMap:
- type: "object"
- additionalProperties:
- type: "object"
- StepExecutionHistory:
- type: "object"
- properties:
- stepName:
- type: "string"
- count:
- type: "integer"
- format: "int32"
- commitCount:
- $ref: "#/definitions/CumulativeHistory"
- rollbackCount:
- $ref: "#/definitions/CumulativeHistory"
- readCount:
- $ref: "#/definitions/CumulativeHistory"
- writeCount:
- $ref: "#/definitions/CumulativeHistory"
- filterCount:
- $ref: "#/definitions/CumulativeHistory"
- readSkipCount:
- $ref: "#/definitions/CumulativeHistory"
- writeSkipCount:
- $ref: "#/definitions/CumulativeHistory"
- processSkipCount:
- $ref: "#/definitions/CumulativeHistory"
- duration:
- $ref: "#/definitions/CumulativeHistory"
- durationPerRead:
- $ref: "#/definitions/CumulativeHistory"
- TimeZone:
- type: "object"
- properties:
- displayName:
- type: "string"
- id:
- type: "string"
- dstsavings:
- type: "integer"
- format: "int32"
- rawOffset:
- type: "integer"
- format: "int32"
- MessageSourceResolvable:
- type: "object"
- properties:
- arguments:
- type: "array"
- items:
- type: "object"
- codes:
- type: "array"
- items:
- type: "string"
- defaultMessage:
- type: "string"
- ExecutionContext:
- type: "object"
- properties:
- dirty:
- type: "boolean"
- default: false
- empty:
- type: "boolean"
- default: false
- StepExecutionInfoResponse:
- type: "object"
- properties:
- id:
- type: "integer"
- format: "int64"
- jobExecutionId:
- type: "integer"
- format: "int64"
- jobName:
- type: "string"
- name:
- type: "string"
- startDate:
- type: "string"
- startTime:
- type: "string"
- duration:
- type: "string"
- durationMillis:
- type: "integer"
- format: "int64"
- exitCode:
- type: "string"
- status:
- type: "string"
- JobExecutionInfoResponse:
- type: "object"
- properties:
- id:
- type: "integer"
- format: "int64"
- stepExecutionCount:
- type: "integer"
- format: "int32"
- jobId:
- type: "integer"
- format: "int64"
- jobName:
- type: "string"
- startDate:
- type: "string"
- startTime:
- type: "string"
- duration:
- type: "string"
- jobExecutionData:
- $ref: "#/definitions/JobExecutionData"
- jobParameters:
- type: "object"
- additionalProperties:
- type: "object"
- jobParametersString:
- type: "string"
- restartable:
- type: "boolean"
- default: false
- abandonable:
- type: "boolean"
- default: false
- stoppable:
- type: "boolean"
- default: false
- timeZone:
- $ref: "#/definitions/TimeZone"
- JobInfo:
- type: "object"
- properties:
- name:
- type: "string"
- executionCount:
- type: "integer"
- format: "int32"
- launchable:
- type: "boolean"
- default: false
- incrementable:
- type: "boolean"
- default: false
- jobInstanceId:
- type: "integer"
- format: "int64"
- JobExecutionRestartRequest:
- type: "object"
- properties:
- jobName:
- type: "string"
- jobInstanceId:
- type: "integer"
- format: "int64"
- operation:
- type: "string"
- enum:
- - "RESTART"
- Throwable:
- type: "object"
- properties:
- cause:
- $ref: "#/definitions/Throwable"
- stackTrace:
- type: "array"
- items:
- $ref: "#/definitions/StackTraceElement"
- message:
- type: "string"
- localizedMessage:
- type: "string"
- suppressed:
- type: "array"
- items:
- $ref: "#/definitions/Throwable"
- JobParameters:
- type: "object"
- properties:
- parameters:
- type: "object"
- additionalProperties:
- $ref: "#/definitions/JobParameter"
- empty:
- type: "boolean"
- default: false
- CumulativeHistory:
- type: "object"
- properties:
- count:
- type: "integer"
- format: "int32"
- min:
- type: "number"
- format: "double"
- max:
- type: "number"
- format: "double"
- standardDeviation:
- type: "number"
- format: "double"
- mean:
- type: "number"
- format: "double"
- JobInstanceDetailsResponse:
- type: "object"
- properties:
- jobInstance:
- $ref: "#/definitions/JobInstance"
- jobExecutionInfoResponseList:
- type: "array"
- items:
- $ref: "#/definitions/JobExecutionInfoResponse"
- JobParameter:
- type: "object"
- properties:
- identifying:
- type: "boolean"
- default: false
- value:
- type: "object"
- type:
- type: "string"
- enum:
- - "STRING"
- - "DATE"
- - "LONG"
- - "DOUBLE"
- StepExecutionProgressResponse:
- type: "object"
- properties:
- stepExecutionProgress:
- $ref: "#/definitions/StepExecutionProgress"
- stepExecutionHistory:
- $ref: "#/definitions/StepExecutionHistory"
- stepExecutionInfoResponse:
- $ref: "#/definitions/StepExecutionInfoResponse"
- JobDetailsResponse:
- type: "object"
- properties:
- jobInfo:
- $ref: "#/definitions/JobInfo"
- jobInstanceDetailsResponseList:
- type: "array"
- items:
- $ref: "#/definitions/JobInstanceDetailsResponse"
diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-1.png b/ambari-infra/ambari-infra-manager/docs/images/batch-1.png
deleted file mode 100644
index d763852cffe..00000000000
Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-1.png and /dev/null differ
diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-2.png b/ambari-infra/ambari-infra-manager/docs/images/batch-2.png
deleted file mode 100644
index 1de34795e86..00000000000
Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-2.png and /dev/null differ
diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-3.png b/ambari-infra/ambari-infra-manager/docs/images/batch-3.png
deleted file mode 100644
index 7f1123c7094..00000000000
Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-3.png and /dev/null differ
diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-4.png b/ambari-infra/ambari-infra-manager/docs/images/batch-4.png
deleted file mode 100644
index beb610ad94a..00000000000
Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-4.png and /dev/null differ
diff --git a/ambari-infra/ambari-infra-manager/pom.xml b/ambari-infra/ambari-infra-manager/pom.xml
deleted file mode 100644
index d25440f9967..00000000000
--- a/ambari-infra/ambari-infra-manager/pom.xml
+++ /dev/null
@@ -1,465 +0,0 @@
-
-
-
-
- ambari-infra
- org.apache.ambari
- 2.0.0.0-SNAPSHOT
-
- Ambari Infra Manager
- http://maven.apache.org
- 4.0.0
-
- ambari-infra-manager
-
-
- 4.3.17.RELEASE
- 4.2.4.RELEASE
- 2.2.0.RELEASE
- 2.25.1
- 9.4.11.v20180605
- 3.0.7.RELEASE
- 3.8.11.2
- 2.0.2.RELEASE
- 1.5.13.RELEASE
- 1.5.16
- 0.6.0
-
-
-
- ambari-infra-manager_${project.version}
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 3.0
-
- ${jdk.version}
- ${jdk.version}
-
-
-
- org.codehaus.mojo
- exec-maven-plugin
- 1.2.1
-
-
-
- java
-
-
-
-
- org.apache.ambari.infra.InfraManager
-
-
-
- org.springframework.boot
- spring-boot-maven-plugin
- ${spring-boot.version}
-
-
- org.apache.maven.plugins
- maven-dependency-plugin
- 2.8
-
-
- copy-dependencies
- package
-
- copy-dependencies
-
-
- true
- ${basedir}/target/libs
- false
- false
- true
-
-
-
-
-
- org.apache.maven.plugins
- maven-antrun-plugin
- 1.7
-
-
- package
-
-
-
-
-
-
-
-
- run
-
-
-
-
-
-
-
-
-
- junit
- junit
- test
-
-
- org.easymock
- easymock
- 3.4
- test
-
-
- org.hamcrest
- hamcrest-all
- 1.3
- test
-
-
-
- org.springframework
- spring-beans
- ${spring.version}
-
-
- org.springframework
- spring-context
- ${spring.version}
-
-
- org.springframework
- spring-test
- ${spring.version}
-
-
-
- org.springframework.security
- spring-security-web
- ${spring.security.version}
-
-
- org.springframework.security
- spring-security-core
- ${spring.security.version}
-
-
- org.springframework.security
- spring-security-config
- ${spring.security.version}
-
-
- org.springframework.security
- spring-security-ldap
- ${spring.security.version}
-
-
-
- org.springframework.boot
- spring-boot-starter
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-logging
-
-
-
-
- org.springframework.boot
- spring-boot-starter-log4j2
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-web
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-security
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-actuator
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-jetty
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-jersey
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-freemarker
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-autoconfigure
- ${spring-boot.version}
-
-
- org.springframework.boot
- spring-boot-configuration-processor
- ${spring-boot.version}
-
-
- org.glassfish.jersey.media
- jersey-media-json-jettison
- ${jersey.version}
-
-
-
- org.apache.solr
- solr-solrj
- ${solr.version}
-
-
- org.apache.solr
- solr-core
- ${solr.version}
-
-
- *
- *
-
-
-
-
- org.apache.lucene
- lucene-core
- ${solr.version}
-
-
- org.apache.lucene
- lucene-analyzers-common
- ${solr.version}
-
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop.version}
-
-
- javax.servlet
- servlet-api
-
-
- org.mortbay.jetty
- jetty
-
-
- org.mortbay.jetty
- jetty-util
-
-
- com.sun.jersey
- jetty-util
-
-
- com.sun.jersey
- jersey-core
-
-
- com.sun.jersey
- jersey-json
-
-
- com.sun.jersey
- jersey-server
-
-
- org.slf4j
- slf4j-log4j12
-
-
- org.eclipse.jetty
- jetty-server
-
-
- org.eclipse.jetty
- jetty-util
-
-
- org.eclipse.jetty
- jetty-servlet
-
-
- org.eclipse.jetty
- jetty-security
-
-
-
-
- org.apache.hadoop
- hadoop-hdfs-client
- ${hadoop.version}
-
-
- com.fasterxml.jackson.core
- jackson-annotations
-
-
-
-
- commons-io
- commons-io
- 2.4
-
-
- commons-cli
- commons-cli
-
-
- commons-codec
- commons-codec
-
-
- commons-lang
- commons-lang
-
-
- org.springframework.security.kerberos
- spring-security-kerberos-core
- 1.0.1.RELEASE
-
-
- org.springframework.security.kerberos
- spring-security-kerberos-web
- 1.0.1.RELEASE
-
-
- org.springframework.security.kerberos
- spring-security-kerberos-client
- 1.0.1.RELEASE
-
-
- com.thoughtworks.xstream
- xstream
- 1.4.10
-
-
- cglib
- cglib
- 3.2.4
-
-
- io.swagger
- swagger-annotations
- ${swagger.version}
-
-
- io.swagger
- swagger-core
- ${swagger.version}
-
-
- io.swagger
- swagger-jersey2-jaxrs
- ${swagger.version}
-
-
- javax.ws.rs
- jsr311-api
-
-
-
-
- io.swagger
- swagger-models
- ${swagger.version}
-
-
- org.webjars
- swagger-ui
- 2.2.2
-
-
- org.springframework
- spring-context-support
- ${spring.version}
-
-
- org.springframework.batch
- spring-batch-core
- ${spring-batch.version}
-
-
- org.springframework
- spring-jdbc
- ${spring.version}
-
-
- io.jsonwebtoken
- jjwt
- ${jjwt.version}
-
-
- org.xerial
- sqlite-jdbc
- ${sqlite.version}
-
-
- org.springframework.batch
- spring-batch-admin-manager
- 1.3.1.RELEASE
-
-
- org.slf4j
- slf4j-log4j12
-
-
-
-
- guava
- com.google.guava
- 20.0
-
-
- com.amazonaws
- aws-java-sdk-s3
- 1.11.5
-
-
- org.apache.commons
- commons-csv
- 1.5
-
-
- org.springframework.boot
- spring-boot-starter-tomcat
- ${spring-boot.version}
- provided
-
-
-
-
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
deleted file mode 100644
index 938cfd0152b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra;
-
-import org.springframework.boot.Banner;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration;
-import org.springframework.boot.autoconfigure.data.rest.RepositoryRestMvcAutoConfiguration;
-import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
-import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration;
-import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
-import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration;
-import org.springframework.boot.builder.SpringApplicationBuilder;
-import org.springframework.boot.system.ApplicationPidFileWriter;
-
-@SpringBootApplication(
- scanBasePackages = {"org.apache.ambari.infra"},
- exclude = {
- RepositoryRestMvcAutoConfiguration.class,
- WebMvcAutoConfiguration.class,
- BatchAutoConfiguration.class,
- SecurityAutoConfiguration.class,
- DataSourceAutoConfiguration.class,
- SolrAutoConfiguration.class
- }
-)
-public class InfraManager {
-
- public static void main(String[] args) {
- String pidFile = System.getenv("INFRA_MANAGER_PID_FILE") == null ? "infra-manager.pid" : System.getenv("INFRA_MANAGER_PID_FILE");
- new SpringApplicationBuilder(InfraManager.class)
- .bannerMode(Banner.Mode.OFF)
- .listeners(new ApplicationPidFileWriter(pidFile))
- .web(true)
- .run(args);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java
deleted file mode 100644
index 4c76742eff7..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import io.swagger.jaxrs.config.BeanConfig;
-import io.swagger.jaxrs.listing.ApiListingResource;
-import io.swagger.jaxrs.listing.SwaggerSerializers;
-import io.swagger.models.Info;
-import io.swagger.models.License;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-public class InfraManagerApiDocConfig {
-
- private static final String DESCRIPTION = "Manager component for Ambari Infra";
- private static final String VERSION = "1.0.0";
- private static final String TITLE = "Infra Manager REST API";
- private static final String LICENSE = "Apache 2.0";
- private static final String LICENSE_URL = "http://www.apache.org/licenses/LICENSE-2.0.html";
- private static final String RESOURCE_PACKAGE = "org.apache.ambari.infra.rest";
- private static final String BASE_PATH = "/api/v1";
-
- @Bean
- public ApiListingResource apiListingResource() {
- return new ApiListingResource();
- }
-
- @Bean
- public SwaggerSerializers swaggerSerializers() {
- return new SwaggerSerializers();
- }
-
- @Bean
- public BeanConfig swaggerConfig() {
- BeanConfig beanConfig = new BeanConfig();
- beanConfig.setSchemes(new String[]{"http", "https"});
- beanConfig.setBasePath(BASE_PATH);
- beanConfig.setTitle(TITLE);
- beanConfig.setDescription(DESCRIPTION);
- beanConfig.setLicense(LICENSE);
- beanConfig.setLicenseUrl(LICENSE_URL);
- beanConfig.setScan(true);
- beanConfig.setVersion(VERSION);
- beanConfig.setResourcePackage(RESOURCE_PACKAGE);
-
- License license = new License();
- license.setName(LICENSE);
- license.setUrl(LICENSE_URL);
-
- Info info = new Info();
- info.setDescription(DESCRIPTION);
- info.setTitle(TITLE);
- info.setVersion(VERSION);
- info.setLicense(license);
- beanConfig.setInfo(info);
- return beanConfig;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
deleted file mode 100644
index 86059a24a59..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.ComponentScan;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
-
-@Configuration
-@ComponentScan("org.apache.ambari.infra")
-@PropertySource(value = {"classpath:infra-manager.properties"})
-public class InfraManagerConfig {
-
- @Bean
- public static PropertySourcesPlaceholderConfigurer propertyConfigurer() {
- return new PropertySourcesPlaceholderConfigurer();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerDataConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerDataConfig.java
deleted file mode 100644
index b5b215e31ee..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerDataConfig.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-public class InfraManagerDataConfig {
-
- @Value("${infra-manager.server.data.folder:/opt/ambari-infra-manager/data}")
- private String dataFolder;
-
- public String getDataFolder() {
- return dataFolder;
- }
-
- public void setDataFolder(String dataFolder) {
- this.dataFolder = dataFolder;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java
deleted file mode 100644
index 3a4c00f8656..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import org.apache.ambari.infra.rest.JobResource;
-import org.glassfish.jersey.jackson.JacksonFeature;
-import org.glassfish.jersey.server.ResourceConfig;
-import org.glassfish.jersey.servlet.ServletProperties;
-
-import javax.ws.rs.ApplicationPath;
-
-@ApplicationPath("/api/v1")
-public class InfraManagerJerseyResourceConfig extends ResourceConfig {
-
- public InfraManagerJerseyResourceConfig() {
- packages(JobResource.class.getPackage().getName());
- register(JacksonFeature.class);
- property(ServletProperties.FILTER_FORWARD_ON_404, true);
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerSchedulingConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerSchedulingConfig.java
deleted file mode 100644
index bb495a20691..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerSchedulingConfig.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
-
-@Configuration
-public class InfraManagerSchedulingConfig {
- @Bean
- public TaskScheduler taskScheduler() {
- return new ThreadPoolTaskScheduler();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java
deleted file mode 100644
index 06aea79b698..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.glassfish.jersey.servlet.ServletContainer;
-import org.glassfish.jersey.servlet.ServletProperties;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.autoconfigure.web.ServerProperties;
-import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
-import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainer;
-import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
-import org.springframework.boot.web.servlet.ServletRegistrationBean;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import javax.inject.Inject;
-
-@Configuration
-public class InfraManagerServletConfig {
-
- private static final Integer SESSION_TIMEOUT = 60 * 30;
- private static final String INFRA_MANAGER_SESSIONID = "INFRAMANAGER_SESSIONID";
- private static final String INFRA_MANAGER_APPLICATION_NAME = "infra-manager";
-
- @Value("${infra-manager.server.port:61890}")
- private int port;
-
- @Inject
- private ServerProperties serverProperties;
-
- @Inject
- private InfraManagerDataConfig infraManagerDataConfig;
-
-
- @Bean
- public ServletRegistrationBean jerseyServlet() {
- ServletRegistrationBean jerseyServletBean = new ServletRegistrationBean(new ServletContainer(), "/api/v1/*");
- jerseyServletBean.addInitParameter(ServletProperties.JAXRS_APPLICATION_CLASS, InfraManagerJerseyResourceConfig.class.getName());
- return jerseyServletBean;
- }
-
- @Bean
- public ServletRegistrationBean dataServlet() {
- ServletRegistrationBean dataServletBean = new ServletRegistrationBean(new DefaultServlet(), "/files/*");
- dataServletBean.addInitParameter("dirAllowed","true");
- dataServletBean.addInitParameter("pathInfoOnly","true");
- dataServletBean.addInitParameter("resourceBase", infraManagerDataConfig.getDataFolder());
- return dataServletBean;
- }
-
- @Bean
- public EmbeddedServletContainerFactory containerFactory() {
- final JettyEmbeddedServletContainerFactory jettyEmbeddedServletContainerFactory = new JettyEmbeddedServletContainerFactory() {
- @Override
- protected JettyEmbeddedServletContainer getJettyEmbeddedServletContainer(Server server) {
- return new JettyEmbeddedServletContainer(server);
- }
- };
- jettyEmbeddedServletContainerFactory.setSessionTimeout(SESSION_TIMEOUT);
- serverProperties.getSession().getCookie().setName(INFRA_MANAGER_SESSIONID);
- serverProperties.setDisplayName(INFRA_MANAGER_APPLICATION_NAME);
- jettyEmbeddedServletContainerFactory.setPort(port);
- return jettyEmbeddedServletContainerFactory;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java
deleted file mode 100644
index f0cd3cf3ece..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf;
-
-import org.springframework.context.annotation.Configuration;
-import org.springframework.web.servlet.config.annotation.EnableWebMvc;
-import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
-import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
-import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
-
-@EnableWebMvc
-@Configuration
-public class StaticResourceConfiguration extends WebMvcConfigurerAdapter {
-
- private static final String[] CLASSPATH_RESOURCE_LOCATIONS = {
- "classpath:/static/", "classpath:/swagger/","classpath:META-INF/resources/webjars/"
- };
-
- @Override
- public void addResourceHandlers(ResourceHandlerRegistry registry) {
- registry.addResourceHandler("/**")
- .addResourceLocations(CLASSPATH_RESOURCE_LOCATIONS);
- }
-
- @Override
- public void addViewControllers(ViewControllerRegistry registry) {
- registry.addViewController("/").setViewName(
- "forward:/index.html");
- registry.addViewController("/docs").setViewName(
- "forward:/swagger.html");
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
deleted file mode 100644
index 706ed8b4f11..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.batch;
-
-import org.springframework.batch.admin.service.JdbcSearchableJobExecutionDao;
-import org.springframework.batch.admin.service.JdbcSearchableJobInstanceDao;
-import org.springframework.batch.admin.service.JdbcSearchableStepExecutionDao;
-import org.springframework.batch.admin.service.JobService;
-import org.springframework.batch.admin.service.SearchableJobExecutionDao;
-import org.springframework.batch.admin.service.SearchableJobInstanceDao;
-import org.springframework.batch.admin.service.SearchableStepExecutionDao;
-import org.springframework.batch.admin.service.SimpleJobService;
-import org.springframework.batch.core.configuration.JobRegistry;
-import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
-import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
-import org.springframework.batch.core.explore.JobExplorer;
-import org.springframework.batch.core.explore.support.JobExplorerFactoryBean;
-import org.springframework.batch.core.launch.JobLauncher;
-import org.springframework.batch.core.launch.JobOperator;
-import org.springframework.batch.core.launch.support.SimpleJobLauncher;
-import org.springframework.batch.core.launch.support.SimpleJobOperator;
-import org.springframework.batch.core.repository.ExecutionContextSerializer;
-import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.repository.dao.ExecutionContextDao;
-import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer;
-import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao;
-import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
-import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.core.io.Resource;
-import org.springframework.core.task.SimpleAsyncTaskExecutor;
-import org.springframework.jdbc.core.JdbcTemplate;
-import org.springframework.jdbc.datasource.DriverManagerDataSource;
-import org.springframework.jdbc.datasource.init.DataSourceInitializer;
-import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
-import org.springframework.scheduling.annotation.EnableAsync;
-import org.springframework.scheduling.annotation.EnableScheduling;
-import org.springframework.transaction.PlatformTransactionManager;
-
-import javax.inject.Inject;
-import javax.sql.DataSource;
-
-@Configuration
-@EnableBatchProcessing
-@EnableScheduling
-@EnableAsync
-public class InfraManagerBatchConfig {
-
- @Value("classpath:org/springframework/batch/core/schema-drop-sqlite.sql")
- private Resource dropRepositoryTables;
-
- @Value("classpath:org/springframework/batch/core/schema-sqlite.sql")
- private Resource dataRepositorySchema;
-
- @Value("${infra-manager.batch.db.init:false}")
- private boolean dropDatabaseOnStartup;
-
- @Value("${infra-manager.batch.db.file:/etc/ambari-inra-manager/conf/repository.db}")
- private String sqliteDbFileLocation;
-
- @Value("${infra-manager.batch.db.username}")
- private String databaseUsername;
-
- @Value("${infra-manager.batch.db.password}")
- private String databasePassword;
-
- @Inject
- private JobRegistry jobRegistry;
-
- @Bean
- public DataSource dataSource() {
- DriverManagerDataSource dataSource = new DriverManagerDataSource();
- dataSource.setDriverClassName("org.sqlite.JDBC");
- dataSource.setUrl("jdbc:sqlite:" + sqliteDbFileLocation);
- dataSource.setUsername(databaseUsername);
- dataSource.setPassword(databasePassword);
- return dataSource;
- }
-
- @Bean
- public DataSourceInitializer dataSourceInitializer() {
- ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
- if (dropDatabaseOnStartup) {
- databasePopulator.addScript(dropRepositoryTables);
- databasePopulator.setIgnoreFailedDrops(true);
- }
- databasePopulator.addScript(dataRepositorySchema);
- databasePopulator.setContinueOnError(true);
-
- DataSourceInitializer initializer = new DataSourceInitializer();
- initializer.setDataSource(dataSource());
- initializer.setDatabasePopulator(databasePopulator);
-
- return initializer;
- }
-
- @Bean
- public ExecutionContextSerializer executionContextSerializer() {
- return new Jackson2ExecutionContextStringSerializer();
- }
-
- @Bean
- public JobRepository jobRepository() throws Exception {
- JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
- factory.setDataSource(dataSource());
- factory.setTransactionManager(transactionManager());
- factory.setSerializer(executionContextSerializer());
- factory.afterPropertiesSet();
- return factory.getObject();
- }
-
- @Bean
- public PlatformTransactionManager transactionManager() {
- return new ResourcelessTransactionManager();
- }
-
- @Bean(name = "jobLauncher")
- public JobLauncher jobLauncher() throws Exception {
- SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
- jobLauncher.setJobRepository(jobRepository());
- jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
- jobLauncher.afterPropertiesSet();
- return jobLauncher;
- }
-
- @Bean
- public JobOperator jobOperator() throws Exception {
- SimpleJobOperator jobOperator = new SimpleJobOperator();
- jobOperator.setJobExplorer(jobExplorer());
- jobOperator.setJobLauncher(jobLauncher());
- jobOperator.setJobRegistry(jobRegistry);
- jobOperator.setJobRepository(jobRepository());
- return jobOperator;
- }
-
- @Bean
- public JobExplorer jobExplorer() throws Exception {
- JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean();
- factoryBean.setSerializer(executionContextSerializer());
- factoryBean.setDataSource(dataSource());
- factoryBean.afterPropertiesSet();
- return factoryBean.getObject();
- }
-
- @Bean
- public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor() {
- JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor();
- jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry);
- return jobRegistryBeanPostProcessor;
- }
-
- @Bean
- public JdbcTemplate jdbcTemplate() {
- return new JdbcTemplate(dataSource());
- }
-
- @Bean
- public SearchableJobInstanceDao searchableJobInstanceDao() {
- JdbcSearchableJobInstanceDao dao = new JdbcSearchableJobInstanceDao();
- dao.setJdbcTemplate(jdbcTemplate());
- return dao;
- }
-
- @Bean
- public SearchableJobExecutionDao searchableJobExecutionDao() {
- JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao();
- dao.setJdbcTemplate(jdbcTemplate());
- dao.setDataSource(dataSource());
- return dao;
- }
-
- @Bean
- public SearchableStepExecutionDao searchableStepExecutionDao() {
- JdbcSearchableStepExecutionDao dao = new JdbcSearchableStepExecutionDao();
- dao.setDataSource(dataSource());
- dao.setJdbcTemplate(jdbcTemplate());
- return dao;
- }
-
- @Bean
- public ExecutionContextDao executionContextDao() {
- JdbcExecutionContextDao dao = new JdbcExecutionContextDao();
- dao.setSerializer(executionContextSerializer());
- dao.setJdbcTemplate(jdbcTemplate());
- return dao;
- }
-
- @Bean
- public JobService jobService() throws Exception {
- return new
- SimpleJobService(searchableJobInstanceDao(), searchableJobExecutionDao(), searchableStepExecutionDao(),
- jobRepository(), jobLauncher(), jobRegistry, executionContextDao());
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java
deleted file mode 100644
index 6d32963ecc3..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.security;
-
-import java.util.Optional;
-
-public class CompositePasswordStore implements PasswordStore {
- private PasswordStore[] passwordStores;
-
- public CompositePasswordStore(PasswordStore... passwordStores) {
- this.passwordStores = passwordStores;
- }
-
- @Override
- public Optional getPassword(String propertyName) {
- for (PasswordStore passwordStore : passwordStores) {
- Optional optionalPassword = passwordStore.getPassword(propertyName);
- if (optionalPassword.isPresent())
- return optionalPassword;
- }
- return Optional.empty();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
deleted file mode 100644
index 9e1a17f8a06..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.security;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Optional;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.commons.lang3.ArrayUtils.isNotEmpty;
-
-public class HadoopCredentialStore implements PasswordStore {
- private static final Logger LOG = LoggerFactory.getLogger(InfraManagerSecurityConfig.class);
- public static final String CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY = "hadoop.security.credential.provider.path";
-
- private final String credentialStoreProviderPath;
-
- public HadoopCredentialStore(String credentialStoreProviderPath) {
- this.credentialStoreProviderPath = credentialStoreProviderPath;
- }
-
- @Override
- public Optional getPassword(String propertyName) {
- try {
- if (isBlank(credentialStoreProviderPath)) {
- return Optional.empty();
- }
-
- org.apache.hadoop.conf.Configuration config = new org.apache.hadoop.conf.Configuration();
- config.set(CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY, credentialStoreProviderPath);
- char[] passwordChars = config.getPassword(propertyName);
- return (isNotEmpty(passwordChars)) ? Optional.of(new String(passwordChars)) : Optional.empty();
- } catch (Exception e) {
- LOG.warn("Could not load password {} from credential store.", propertyName);
- return Optional.empty();
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
deleted file mode 100644
index 45b79b36695..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.security;
-
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import static org.apache.ambari.infra.conf.security.HadoopCredentialStore.CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY;
-
-@Configuration
-public class InfraManagerSecurityConfig {
-
- @Value("${"+ CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY + ":}")
- private String credentialStoreProviderPath;
-
-
- @Bean
- public PasswordStore passwords() {
- return new CompositePasswordStore(new HadoopCredentialStore(credentialStoreProviderPath), new SecurityEnvironment());
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java
deleted file mode 100644
index 19848feac86..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.security;
-
-import java.util.Optional;
-
-public interface PasswordStore {
- Optional getPassword(String propertyName);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
deleted file mode 100644
index 8e3387b4f51..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.conf.security;
-
-import java.util.Optional;
-
-public class SecurityEnvironment implements PasswordStore {
- @Override
- public Optional getPassword(String propertyName) {
- return Optional.ofNullable(System.getenv(propertyName));
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
deleted file mode 100644
index e536d9a39e8..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.doc;
-
-import io.swagger.jaxrs.config.BeanConfig;
-import io.swagger.models.Swagger;
-import io.swagger.util.Yaml;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.annotation.PostConstruct;
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-@Named
-public class InfraManagerApiDocStorage {
-
- private static final Logger LOG = LoggerFactory.getLogger(InfraManagerApiDocStorage.class);
-
- private final Map swaggerMap = new ConcurrentHashMap<>();
-
- @Inject
- private BeanConfig beanConfig;
-
- @PostConstruct
- private void postConstruct() {
- Thread loadApiDocThread = new Thread("load_swagger_api_doc") {
- @Override
- public void run() {
- LOG.info("Start thread to scan REST API doc from endpoints.");
- Swagger swagger = beanConfig.getSwagger();
- beanConfig.configure(swagger);
- beanConfig.scanAndRead();
- setSwagger(swagger);
- try {
- if (swagger != null) {
- String yaml = Yaml.mapper().writeValueAsString(swagger);
- StringBuilder b = new StringBuilder();
- String[] parts = yaml.split("\n");
- for (String part : parts) {
- b.append(part);
- b.append("\n");
- }
- setSwaggerYaml(b.toString());
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- LOG.info("Scanning REST API endpoints and generating docs has been successful.");
- }
- };
- loadApiDocThread.setDaemon(true);
- loadApiDocThread.start();
- }
-
- public Swagger getSwagger() {
- return (Swagger) swaggerMap.get("swaggerObject");
- }
-
- public void setSwagger(final Swagger swagger) {
- swaggerMap.put("swaggerObject", swagger);
- }
-
- public void setSwaggerYaml(final String swaggerYaml) {
- swaggerMap.put("swaggerYaml", swaggerYaml);
- }
-
- public String getSwaggerYaml() {
- return (String) swaggerMap.get("swaggerYaml");
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
deleted file mode 100644
index 02a688560db..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
-import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
-import org.springframework.batch.core.job.builder.JobBuilder;
-import org.springframework.boot.context.event.ApplicationReadyEvent;
-import org.springframework.context.event.EventListener;
-
-import javax.annotation.PostConstruct;
-import java.util.Map;
-
-public abstract class AbstractJobsConfiguration> {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractJobsConfiguration.class);
-
- private final Map propertyMap;
- private final JobScheduler scheduler;
- private final JobBuilderFactory jobs;
- private final JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor;
-
- protected AbstractJobsConfiguration(Map propertyMap, JobScheduler scheduler, JobBuilderFactory jobs, JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor) {
- this.propertyMap = propertyMap;
- this.scheduler = scheduler;
- this.jobs = jobs;
- this.jobRegistryBeanPostProcessor = jobRegistryBeanPostProcessor;
- }
-
- @PostConstruct
- public void registerJobs() {
- if (propertyMap == null)
- return;
-
- propertyMap.keySet().stream()
- .filter(key -> propertyMap.get(key).isEnabled())
- .forEach(jobName -> {
- try {
- propertyMap.get(jobName).validate(jobName);
- LOG.info("Registering job {}", jobName);
- JobBuilder jobBuilder = jobs.get(jobName).listener(new JobsPropertyMap<>(propertyMap));
- Job job = buildJob(jobBuilder);
- jobRegistryBeanPostProcessor.postProcessAfterInitialization(job, jobName);
- }
- catch (Exception e) {
- LOG.warn("Unable to register job " + jobName, e);
- propertyMap.get(jobName).setEnabled(false);
- }
- });
- }
-
- @EventListener(ApplicationReadyEvent.class)
- public void scheduleJobs() {
- if (propertyMap == null)
- return;
-
- propertyMap.keySet().stream()
- .filter(key -> propertyMap.get(key).isEnabled())
- .forEach(jobName -> propertyMap.get(jobName).scheduling().ifPresent(
- schedulingProperties -> scheduler.schedule(jobName, schedulingProperties)));
- }
-
- protected abstract Job buildJob(JobBuilder jobBuilder);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/CloseableIterator.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/CloseableIterator.java
deleted file mode 100644
index 5fa29b00992..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/CloseableIterator.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import java.util.Iterator;
-
-public interface CloseableIterator extends Iterator, AutoCloseable {
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobConfigurationException.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobConfigurationException.java
deleted file mode 100644
index 8c16daac0ef..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobConfigurationException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-public class JobConfigurationException extends RuntimeException {
- public JobConfigurationException(String message, Exception ex) {
- super(message, ex);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepository.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepository.java
deleted file mode 100644
index eb7f7172897..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepository.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import org.springframework.batch.core.StepExecution;
-
-public interface JobContextRepository {
- StepExecution getStepExecution(Long jobExecutionId, Long id);
- void updateExecutionContext(StepExecution stepExecution);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepositoryImpl.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepositoryImpl.java
deleted file mode 100644
index fbb256f8f86..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepositoryImpl.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import org.springframework.batch.admin.service.JobService;
-import org.springframework.batch.admin.service.NoSuchStepExecutionException;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.launch.NoSuchJobExecutionException;
-import org.springframework.batch.core.repository.JobRepository;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-
-@Named
-public class JobContextRepositoryImpl implements JobContextRepository {
-
- @Inject
- private JobRepository jobRepository;
- @Inject
- private JobService jobService;
-
-
- @Override
- public StepExecution getStepExecution(Long jobExecutionId, Long id) {
- try {
- return jobService.getStepExecution(jobExecutionId, id);
- } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void updateExecutionContext(StepExecution stepExecution) {
- jobRepository.updateExecutionContext(stepExecution);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java
deleted file mode 100644
index 79406d017e5..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.batch.core.JobParameters;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.Optional;
-
-public abstract class JobProperties> {
-
- private SchedulingProperties scheduling;
- private final Class clazz;
- private boolean enabled;
-
- protected JobProperties(Class clazz) {
- this.clazz = clazz;
- }
-
- public SchedulingProperties getScheduling() {
- return scheduling;
- }
-
- public Optional scheduling() {
- if (scheduling != null && scheduling.isEnabled())
- return Optional.of(scheduling);
- return Optional.empty();
- }
-
- public void setScheduling(SchedulingProperties scheduling) {
- this.scheduling = scheduling;
- }
-
- public T deepCopy() {
- try {
- ObjectMapper objectMapper = new ObjectMapper();
- String json = objectMapper.writeValueAsString(this);
- return objectMapper.readValue(json, clazz);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- public abstract void apply(JobParameters jobParameters);
-
- public abstract void validate();
-
- public void validate(String jobName) {
- try {
- validate();
- }
- catch (Exception ex) {
- throw new JobConfigurationException(String.format("Configuration of job %s is invalid: %s!", jobName, ex.getMessage()), ex);
- }
- }
-
- public boolean isEnabled() {
- return enabled;
- }
-
- public void setEnabled(boolean enabled) {
- this.enabled = enabled;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
deleted file mode 100644
index 324c0b36a89..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import org.apache.ambari.infra.manager.Jobs;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobParametersBuilder;
-import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.launch.NoSuchJobException;
-import org.springframework.batch.core.launch.NoSuchJobExecutionException;
-import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
-import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
-import org.springframework.batch.core.repository.JobRestartException;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.time.Duration;
-import java.time.OffsetDateTime;
-
-import static org.apache.ambari.infra.job.archive.FileNameSuffixFormatter.SOLR_DATETIME_FORMATTER;
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-@Named
-public class JobScheduler {
- private static final Logger LOG = LoggerFactory.getLogger(JobScheduler.class);
-
- private final TaskScheduler scheduler;
- private final Jobs jobs;
-
- @Inject
- public JobScheduler(TaskScheduler scheduler, Jobs jobs) {
- this.scheduler = scheduler;
- this.jobs = jobs;
- }
-
- public void schedule(String jobName, SchedulingProperties schedulingProperties) {
- try {
- jobs.lastRun(jobName).ifPresent(this::restartIfFailed);
- } catch (NoSuchJobException | NoSuchJobExecutionException e) {
- throw new RuntimeException(e);
- }
-
- scheduler.schedule(() -> launchJob(jobName, schedulingProperties.getIntervalEndDelta()), new CronTrigger(schedulingProperties.getCron()));
- LOG.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron());
- }
-
- private void restartIfFailed(JobExecution jobExecution) {
- if (jobExecution.getExitStatus() == ExitStatus.FAILED) {
- try {
- jobs.restart(jobExecution.getId());
- } catch (JobInstanceAlreadyCompleteException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | NoSuchJobExecutionException e) {
- throw new RuntimeException(e);
- }
- }
- }
-
- private void launchJob(String jobName, String endDelta) {
- try {
- JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
- if (!isBlank(endDelta))
- jobParametersBuilder.addString("end", SOLR_DATETIME_FORMATTER.format(OffsetDateTime.now().minus(Duration.parse(endDelta))));
-
- jobs.launchJob(jobName, jobParametersBuilder.toJobParameters());
- } catch (JobParametersInvalidException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException e) {
- throw new RuntimeException(e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
deleted file mode 100644
index 094e7978568..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobExecutionListener;
-
-import java.util.Map;
-
-public class JobsPropertyMap> implements JobExecutionListener {
-
- private final Map propertyMap;
-
- public JobsPropertyMap(Map propertyMap) {
- this.propertyMap = propertyMap;
- }
-
- @Override
- public void beforeJob(JobExecution jobExecution) {
- try {
- String jobName = jobExecution.getJobInstance().getJobName();
- T defaultProperties = propertyMap.get(jobName);
- if (defaultProperties == null)
- throw new UnsupportedOperationException("Properties not found for job " + jobName);
-
- T properties = defaultProperties.deepCopy();
- properties.apply(jobExecution.getJobParameters());
- properties.validate(jobName);
- jobExecution.getExecutionContext().put("jobProperties", properties);
- }
- catch (UnsupportedOperationException | IllegalArgumentException ex) {
- jobExecution.stop();
- jobExecution.setExitStatus(new ExitStatus(ExitStatus.FAILED.getExitCode(), ex.getMessage()));
- throw ex;
- }
- }
-
- @Override
- public void afterJob(JobExecution jobExecution) {
-
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/ObjectSource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/ObjectSource.java
deleted file mode 100644
index 98a1e0d55c1..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/ObjectSource.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-public interface ObjectSource<T> {
- CloseableIterator<T> open(T current, int rows);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java
deleted file mode 100644
index af81b4faedd..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-public class SchedulingProperties {
- private boolean enabled = false;
- private String cron;
- private String intervalEndDelta;
-
- public boolean isEnabled() {
- return enabled;
- }
-
- public void setEnabled(boolean enabled) {
- this.enabled = enabled;
- }
-
- public String getCron() {
- return cron;
- }
-
- public void setCron(String cron) {
- this.cron = cron;
- }
-
- public String getIntervalEndDelta() {
- return intervalEndDelta;
- }
-
- public void setIntervalEndDelta(String intervalEndDelta) {
- this.intervalEndDelta = intervalEndDelta;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
deleted file mode 100644
index 3ac5b058806..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job;
-
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-
-public abstract class SolrDAOBase {
- private static final Logger LOG = LoggerFactory.getLogger(SolrDAOBase.class);
-
- private final String zooKeeperConnectionString;
- private final String defaultCollection;
-
- protected SolrDAOBase(String zooKeeperConnectionString, String defaultCollection) {
- this.zooKeeperConnectionString = zooKeeperConnectionString;
- this.defaultCollection = defaultCollection;
- }
-
- protected void delete(String deleteQueryText) {
- try (CloudSolrClient client = createClient()) {
- try {
- LOG.info("Executing solr delete by query {}", deleteQueryText);
- client.deleteByQuery(deleteQueryText);
- client.commit();
- } catch (Exception e) {
- try {
- client.rollback();
- } catch (SolrServerException e1) {
- LOG.warn("Unable to rollback after solr delete operation failure.", e1);
- }
- throw new RuntimeException(e);
- }
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- protected CloudSolrClient createClient() {
- CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zooKeeperConnectionString).build();
- client.setDefaultCollection(defaultCollection);
- return client;
- }
-}
-
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
deleted file mode 100644
index 3df18b69b0f..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-public abstract class AbstractFileAction implements FileAction {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractFileAction.class);
-
- @Override
- public File perform(File inputFile) {
- File outputFile = onPerform(inputFile);
- if (!inputFile.delete())
- LOG.warn("File {} was not deleted. Exists: {}", inputFile.getAbsolutePath(), inputFile.exists());
- return outputFile;
- }
-
- protected abstract File onPerform(File inputFile);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java
deleted file mode 100644
index 99bc6d97d04..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-import static java.util.Arrays.asList;
-
-public class CompositeFileAction implements FileAction {
-
- private final List<FileAction> actions;
-
- public CompositeFileAction(FileAction... actions) {
- this.actions = new ArrayList<>(asList(actions));
- }
-
- public void add(FileAction action) {
- actions.add(action);
- }
-
- @Override
- public File perform(File inputFile) {
- File file = inputFile;
- for (FileAction action : actions) {
- file = action.perform(file);
- }
- return file;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
deleted file mode 100644
index 5ff9587a89e..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static java.util.Collections.unmodifiableMap;
-
-public class Document {
- private final Map<String, String> fieldMap;
-
- private Document() {
- fieldMap = new HashMap<>();
- }
-
- public Document(Map<String, String> fieldMap) {
- this.fieldMap = unmodifiableMap(fieldMap);
- }
-
- public String get(String key) {
- return fieldMap.get(key);
- }
-
- @JsonAnyGetter
- public Map<String, String> getFieldMap() {
- return fieldMap;
- }
-
- @JsonAnySetter
- private void put(String key, String value) {
- fieldMap.put(key, value);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
deleted file mode 100644
index 8358dd08a54..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.apache.ambari.infra.job.AbstractJobsConfiguration;
-import org.apache.ambari.infra.job.JobContextRepository;
-import org.apache.ambari.infra.job.JobScheduler;
-import org.apache.ambari.infra.job.ObjectSource;
-import org.apache.hadoop.fs.Path;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
-import org.springframework.batch.core.configuration.annotation.JobScope;
-import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
-import org.springframework.batch.core.configuration.annotation.StepScope;
-import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
-import org.springframework.batch.core.job.builder.JobBuilder;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import javax.inject.Inject;
-import java.io.File;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-@Configuration
-public class DocumentArchivingConfiguration extends AbstractJobsConfiguration<DocumentArchivingProperties> {
- private static final Logger LOG = LoggerFactory.getLogger(DocumentArchivingConfiguration.class);
- private static final DocumentWiper NOT_DELETE = (firstDocument, lastDocument) -> { };
-
- private final StepBuilderFactory steps;
- private final Step exportStep;
-
- @Inject
- public DocumentArchivingConfiguration(
- DocumentArchivingPropertyMap jobsPropertyMap,
- JobScheduler scheduler,
- StepBuilderFactory steps,
- JobBuilderFactory jobs,
- @Qualifier("exportStep") Step exportStep,
- JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor) {
- super(jobsPropertyMap.getSolrDataArchiving(), scheduler, jobs, jobRegistryBeanPostProcessor);
- this.exportStep = exportStep;
- this.steps = steps;
- }
-
- @Override
- protected Job buildJob(JobBuilder jobBuilder) {
- return jobBuilder.start(exportStep).build();
- }
-
- @Bean
- @JobScope
- public Step exportStep(DocumentExporter documentExporter) {
- return steps.get("export")
- .tasklet(documentExporter)
- .build();
- }
-
- @Bean
- @StepScope
- public DocumentExporter documentExporter(DocumentItemReader documentItemReader,
- @Value("#{stepExecution.jobExecution.jobId}") String jobId,
- @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties,
- InfraManagerDataConfig infraManagerDataConfig,
- @Value("#{jobParameters[end]}") String intervalEnd,
- DocumentWiper documentWiper,
- JobContextRepository jobContextRepository,
- PasswordStore passwordStore) {
-
- File baseDir = new File(infraManagerDataConfig.getDataFolder(), "exporting");
- CompositeFileAction fileAction = new CompositeFileAction(new TarGzCompressor());
- switch (properties.getDestination()) {
- case S3:
- fileAction.add(new S3Uploader(
- properties.s3Properties().orElseThrow(() -> new IllegalStateException("S3 properties are not provided!")),
- passwordStore));
- break;
- case HDFS:
- org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
- conf.set("fs.defaultFS", properties.getHdfsEndpoint());
- fileAction.add(new HdfsUploader(conf, new Path(properties.getHdfsDestinationDirectory())));
- break;
- case LOCAL:
- baseDir = new File(properties.getLocalDestinationDirectory());
- break;
- }
-
- FileNameSuffixFormatter fileNameSuffixFormatter = FileNameSuffixFormatter.from(properties);
- LocalItemWriterListener itemWriterListener = new LocalItemWriterListener(fileAction, documentWiper);
- File destinationDirectory = new File(
- baseDir,
- String.format("%s_%s_%s",
- properties.getSolr().getCollection(),
- jobId,
- isBlank(intervalEnd) ? "" : fileNameSuffixFormatter.format(intervalEnd)));
- LOG.info("Destination directory path={}", destinationDirectory);
- if (!destinationDirectory.exists()) {
- if (!destinationDirectory.mkdirs()) {
- LOG.warn("Unable to create directory {}", destinationDirectory);
- }
- }
-
- return new DocumentExporter(
- documentItemReader,
- firstDocument -> new LocalDocumentItemWriter(
- outFile(properties.getSolr().getCollection(), destinationDirectory, fileNameSuffixFormatter.format(firstDocument)), itemWriterListener),
- properties.getWriteBlockSize(), jobContextRepository);
- }
-
- @Bean
- @StepScope
- public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties,
- SolrDAO solrDAO) {
- if (isBlank(properties.getSolr().getDeleteQueryText()))
- return NOT_DELETE;
- return solrDAO;
- }
-
- @Bean
- @StepScope
- public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties) {
- return new SolrDAO(properties.getSolr());
- }
-
- private File outFile(String collection, File directoryPath, String suffix) {
- File file = new File(directoryPath, String.format("%s_-_%s.json", collection, suffix));
- LOG.info("Exporting to temp file {}", file.getAbsolutePath());
- return file;
- }
-
- @Bean
- @StepScope
- public DocumentItemReader reader(ObjectSource<Document> documentSource,
- @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties) {
- return new DocumentItemReader(documentSource, properties.getReadBlockSize());
- }
-
- @Bean
- @StepScope
- public ObjectSource<Document> logSource(@Value("#{jobParameters[start]}") String start,
- @Value("#{jobParameters[end]}") String end,
- SolrDAO solrDAO) {
-
- return new SolrDocumentSource(solrDAO, start, end);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
deleted file mode 100644
index b26da3656c8..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.JobProperties;
-import org.springframework.batch.core.JobParameters;
-
-import java.util.Optional;
-
-import static java.util.Objects.requireNonNull;
-import static org.apache.ambari.infra.job.archive.ExportDestination.HDFS;
-import static org.apache.ambari.infra.job.archive.ExportDestination.LOCAL;
-import static org.apache.ambari.infra.job.archive.ExportDestination.S3;
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class DocumentArchivingProperties extends JobProperties<DocumentArchivingProperties> {
- private int readBlockSize;
- private int writeBlockSize;
- private ExportDestination destination;
- private String localDestinationDirectory;
- private String fileNameSuffixColumn;
- private String fileNameSuffixDateFormat;
- private SolrProperties solr;
- private String s3AccessFile;
- private String s3KeyPrefix;
- private String s3BucketName;
- private String s3Endpoint;
-
- private String hdfsEndpoint;
- private String hdfsDestinationDirectory;
-
- public DocumentArchivingProperties() {
- super(DocumentArchivingProperties.class);
- }
-
- public int getReadBlockSize() {
- return readBlockSize;
- }
-
- public void setReadBlockSize(int readBlockSize) {
- this.readBlockSize = readBlockSize;
- }
-
- public int getWriteBlockSize() {
- return writeBlockSize;
- }
-
- public void setWriteBlockSize(int writeBlockSize) {
- this.writeBlockSize = writeBlockSize;
- }
-
- public ExportDestination getDestination() {
- return destination;
- }
-
- public void setDestination(ExportDestination destination) {
- this.destination = destination;
- }
-
- public String getLocalDestinationDirectory() {
- return localDestinationDirectory;
- }
-
- public void setLocalDestinationDirectory(String localDestinationDirectory) {
- this.localDestinationDirectory = localDestinationDirectory;
- }
-
- public String getFileNameSuffixColumn() {
- return fileNameSuffixColumn;
- }
-
- public void setFileNameSuffixColumn(String fileNameSuffixColumn) {
- this.fileNameSuffixColumn = fileNameSuffixColumn;
- }
-
- public String getFileNameSuffixDateFormat() {
- return fileNameSuffixDateFormat;
- }
-
- public void setFileNameSuffixDateFormat(String fileNameSuffixDateFormat) {
- this.fileNameSuffixDateFormat = fileNameSuffixDateFormat;
- }
-
- public SolrProperties getSolr() {
- return solr;
- }
-
- public void setSolr(SolrProperties query) {
- this.solr = query;
- }
-
- public String getS3AccessFile() {
- return s3AccessFile;
- }
-
- public void setS3AccessFile(String s3AccessFile) {
- this.s3AccessFile = s3AccessFile;
- }
-
- public String getS3KeyPrefix() {
- return s3KeyPrefix;
- }
-
- public void setS3KeyPrefix(String s3KeyPrefix) {
- this.s3KeyPrefix = s3KeyPrefix;
- }
-
- public String getS3BucketName() {
- return s3BucketName;
- }
-
- public void setS3BucketName(String s3BucketName) {
- this.s3BucketName = s3BucketName;
- }
-
- public String getS3Endpoint() {
- return s3Endpoint;
- }
-
- public void setS3Endpoint(String s3Endpoint) {
- this.s3Endpoint = s3Endpoint;
- }
-
- public Optional<S3Properties> s3Properties() {
- if (isBlank(s3BucketName))
- return Optional.empty();
-
- return Optional.of(new S3Properties(
- s3AccessFile,
- s3KeyPrefix,
- s3BucketName,
- s3Endpoint));
- }
-
- public String getHdfsEndpoint() {
- return hdfsEndpoint;
- }
-
- public void setHdfsEndpoint(String hdfsEndpoint) {
- this.hdfsEndpoint = hdfsEndpoint;
- }
-
- public String getHdfsDestinationDirectory() {
- return hdfsDestinationDirectory;
- }
-
- public void setHdfsDestinationDirectory(String hdfsDestinationDirectory) {
- this.hdfsDestinationDirectory = hdfsDestinationDirectory;
- }
-
- @Override
- public void apply(JobParameters jobParameters) {
- readBlockSize = getIntJobParameter(jobParameters, "readBlockSize", readBlockSize);
- writeBlockSize = getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize);
- destination = ExportDestination.valueOf(jobParameters.getString("destination", destination.name()));
- localDestinationDirectory = jobParameters.getString("localDestinationDirectory", localDestinationDirectory);
- s3AccessFile = jobParameters.getString("s3AccessFile", s3AccessFile);
- s3BucketName = jobParameters.getString("s3BucketName", s3BucketName);
- s3KeyPrefix = jobParameters.getString("s3KeyPrefix", s3KeyPrefix);
- s3Endpoint = jobParameters.getString("s3Endpoint", s3Endpoint);
- hdfsEndpoint = jobParameters.getString("hdfsEndpoint", hdfsEndpoint);
- hdfsDestinationDirectory = jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory);
- solr.apply(jobParameters);
- }
-
- private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
- String valueText = jobParameters.getString(parameterName);
- if (isBlank(valueText))
- return defaultValue;
- return Integer.parseInt(valueText);
- }
-
- @Override
- public void validate() {
- if (readBlockSize == 0)
- throw new IllegalArgumentException("The property readBlockSize must be greater than 0!");
-
- if (writeBlockSize == 0)
- throw new IllegalArgumentException("The property writeBlockSize must be greater than 0!");
-
- if (isBlank(fileNameSuffixColumn)) {
- throw new IllegalArgumentException("The property fileNameSuffixColumn can not be null or empty string!");
- }
-
- requireNonNull(destination, "The property destination can not be null!");
- switch (destination) {
- case LOCAL:
- if (isBlank(localDestinationDirectory))
- throw new IllegalArgumentException(String.format(
- "The property localDestinationDirectory can not be null or empty string when destination is set to %s!", LOCAL.name()));
- break;
-
- case S3:
- s3Properties()
- .orElseThrow(() -> new IllegalArgumentException("S3 related properties must be set if the destination is " + S3.name()))
- .validate();
- break;
-
- case HDFS:
- if (isBlank(hdfsEndpoint))
- throw new IllegalArgumentException(String.format(
- "The property hdfsEndpoint can not be null or empty string when destination is set to %s!", HDFS.name()));
- if (isBlank(hdfsDestinationDirectory))
- throw new IllegalArgumentException(String.format(
- "The property hdfsDestinationDirectory can not be null or empty string when destination is set to %s!", HDFS.name()));
- }
-
- requireNonNull(solr, "No solr query was specified for archiving job!");
- solr.validate();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
deleted file mode 100644
index a009031e413..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Map;
-
-@Configuration
-@ConfigurationProperties(prefix = "infra-manager.jobs")
-public class DocumentArchivingPropertyMap {
- private Map<String, DocumentArchivingProperties> solrDataArchiving;
-
- public Map<String, DocumentArchivingProperties> getSolrDataArchiving() {
- return solrDataArchiving;
- }
-
- public void setSolrDataArchiving(Map<String, DocumentArchivingProperties> solrDataArchiving) {
- this.solrDataArchiving = solrDataArchiving;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java
deleted file mode 100644
index f647a36ec05..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public interface DocumentDestination {
- DocumentItemWriter open(Document firstDocument);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
deleted file mode 100644
index d87fdea72a9..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.JobContextRepository;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.StepContribution;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.StepExecutionListener;
-import org.springframework.batch.core.scope.context.ChunkContext;
-import org.springframework.batch.core.step.tasklet.Tasklet;
-import org.springframework.batch.item.ExecutionContext;
-import org.springframework.batch.item.ItemStreamReader;
-import org.springframework.batch.repeat.RepeatStatus;
-
-public class DocumentExporter implements Tasklet, StepExecutionListener {
-
- private static final Logger LOG = LoggerFactory.getLogger(DocumentExporter.class);
-
- private boolean complete = false;
- private final ItemStreamReader documentReader;
- private final DocumentDestination documentDestination;
- private final int writeBlockSize;
- private final JobContextRepository jobContextRepository;
-
- public DocumentExporter(ItemStreamReader documentReader, DocumentDestination documentDestination, int writeBlockSize, JobContextRepository jobContextRepository) {
- this.documentReader = documentReader;
- this.documentDestination = documentDestination;
- this.writeBlockSize = writeBlockSize;
- this.jobContextRepository = jobContextRepository;
- }
-
- @Override
- public void beforeStep(StepExecution stepExecution) {
-
- }
-
- @Override
- public ExitStatus afterStep(StepExecution stepExecution) {
- if (complete) {
- return ExitStatus.COMPLETED;
- }
- else {
- return ExitStatus.FAILED;
- }
- }
-
- @Override
- public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
- StepExecution stepExecution = chunkContext.getStepContext().getStepExecution();
- ExecutionContext executionContext = stepExecution.getExecutionContext();
- documentReader.open(executionContext);
-
- DocumentItemWriter writer = null;
- int writtenCount = 0;
- try {
- Document document;
- while ((document = documentReader.read()) != null) {
- if (writer != null && writtenCount >= writeBlockSize) {
- stepExecution = jobContextRepository.getStepExecution(stepExecution.getJobExecutionId(), stepExecution.getId());
- if (stepExecution.getJobExecution().getStatus() == BatchStatus.STOPPING) {
- LOG.info("Received stop signal.");
- writer.revert();
- writer = null;
- return RepeatStatus.CONTINUABLE;
- }
-
- writer.close();
- writer = null;
- writtenCount = 0;
- documentReader.update(executionContext);
- jobContextRepository.updateExecutionContext(stepExecution);
- }
-
- if (writer == null)
- writer = documentDestination.open(document);
-
- writer.write(document);
- ++writtenCount;
- }
- }
- catch (Exception e) {
- if (writer != null) {
- writer.revert();
- writer = null;
- }
- throw e;
- }
- finally {
- if (writer != null)
- writer.close();
- documentReader.close();
- }
-
- complete = true;
- return RepeatStatus.FINISHED;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java
deleted file mode 100644
index 3a6b869b23a..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.CloseableIterator;
-import org.apache.ambari.infra.job.ObjectSource;
-import org.springframework.batch.item.ExecutionContext;
-import org.springframework.batch.item.ItemStreamException;
-import org.springframework.batch.item.support.AbstractItemStreamItemReader;
-import org.springframework.batch.repeat.CompletionPolicy;
-import org.springframework.batch.repeat.RepeatContext;
-import org.springframework.batch.repeat.RepeatStatus;
-import org.springframework.batch.repeat.context.RepeatContextSupport;
-import org.springframework.util.ClassUtils;
-
-public class DocumentItemReader extends AbstractItemStreamItemReader implements CompletionPolicy {
-
- public final static String POSITION = "last-read";
-
- private final ObjectSource documentSource;
- private final int readBlockSize;
-
- private CloseableIterator documentIterator = null;
- private int count = 0;
- private boolean eof = false;
- private Document current = null;
- private Document previous = null;
-
- public DocumentItemReader(ObjectSource documentSource, int readBlockSize) {
- this.documentSource = documentSource;
- this.readBlockSize = readBlockSize;
- setName(ClassUtils.getShortName(DocumentItemReader.class));
- }
-
- @Override
- public Document read() throws Exception {
- if (documentIterator == null)
- openStream();
- Document next = getNext();
- if (next == null && count > readBlockSize) {
- openStream();
- next = getNext();
- }
- eof = next == null;
- if (eof && documentIterator != null)
- documentIterator.close();
-
- previous = current;
- current = next;
- return current;
- }
-
- private Document getNext() {
- ++count;
- return documentIterator.next();
- }
-
- private void openStream() {
- closeStream();
- documentIterator = documentSource.open(current, readBlockSize);
- count = 0;
- }
-
- private void closeStream() {
- if (documentIterator == null)
- return;
- try {
- documentIterator.close();
- }
- catch (Exception e) {
- throw new RuntimeException(e);
- }
- documentIterator = null;
- }
-
- @Override
- public void open(ExecutionContext executionContext) {
- super.open(executionContext);
- current = null;
- previous = null;
- eof = false;
- documentIterator = null;
- if (!executionContext.containsKey(POSITION))
- return;
-
- current = (Document) executionContext.get(POSITION);
- }
-
- @Override
- public void update(ExecutionContext executionContext) throws ItemStreamException {
- super.update(executionContext);
- if (previous != null)
- executionContext.put(POSITION, previous);
- }
-
- @Override
- public void close() {
- closeStream();
- }
-
- @Override
- public boolean isComplete(RepeatContext context, RepeatStatus result) {
- return eof;
- }
-
- @Override
- public boolean isComplete(RepeatContext context) {
- return eof;
- }
-
- @Override
- public RepeatContext start(RepeatContext parent) {
- return new RepeatContextSupport(parent);
- }
-
- @Override
- public void update(RepeatContext context) {
- if (eof)
- context.setCompleteOnly();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java
deleted file mode 100644
index e96f6f10e54..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public interface DocumentItemWriter {
- void write(Document document);
- void revert();
- void close();
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentWiper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentWiper.java
deleted file mode 100644
index 2b2a355ca5b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentWiper.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public interface DocumentWiper {
- void delete(Document firstDocument, Document lastDocument);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
deleted file mode 100644
index a143e4c546a..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public enum ExportDestination {
- LOCAL,
- HDFS,
- S3
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java
deleted file mode 100644
index 26a8c6310fe..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import java.io.File;
-
-public interface FileAction {
- File perform(File inputFile);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
deleted file mode 100644
index f9016e61bdc..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import java.time.OffsetDateTime;
-import java.time.format.DateTimeFormatter;
-
-import static java.util.Objects.requireNonNull;
-import static org.apache.ambari.infra.job.archive.SolrDocumentIterator.SOLR_DATE_FORMAT_TEXT;
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class FileNameSuffixFormatter {
- public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern(SOLR_DATE_FORMAT_TEXT);
-
- public static FileNameSuffixFormatter from(DocumentArchivingProperties properties) {
- return new FileNameSuffixFormatter(properties.getFileNameSuffixColumn(), properties.getFileNameSuffixDateFormat());
- }
-
-
- private final String columnName;
-
- private final DateTimeFormatter dateFormat;
-
- public FileNameSuffixFormatter(String columnName, String dateTimeFormat) {
- this.columnName = columnName;
- dateFormat = isBlank(dateTimeFormat) ? null : DateTimeFormatter.ofPattern(dateTimeFormat);
- }
-
- public String format(Document document) {
- requireNonNull(document, "Can not format file name suffix: input document is null!");
-
- if (isBlank(document.get(columnName)))
- throw new IllegalArgumentException("The specified document does not have a column " + columnName + " or it's value is blank!");
-
- return format(document.get(columnName));
- }
-
- public String format(String value) {
- if (isBlank(value))
- throw new IllegalArgumentException("The specified value is blank!");
-
- if (dateFormat == null)
- return value;
- OffsetDateTime date = OffsetDateTime.parse(value, SOLR_DATETIME_FORMATTER);
- return date.format(dateFormat);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
deleted file mode 100644
index 0f7b99fcc24..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-
-public class HdfsUploader extends AbstractFileAction {
-
- private final Configuration configuration;
- private final Path destinationDirectory;
-
- public HdfsUploader(Configuration configuration, Path destinationDirectory) {
- this.destinationDirectory = destinationDirectory;
- this.configuration = configuration;
- }
-
- @Override
- protected File onPerform(File inputFile) {
- try (FileSystem fileSystem = FileSystem.get(configuration)) {
- Path destination = new Path(destinationDirectory, inputFile.getName());
- if (fileSystem.exists(destination)) {
- throw new UnsupportedOperationException(String.format("File '%s' already exists!", destination));
- }
-
- fileSystem.copyFromLocalFile(new Path(inputFile.getAbsolutePath()), destination);
-
- return inputFile;
- }
- catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ItemWriterListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ItemWriterListener.java
deleted file mode 100644
index 33a67cbb0bf..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ItemWriterListener.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public interface ItemWriterListener {
- void onCompleted(WriteCompletedEvent event);
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
deleted file mode 100644
index 531d2d57b44..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.*;
-
-public class LocalDocumentItemWriter implements DocumentItemWriter {
- private static final Logger LOG = LoggerFactory.getLogger(LocalDocumentItemWriter.class);
-
- private static final ObjectMapper json = new ObjectMapper();
- private static final String ENCODING = "UTF-8";
-
- private final File outFile;
- private final BufferedWriter bufferedWriter;
- private final ItemWriterListener itemWriterListener;
- private Document firstDocument = null;
- private Document lastDocument = null;
-
- public LocalDocumentItemWriter(File outFile, ItemWriterListener itemWriterListener) {
- this.itemWriterListener = itemWriterListener;
- this.outFile = outFile;
- try {
- this.bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outFile), ENCODING));
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- } catch (FileNotFoundException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- @Override
- public void write(Document document) {
- try {
- bufferedWriter.write(json.writeValueAsString(document));
- bufferedWriter.newLine();
-
- if (firstDocument == null)
- firstDocument = document;
-
- lastDocument = document;
- }
- catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- @Override
- public void revert() {
- IOUtils.closeQuietly(bufferedWriter);
- if (!outFile.delete())
- LOG.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists());
- }
-
- @Override
- public void close() {
- try {
- bufferedWriter.close();
- if (itemWriterListener != null)
- itemWriterListener.onCompleted(new WriteCompletedEvent(outFile, firstDocument, lastDocument));
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalItemWriterListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalItemWriterListener.java
deleted file mode 100644
index a24d524cd93..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalItemWriterListener.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public class LocalItemWriterListener implements ItemWriterListener {
- private final FileAction fileAction;
- private final DocumentWiper documentWiper;
-
- public LocalItemWriterListener(FileAction fileAction, DocumentWiper documentWiper) {
- this.fileAction = fileAction;
- this.documentWiper = documentWiper;
- }
-
-
- @Override
- public void onCompleted(WriteCompletedEvent event) {
- fileAction.perform(event.getOutFile());
- documentWiper.delete(event.getFirstDocument(), event.getLastDocument());
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
deleted file mode 100644
index 879b58bc8f2..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.UncheckedIOException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Optional;
-
-import static org.apache.commons.csv.CSVFormat.DEFAULT;
-
-public class S3AccessCsv implements PasswordStore {
- private static final Logger LOG = LoggerFactory.getLogger(S3AccessCsv.class);
-
- public static S3AccessCsv file(String path) {
- try {
- return new S3AccessCsv(new FileReader(path));
- } catch (FileNotFoundException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- private Map passwordMap = new HashMap<>();
-
- public S3AccessCsv(Reader reader) {
- try (CSVParser csvParser = CSVParser.parse(reader, DEFAULT.withHeader(
- S3AccessKeyNames.AccessKeyId.getCsvName(), S3AccessKeyNames.SecretAccessKey.getCsvName()))) {
- Iterator iterator = csvParser.iterator();
- if (!iterator.hasNext()) {
- throw new S3AccessCsvFormatException("Csv file is empty!");
- }
-
- CSVRecord record = iterator.next();
- if (record.size() < 2) {
- throw new S3AccessCsvFormatException("Csv file contains less than 2 columns!");
- }
-
- checkColumnExists(record, S3AccessKeyNames.AccessKeyId);
- checkColumnExists(record, S3AccessKeyNames.SecretAccessKey);
-
- if (!iterator.hasNext()) {
- throw new S3AccessCsvFormatException("Csv file contains header only!");
- }
-
- record = iterator.next();
-
- Map header = csvParser.getHeaderMap();
- for (S3AccessKeyNames keyNames : S3AccessKeyNames.values())
- passwordMap.put(keyNames.getEnvVariableName(), record.get(header.get(keyNames.getCsvName())));
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- } catch (S3AccessCsvFormatException e) {
- LOG.warn("Unable to parse csv file: {}", e.getMessage());
- }
- }
-
- private void checkColumnExists(CSVRecord record, S3AccessKeyNames s3AccessKeyName) {
- if (!s3AccessKeyName.getCsvName().equals(record.get(s3AccessKeyName.getCsvName()))) {
- throw new S3AccessCsvFormatException(String.format("Csv file does not contain the required column: '%s'", s3AccessKeyName.getCsvName()));
- }
- }
-
- @Override
- public Optional getPassword(String propertyName) {
- return Optional.ofNullable(passwordMap.get(propertyName));
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java
deleted file mode 100644
index ef9d53918fb..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public class S3AccessCsvFormatException extends RuntimeException {
- public S3AccessCsvFormatException(String message) {
- super(message);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java
deleted file mode 100644
index e840d3b329b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-public enum S3AccessKeyNames {
- AccessKeyId("AWS_ACCESS_KEY_ID", "Access key ID"),
- SecretAccessKey("AWS_SECRET_ACCESS_KEY", "Secret access key");
-
- private final String envVariableName;
- private final String csvName;
-
- S3AccessKeyNames(String envVariableName, String csvName) {
- this.envVariableName = envVariableName;
- this.csvName = csvName;
- }
-
- public String getEnvVariableName() {
- return envVariableName;
- }
-
- public String getCsvName() {
- return csvName;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
deleted file mode 100644
index 59a4469e945..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class S3Properties {
- private final String s3AccessFile;
- private final String s3KeyPrefix;
- private final String s3BucketName;
- private final String s3EndPoint;
-
- public S3Properties(String s3AccessFile, String s3KeyPrefix, String s3BucketName, String s3EndPoint) {
- this.s3AccessFile = s3AccessFile;
- this.s3KeyPrefix = s3KeyPrefix;
- this.s3BucketName = s3BucketName;
- this.s3EndPoint = s3EndPoint;
- }
-
- public String getS3KeyPrefix() {
- return s3KeyPrefix;
- }
-
- public String getS3BucketName() {
- return s3BucketName;
- }
-
- public String getS3EndPoint() {
- return s3EndPoint;
- }
-
- public String getS3AccessFile() {
- return s3AccessFile;
- }
-
- @Override
- public String toString() {
- return "S3Properties{" +
- "s3AccessFile='" + s3AccessFile + '\'' +
- ", s3KeyPrefix='" + s3KeyPrefix + '\'' +
- ", s3BucketName='" + s3BucketName + '\'' +
- ", s3EndPoint='" + s3EndPoint + '\'' +
- '}';
- }
-
- public void validate() {
- if (isBlank(s3BucketName))
- throw new IllegalArgumentException("The property s3BucketName can not be null or empty string!");
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
deleted file mode 100644
index 2536cb5dfac..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
+++ /dev/null
@@ -1,74 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.s3.AmazonS3Client;
-import org.apache.ambari.infra.conf.security.CompositePasswordStore;
-import org.apache.ambari.infra.conf.security.PasswordStore;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.commons.lang.StringUtils.isNotBlank;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class S3Uploader extends AbstractFileAction {
-
- private static final Logger LOG = LoggerFactory.getLogger(S3Uploader.class);
-
- private final AmazonS3Client client;
- private final String keyPrefix;
- private final String bucketName;
-
- public S3Uploader(S3Properties s3Properties, PasswordStore passwordStore) {
- LOG.info("Initializing S3 client with " + s3Properties);
-
- this.keyPrefix = s3Properties.getS3KeyPrefix();
- this.bucketName = s3Properties.getS3BucketName();
-
- PasswordStore compositePasswordStore = passwordStore;
- if (isNotBlank((s3Properties.getS3AccessFile())))
- compositePasswordStore = new CompositePasswordStore(passwordStore, S3AccessCsv.file(s3Properties.getS3AccessFile()));
-
- BasicAWSCredentials credentials = new BasicAWSCredentials(
- compositePasswordStore.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName())
- .orElseThrow(() -> new IllegalArgumentException("Access key Id is not present!")),
- compositePasswordStore.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName())
- .orElseThrow(() -> new IllegalArgumentException("Secret Access Key is not present!")));
- client = new AmazonS3Client(credentials);
- if (!isBlank(s3Properties.getS3EndPoint()))
- client.setEndpoint(s3Properties.getS3EndPoint());
-// Note: without pathStyleAccess=true endpoint going to be .:
-// client.setS3ClientOptions(S3ClientOptions.builder().setPathStyleAccess(true).build());
- }
-
- @Override
- public File onPerform(File inputFile) {
- String key = keyPrefix + inputFile.getName();
-
- if (client.doesObjectExist(bucketName, key)) {
- throw new UnsupportedOperationException(String.format("Object '%s' already exists in bucket '%s'", key, bucketName));
- }
-
- client.putObject(bucketName, key, inputFile);
- return inputFile;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
deleted file mode 100644
index fba08e74957..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.SolrDAOBase;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-
-public class SolrDAO extends SolrDAOBase implements DocumentWiper {
- private static final Logger LOG = LoggerFactory.getLogger(SolrDAO.class);
-
- private final SolrProperties queryProperties;
-
- public SolrDAO(SolrProperties queryProperties) {
- super(queryProperties.getZooKeeperConnectionString(), queryProperties.getCollection());
- this.queryProperties = queryProperties;
- }
-
- @Override
- public void delete(Document firstDocument, Document lastDocument) {
- delete(new SolrParametrizedString(queryProperties.getDeleteQueryText())
- .set("start", firstDocument.getFieldMap())
- .set("end", lastDocument.getFieldMap()).toString());
- }
-
- public SolrDocumentIterator query(String start, String end, Document subIntervalFrom, int rows) {
- SolrQuery query = queryProperties.toQueryBuilder()
- .setInterval(start, end)
- .setDocument(subIntervalFrom)
- .build();
- query.setRows(rows);
-
- LOG.info("Executing solr query {}", query.toLocalParamsString());
-
- try {
- CloudSolrClient client = createClient();
- QueryResponse response = client.query(query);
- return new SolrDocumentIterator(response, client);
- } catch (SolrServerException e) {
- throw new RuntimeException(e);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
deleted file mode 100644
index f8d8382a3f4..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.CloseableIterator;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.TimeZone;
-
-public class SolrDocumentIterator implements CloseableIterator {
-
- public static final String SOLR_DATE_FORMAT_TEXT = "yyyy-MM-dd'T'HH:mm:ss.SSSX";
- private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat(SOLR_DATE_FORMAT_TEXT);
-
- static {
- SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- private final Iterator documentIterator;
- private final CloudSolrClient client;
-
-
- public SolrDocumentIterator(QueryResponse response, CloudSolrClient client) {
- documentIterator = response.getResults().iterator();
- this.client = client;
- }
-
- @Override
- public Document next() {
- if (!documentIterator.hasNext())
- return null;
-
- SolrDocument document = documentIterator.next();
- HashMap fieldMap = new HashMap<>();
- for (String key : document.getFieldNames()) {
- fieldMap.put(key, toString(document.get(key)));
- }
-
- return new Document(fieldMap);
- }
-
- private String toString(Object value) {
- if (value == null) {
- return null;
- }
- else if (value instanceof Date) {
- return SOLR_DATE_FORMAT.format(value);
- }
- else {
- return value.toString();
- }
- }
-
- @Override
- public void close() {
- try {
- client.close();
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- @Override
- public boolean hasNext() {
- return documentIterator.hasNext();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java
deleted file mode 100644
index 39ddd1e21ad..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.CloseableIterator;
-import org.apache.ambari.infra.job.ObjectSource;
-
-public class SolrDocumentSource implements ObjectSource {
- private final SolrDAO solrDAO;
- private final String start;
- private final String end;
-
- public SolrDocumentSource(SolrDAO solrDAO, String start, String end) {
- this.solrDAO = solrDAO;
- this.start = start;
- this.end = end;
- }
-
- @Override
- public CloseableIterator open(Document current, int rows) {
- return solrDAO.query(start, end, current, rows);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
deleted file mode 100644
index 9770982f0ef..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.solr.client.solrj.util.ClientUtils;
-
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class SolrParametrizedString {
- private static final String PARAMETER_PATTERN = "\\$\\{%s[a-z0-9A-Z]+}";
- private static final Pattern NO_PREFIX_PARAMETER_PATTERN = Pattern.compile(String.format(PARAMETER_PATTERN, ""));
-
- private final String string;
-
- public SolrParametrizedString(String string) {
- this.string = string;
- }
-
- private Set collectParamNames(Pattern regExPattern) {
- Matcher matcher = regExPattern.matcher(string);
- Set parameters = new HashSet<>();
- while (matcher.find())
- parameters.add(matcher.group().replace("${", "").replace("}", ""));
- return parameters;
- }
-
- @Override
- public String toString() {
- return string;
- }
-
- public SolrParametrizedString set(Map parameterMap) {
- return set(NO_PREFIX_PARAMETER_PATTERN, null, parameterMap);
- }
-
- public SolrParametrizedString set(String prefix, Map parameterMap) {
- String dottedPrefix = prefix + ".";
- return set(Pattern.compile(String.format(PARAMETER_PATTERN, dottedPrefix)), dottedPrefix, parameterMap);
- }
-
- private SolrParametrizedString set(Pattern regExPattern, String prefix, Map parameterMap) {
- String newString = string;
- for (String paramName : collectParamNames(regExPattern)) {
- String paramSuffix = prefix == null ? paramName : paramName.replace(prefix, "");
- if (parameterMap.get(paramSuffix) != null)
- newString = newString.replace(String.format("${%s}", paramName), getValue(parameterMap, paramSuffix));
- }
- return new SolrParametrizedString(newString);
- }
-
- private String getValue(Map parameterMap, String paramSuffix) {
- String value = parameterMap.get(paramSuffix);
- if ("*".equals(value))
- return value;
- return ClientUtils.escapeQueryChars(value);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
deleted file mode 100644
index a2a78c24e20..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.springframework.batch.core.JobParameters;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class SolrProperties {
- private String zooKeeperConnectionString;
- private String collection;
- private String queryText;
- private String filterQueryText;
- private String[] sortColumn;
- private String deleteQueryText;
-
- public String getZooKeeperConnectionString() {
- return zooKeeperConnectionString;
- }
-
- public void setZooKeeperConnectionString(String zooKeeperConnectionString) {
- this.zooKeeperConnectionString = zooKeeperConnectionString;
- }
-
- public String getCollection() {
- return collection;
- }
-
- public void setCollection(String collection) {
- this.collection = collection;
- }
-
- public String getQueryText() {
- return queryText;
- }
-
- public void setQueryText(String queryText) {
- this.queryText = queryText;
- }
-
- public String getFilterQueryText() {
- return filterQueryText;
- }
-
- public void setFilterQueryText(String filterQueryText) {
- this.filterQueryText = filterQueryText;
- }
-
- public String[] getSortColumn() {
- return sortColumn;
- }
-
- public void setSortColumn(String[] sortColumn) {
- this.sortColumn = sortColumn;
- }
-
- public String getDeleteQueryText() {
- return deleteQueryText;
- }
-
- public void setDeleteQueryText(String deleteQueryText) {
- this.deleteQueryText = deleteQueryText;
- }
-
- public SolrQueryBuilder toQueryBuilder() {
- return new SolrQueryBuilder().
- setQueryText(queryText)
- .setFilterQueryText(filterQueryText)
- .addSort(sortColumn);
- }
-
- public void apply(JobParameters jobParameters) {
- zooKeeperConnectionString = jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString);
- collection = jobParameters.getString("collection", collection);
- queryText = jobParameters.getString("queryText", queryText);
- filterQueryText = jobParameters.getString("filterQueryText", filterQueryText);
- deleteQueryText = jobParameters.getString("deleteQueryText", deleteQueryText);
-
- String sortValue;
- List sortColumns = new ArrayList<>();
- int i = 0;
- while ((sortValue = jobParameters.getString(String.format("sortColumn[%d]", i))) != null) {
- sortColumns.add(sortValue);
- ++i;
- }
-
- if (sortColumns.size() > 0)
- sortColumn = sortColumns.toArray(new String[sortColumns.size()]);
- }
-
- public void validate() {
- if (isBlank(zooKeeperConnectionString))
- throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
-
- if (isBlank(collection))
- throw new IllegalArgumentException("The property collection can not be null or empty string!");
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
deleted file mode 100644
index 0e41169eaa4..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.solr.client.solrj.SolrQuery;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-import static org.apache.solr.client.solrj.SolrQuery.ORDER.asc;
-
-public class SolrQueryBuilder {
-
- private static final String INTERVAL_START = "start";
- private static final String INTERVAL_END = "end";
- private String queryText;
- private final Map interval;
- private String filterQueryText;
- private Document document;
- private String[] sortFields;
-
- public SolrQueryBuilder() {
- this.queryText = "*:*";
- interval = new HashMap<>();
- interval.put(INTERVAL_START, "*");
- interval.put(INTERVAL_END, "*");
- }
-
- public SolrQueryBuilder setQueryText(String queryText) {
- this.queryText = queryText;
- return this;
- }
-
- public SolrQueryBuilder setInterval(String startValue, String endValue) {
- if (isBlank(startValue))
- startValue = "*";
- if (isBlank(endValue))
- endValue = "*";
- this.interval.put(INTERVAL_START, startValue);
- this.interval.put(INTERVAL_END, endValue);
- return this;
- }
-
- public SolrQueryBuilder setFilterQueryText(String filterQueryText) {
- this.filterQueryText = filterQueryText;
- return this;
- }
-
-
- public SolrQueryBuilder setDocument(Document document) {
- this.document = document;
- return this;
- }
-
- public SolrQueryBuilder addSort(String... sortBy) {
- this.sortFields = sortBy;
- return this;
- }
-
- public SolrQuery build() {
- SolrQuery solrQuery = new SolrQuery();
-
- SolrParametrizedString queryText = new SolrParametrizedString(this.queryText).set(interval);
- solrQuery.setQuery(queryText.toString());
-
- if (filterQueryText != null) {
- SolrParametrizedString filterQuery = new SolrParametrizedString(filterQueryText)
- .set(interval);
-
- if (document != null) {
- filterQuery = filterQuery.set(document.getFieldMap());
- solrQuery.setFilterQueries(filterQuery.toString());
- }
- }
-
- if (sortFields != null) {
- for (String field : sortFields)
- solrQuery.addSort(field, asc);
- }
-
- return solrQuery;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java
deleted file mode 100644
index f062879f93e..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.hibernate.validator.constraints.NotBlank;
-import org.springframework.batch.core.JobParameters;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class SolrQueryProperties {
- @NotBlank
- private String collection;
- @NotBlank
- private String queryText;
- private String filterQueryText;
- private String[] sortColumn;
-
- public String getCollection() {
- return collection;
- }
-
- public void setCollection(String collection) {
- this.collection = collection;
- }
-
- public String getQueryText() {
- return queryText;
- }
-
- public void setQueryText(String queryText) {
- this.queryText = queryText;
- }
-
- public String getFilterQueryText() {
- return filterQueryText;
- }
-
- public void setFilterQueryText(String filterQueryText) {
- this.filterQueryText = filterQueryText;
- }
-
- public String[] getSortColumn() {
- return sortColumn;
- }
-
- public void setSortColumn(String[] sortColumn) {
- this.sortColumn = sortColumn;
- }
-
- public SolrQueryBuilder toQueryBuilder() {
- return new SolrQueryBuilder().
- setQueryText(queryText)
- .setFilterQueryText(filterQueryText)
- .addSort(sortColumn);
- }
-
- public void apply(JobParameters jobParameters) {
- collection = jobParameters.getString("collection", collection);
- queryText = jobParameters.getString("queryText", queryText);
- filterQueryText = jobParameters.getString("filterQueryText", filterQueryText);
-
- String sortValue;
- List sortColumns = new ArrayList<>();
- int i = 0;
- while ((sortValue = jobParameters.getString(String.format("sortColumn[%d]", i))) != null) {
- sortColumns.add(sortValue);
- ++i;
- }
-
- if (sortColumns.size() > 0)
- sortColumn = sortColumns.toArray(new String[sortColumns.size()]);
- }
-
- public void validate() {
- if (isBlank(collection))
- throw new IllegalArgumentException("The property collection can not be null or empty string!");
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java
deleted file mode 100644
index 8f9d6732de9..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
-import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
-import org.apache.commons.io.IOUtils;
-
-import java.io.*;
-
-public class TarGzCompressor extends AbstractFileAction {
- @Override
- public File onPerform(File inputFile) {
- File tarGzFile = new File(inputFile.getParent(), inputFile.getName() + ".tar.gz");
- try (TarArchiveOutputStream tarArchiveOutputStream = new TarArchiveOutputStream(
- new GzipCompressorOutputStream(new FileOutputStream(tarGzFile)))) {
- TarArchiveEntry archiveEntry = new TarArchiveEntry(inputFile.getName());
- archiveEntry.setSize(inputFile.length());
- tarArchiveOutputStream.putArchiveEntry(archiveEntry);
-
- try (FileInputStream fileInputStream = new FileInputStream(inputFile)) {
- IOUtils.copy(fileInputStream, tarArchiveOutputStream);
- }
-
- tarArchiveOutputStream.closeArchiveEntry();
- }
- catch (IOException ex) {
- throw new UncheckedIOException(ex);
- }
-
- return tarGzFile;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/WriteCompletedEvent.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/WriteCompletedEvent.java
deleted file mode 100644
index 49abe22873b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/WriteCompletedEvent.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import java.io.File;
-
-public class WriteCompletedEvent {
- private final File outFile;
- private final Document firstDocument;
- private final Document lastDocument;
-
- public WriteCompletedEvent(File outFile, Document firstDocument, Document lastDocument) {
- this.outFile = outFile;
- this.firstDocument = firstDocument;
- this.lastDocument = lastDocument;
- }
-
- public File getOutFile() {
- return outFile;
- }
-
- public Document getFirstDocument() {
- return firstDocument;
- }
-
- public Document getLastDocument() {
- return lastDocument;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
deleted file mode 100644
index 4a68c494238..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.deleting;
-
-import org.apache.ambari.infra.job.AbstractJobsConfiguration;
-import org.apache.ambari.infra.job.JobScheduler;
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
-import org.springframework.batch.core.configuration.annotation.JobScope;
-import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
-import org.springframework.batch.core.configuration.annotation.StepScope;
-import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
-import org.springframework.batch.core.job.builder.JobBuilder;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import javax.inject.Inject;
-
-@Configuration
-public class DocumentDeletingConfiguration extends AbstractJobsConfiguration<DocumentDeletingProperties> {
-
- private final StepBuilderFactory steps;
- private final Step deleteStep;
-
- @Inject
- public DocumentDeletingConfiguration(
- DocumentDeletingPropertyMap documentDeletingPropertyMap,
- JobScheduler scheduler,
- StepBuilderFactory steps,
- JobBuilderFactory jobs,
- JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor,
- @Qualifier("deleteStep") Step deleteStep) {
- super(documentDeletingPropertyMap.getSolrDataDeleting(), scheduler, jobs, jobRegistryBeanPostProcessor);
- this.steps = steps;
- this.deleteStep = deleteStep;
- }
-
- @Override
- protected Job buildJob(JobBuilder jobBuilder) {
- return jobBuilder.start(deleteStep).build();
- }
-
- @Bean
- @JobScope
- public Step deleteStep(DocumentWiperTasklet tasklet) {
- return steps.get("delete")
- .tasklet(tasklet)
- .build();
- }
-
- @Bean
- @StepScope
- public DocumentWiperTasklet documentWiperTasklet(
- @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentDeletingProperties properties,
- @Value("#{jobParameters[start]}") String start,
- @Value("#{jobParameters[end]}") String end) {
- return new DocumentWiperTasklet(properties, start, end);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
deleted file mode 100644
index 63b7dd22975..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.deleting;
-
-import org.apache.ambari.infra.job.JobProperties;
-import org.springframework.batch.core.JobParameters;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-public class DocumentDeletingProperties extends JobProperties<DocumentDeletingProperties> {
- private String zooKeeperConnectionString;
- private String collection;
- private String filterField;
-
- public DocumentDeletingProperties() {
- super(DocumentDeletingProperties.class);
- }
-
- public String getZooKeeperConnectionString() {
- return zooKeeperConnectionString;
- }
-
- public void setZooKeeperConnectionString(String zooKeeperConnectionString) {
- this.zooKeeperConnectionString = zooKeeperConnectionString;
- }
-
- public String getCollection() {
- return collection;
- }
-
- public void setCollection(String collection) {
- this.collection = collection;
- }
-
- public String getFilterField() {
- return filterField;
- }
-
- public void setFilterField(String filterField) {
- this.filterField = filterField;
- }
-
- @Override
- public void apply(JobParameters jobParameters) {
- zooKeeperConnectionString = jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString);
- collection = jobParameters.getString("collection", collection);
- filterField = jobParameters.getString("filterField", filterField);
- }
-
- @Override
- public void validate() {
- if (isBlank(zooKeeperConnectionString))
- throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!");
-
- if (isBlank(collection))
- throw new IllegalArgumentException("The property collection can not be null or empty string!");
-
- if (isBlank(filterField))
- throw new IllegalArgumentException("The property filterField can not be null or empty string!");
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
deleted file mode 100644
index 1dc0caf9e7b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.deleting;
-
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Map;
-
-@Configuration
-@ConfigurationProperties(prefix = "infra-manager.jobs")
-public class DocumentDeletingPropertyMap {
- private Map<String, DocumentDeletingProperties> solrDataDeleting;
-
- public Map<String, DocumentDeletingProperties> getSolrDataDeleting() {
- return solrDataDeleting;
- }
-
- public void setSolrDataDeleting(Map<String, DocumentDeletingProperties> solrDataDeleting) {
- this.solrDataDeleting = solrDataDeleting;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
deleted file mode 100644
index 463e6e002e7..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.deleting;
-
-import org.apache.ambari.infra.job.SolrDAOBase;
-import org.apache.solr.client.solrj.util.ClientUtils;
-import org.springframework.batch.core.StepContribution;
-import org.springframework.batch.core.scope.context.ChunkContext;
-import org.springframework.batch.core.step.tasklet.Tasklet;
-import org.springframework.batch.repeat.RepeatStatus;
-
-public class DocumentWiperTasklet extends SolrDAOBase implements Tasklet {
- private final String filterField;
- private final String start;
- private final String end;
-
- public DocumentWiperTasklet(DocumentDeletingProperties properties, String start, String end) {
- super(properties.getZooKeeperConnectionString(), properties.getCollection());
- this.filterField = properties.getFilterField();
- this.start = start;
- this.end = end;
- }
-
- @Override
- public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {
- delete(String.format("%s:[%s TO %s]", filterField, getValue(start), getValue(end)));
- return RepeatStatus.FINISHED;
- }
-
- private String getValue(String value) {
- return "*".equals(value) ? value : ClientUtils.escapeQueryChars(value);
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
deleted file mode 100644
index a124e4d16fe..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.dummy;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.item.ItemProcessor;
-
-public class DummyItemProcessor implements ItemProcessor<DummyObject, String> {
-
- private static final Logger LOG = LoggerFactory.getLogger(DummyItemProcessor.class);
-
- @Override
- public String process(DummyObject input) throws Exception {
- LOG.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2());
- Thread.sleep(10000);
- return String.format("%s, %s", input.getF1(), input.getF2());
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
deleted file mode 100644
index 89ad0130222..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.dummy;
-
-import org.apache.ambari.infra.conf.InfraManagerDataConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.annotation.BeforeStep;
-import org.springframework.batch.item.ItemWriter;
-
-import javax.inject.Inject;
-import java.io.File;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Date;
-import java.util.List;
-
-public class DummyItemWriter implements ItemWriter<String> {
-
- private static final Logger LOG = LoggerFactory.getLogger(DummyItemWriter.class);
-
- private StepExecution stepExecution;
-
- @Inject
- private InfraManagerDataConfig infraManagerDataConfig;
-
- @Override
- public void write(List<? extends String> values) throws Exception {
- LOG.info("DummyItem writer called (values: {})... wait 1 seconds", values.toString());
- Thread.sleep(1000);
- String outputDirectoryLocation = String.format("%s%s%s%s", infraManagerDataConfig.getDataFolder(), File.separator, "dummyOutput-", new Date().getTime());
- Path pathToDirectory = Paths.get(outputDirectoryLocation);
- Path pathToFile = Paths.get(String.format("%s%s%s", outputDirectoryLocation, File.separator, "dummyOutput.txt"));
- Files.createDirectories(pathToDirectory);
- LOG.info("Write location to step execution context...");
- stepExecution.getExecutionContext().put("stepOutputLocation", pathToFile.toAbsolutePath().toString());
- LOG.info("Write location to job execution context...");
- stepExecution.getJobExecution().getExecutionContext().put("jobOutputLocation", pathToFile.toAbsolutePath().toString());
- LOG.info("Write to file: {}", pathToFile.toAbsolutePath());
- Files.write(pathToFile, values.toString().getBytes());
- }
-
- @BeforeStep
- public void saveStepExecution(StepExecution stepExecution) {
- this.stepExecution = stepExecution;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobConfiguration.java
deleted file mode 100644
index a4f53696b78..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobConfiguration.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.job.dummy;
-
-import javax.inject.Inject;
-
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
-import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
-import org.springframework.batch.item.ItemProcessor;
-import org.springframework.batch.item.ItemReader;
-import org.springframework.batch.item.ItemWriter;
-import org.springframework.batch.item.file.FlatFileItemReader;
-import org.springframework.batch.item.file.LineMapper;
-import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
-import org.springframework.batch.item.file.mapping.DefaultLineMapper;
-import org.springframework.batch.item.file.mapping.FieldSetMapper;
-import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
-import org.springframework.batch.item.file.transform.LineTokenizer;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.core.io.ClassPathResource;
-
-@Configuration
-public class DummyJobConfiguration {
- @Inject
- private StepBuilderFactory steps;
-
- @Inject
- private JobBuilderFactory jobs;
-
- @Bean(name = "dummyStep")
- protected Step dummyStep(ItemReader<DummyObject> reader,
- ItemProcessor<DummyObject, String> processor,
- @Qualifier("dummyItemWriter") ItemWriter<String> writer) {
- return steps.get("dummyStep").listener(new DummyStepListener()).<DummyObject, String> chunk(2)
- .reader(reader).processor(processor).writer(writer).build();
- }
-
- @Bean(name = "dummyJob")
- public Job job(@Qualifier("dummyStep") Step dummyStep) {
- return jobs.get("dummyJob").listener(new DummyJobListener()).start(dummyStep).build();
- }
-
- @Bean
- public ItemReader<DummyObject> dummyItemReader() {
- FlatFileItemReader<DummyObject> csvFileReader = new FlatFileItemReader<>();
- csvFileReader.setResource(new ClassPathResource("dummy/dummy.txt"));
- csvFileReader.setLinesToSkip(1);
- LineMapper<DummyObject> lineMapper = dummyLineMapper();
- csvFileReader.setLineMapper(lineMapper);
- return csvFileReader;
- }
-
- @Bean
- public ItemProcessor<DummyObject, String> dummyItemProcessor() {
- return new DummyItemProcessor();
- }
-
- @Bean(name = "dummyItemWriter")
- public ItemWriter<String> dummyItemWriter() {
- return new DummyItemWriter();
- }
-
- private LineMapper<DummyObject> dummyLineMapper() {
- DefaultLineMapper<DummyObject> lineMapper = new DefaultLineMapper<>();
-
- LineTokenizer dummyTokenizer = dummyTokenizer();
- lineMapper.setLineTokenizer(dummyTokenizer);
-
- FieldSetMapper<DummyObject> dummyFieldSetMapper = dummyFieldSetMapper();
- lineMapper.setFieldSetMapper(dummyFieldSetMapper);
-
- return lineMapper;
- }
-
- private FieldSetMapper<DummyObject> dummyFieldSetMapper() {
- BeanWrapperFieldSetMapper<DummyObject> studentInformationMapper = new BeanWrapperFieldSetMapper<>();
- studentInformationMapper.setTargetType(DummyObject.class);
- return studentInformationMapper;
- }
-
- private LineTokenizer dummyTokenizer() {
- DelimitedLineTokenizer studentLineTokenizer = new DelimitedLineTokenizer();
- studentLineTokenizer.setDelimiter(",");
- studentLineTokenizer.setNames(new String[]{"f1", "f2"});
- return studentLineTokenizer;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
deleted file mode 100644
index 99c50e804e9..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.dummy;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobExecutionListener;
-
-public class DummyJobListener implements JobExecutionListener {
-
- private static final Logger LOG = LoggerFactory.getLogger(DummyJobListener.class);
-
- @Override
- public void beforeJob(JobExecution jobExecution) {
- LOG.info("Dummy - before job execution");
- }
-
- @Override
- public void afterJob(JobExecution jobExecution) {
- LOG.info("Dummy - after job execution");
- if (jobExecution.getExecutionContext().get("jobOutputLocation") != null) {
- String jobOutputLocation = (String) jobExecution.getExecutionContext().get("jobOutputLocation");
- String exitDescription = "file://" + jobOutputLocation;
- LOG.info("Add exit description '{}'", exitDescription);
- jobExecution.setExitStatus(new ExitStatus(ExitStatus.COMPLETED.getExitCode(), exitDescription));
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java
deleted file mode 100644
index ce087dd15cf..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.dummy;
-
-public class DummyObject {
- private String f1;
- private String f2;
-
- public String getF1() {
- return f1;
- }
-
- public void setF1(String f1) {
- this.f1 = f1;
- }
-
- public String getF2() {
- return f2;
- }
-
- public void setF2(String f2) {
- this.f2 = f2;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
deleted file mode 100644
index 548e6504eaf..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.dummy;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.StepExecutionListener;
-
-public class DummyStepListener implements StepExecutionListener {
-
- private static final Logger LOG = LoggerFactory.getLogger(DummyStepListener.class);
-
- @Override
- public void beforeStep(StepExecution stepExecution) {
- LOG.info("Dummy step - before step execution");
- }
-
- @Override
- public ExitStatus afterStep(StepExecution stepExecution) {
- LOG.info("Dummy step - after step execution");
- return stepExecution.getExitStatus();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
deleted file mode 100644
index f35387d468f..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.manager;
-
-import com.google.common.collect.Lists;
-import org.apache.ambari.infra.model.ExecutionContextResponse;
-import org.apache.ambari.infra.model.JobDetailsResponse;
-import org.apache.ambari.infra.model.JobExecutionDetailsResponse;
-import org.apache.ambari.infra.model.JobExecutionInfoResponse;
-import org.apache.ambari.infra.model.JobInstanceDetailsResponse;
-import org.apache.ambari.infra.model.JobOperationParams;
-import org.apache.ambari.infra.model.StepExecutionContextResponse;
-import org.apache.ambari.infra.model.StepExecutionInfoResponse;
-import org.apache.ambari.infra.model.StepExecutionProgressResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.admin.history.StepExecutionHistory;
-import org.springframework.batch.admin.service.JobService;
-import org.springframework.batch.admin.service.NoSuchStepExecutionException;
-import org.springframework.batch.admin.web.JobInfo;
-import org.springframework.batch.admin.web.StepExecutionProgress;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInstance;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.explore.JobExplorer;
-import org.springframework.batch.core.launch.JobExecutionNotRunningException;
-import org.springframework.batch.core.launch.JobOperator;
-import org.springframework.batch.core.launch.NoSuchJobException;
-import org.springframework.batch.core.launch.NoSuchJobExecutionException;
-import org.springframework.batch.core.launch.NoSuchJobInstanceException;
-import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
-import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
-import org.springframework.batch.core.repository.JobRestartException;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.TimeZone;
-
-@Named
-public class JobManager implements Jobs {
-
- private static final Logger LOG = LoggerFactory.getLogger(JobManager.class);
-
- @Inject
- private JobService jobService;
-
- @Inject
- private JobOperator jobOperator;
-
- @Inject
- private JobExplorer jobExplorer;
-
- private TimeZone timeZone = TimeZone.getDefault();
-
- public Set getAllJobNames() {
- return jobOperator.getJobNames();
- }
-
- /**
- * Launch a new job instance (based on job name) and applies customized parameters to it.
- * Also add a new date parameter to make sure the job instance will be unique
- */
- @Override
- public JobExecutionInfoResponse launchJob(String jobName, JobParameters jobParameters)
- throws JobParametersInvalidException, NoSuchJobException,
- JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException {
-
- Set running = jobExplorer.findRunningJobExecutions(jobName);
- if (!running.isEmpty())
- throw new JobExecutionAlreadyRunningException("An instance of this job is already active: "+jobName);
-
- return new JobExecutionInfoResponse(jobService.launch(jobName, jobParameters), timeZone);
- }
-
- @Override
- public void restart(Long jobExecutionId)
- throws JobInstanceAlreadyCompleteException, NoSuchJobException, JobExecutionAlreadyRunningException,
- JobParametersInvalidException, JobRestartException, NoSuchJobExecutionException {
- jobService.restart(jobExecutionId);
- }
-
- @Override
- public Optional lastRun(String jobName) throws NoSuchJobException {
- return jobService.listJobExecutionsForJob(jobName, 0, 1).stream().findFirst();
- }
-
- /**
- * Get all executions ids that mapped to specific job name,
- */
- public Set getExecutionIdsByJobName(String jobName) throws NoSuchJobException {
- return jobOperator.getRunningExecutions(jobName);
- }
-
- /**
- * Stop all running job executions and returns with the number of stopped jobs.
- */
- public Integer stopAllJobs() {
- return jobService.stopAll();
- }
-
- /**
- * Gather job execution details by job execution id.
- */
- public JobExecutionDetailsResponse getExecutionInfo(Long jobExecutionId) throws NoSuchJobExecutionException {
- JobExecution jobExecution = jobService.getJobExecution(jobExecutionId);
- List stepExecutionInfoList = new ArrayList<>();
- for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
- stepExecutionInfoList.add(new StepExecutionInfoResponse(stepExecution, timeZone));
- }
- stepExecutionInfoList.sort(Comparator.comparing(StepExecutionInfoResponse::getId));
- return new JobExecutionDetailsResponse(new JobExecutionInfoResponse(jobExecution, timeZone), stepExecutionInfoList);
- }
-
- /**
- * Stop or abandon a running job execution by job execution id
- */
- public JobExecutionInfoResponse stopOrAbandonJobByExecutionId(Long jobExecutionId, JobOperationParams.JobStopOrAbandonOperationParam operation)
- throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobExecutionAlreadyRunningException {
- JobExecution jobExecution;
- if (JobOperationParams.JobStopOrAbandonOperationParam.STOP.equals(operation)) {
- jobExecution = jobService.stop(jobExecutionId);
- } else if (JobOperationParams.JobStopOrAbandonOperationParam.ABANDON.equals(operation)) {
- jobExecution = jobService.abandon(jobExecutionId);
- } else {
- throw new UnsupportedOperationException("Unsupported operaration");
- }
- LOG.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name());
- return new JobExecutionInfoResponse(jobExecution, timeZone);
- }
-
- /**
- * Get execution context for a job execution instance. (context can be shipped between job executions)
- */
- public ExecutionContextResponse getExecutionContextByJobExecutionId(Long executionId) throws NoSuchJobExecutionException {
- JobExecution jobExecution = jobService.getJobExecution(executionId);
- Map executionMap = new HashMap<>();
- for (Map.Entry entry : jobExecution.getExecutionContext().entrySet()) {
- executionMap.put(entry.getKey(), entry.getValue());
- }
- return new ExecutionContextResponse(executionId, executionMap);
- }
-
- /**
- * Restart a specific job instance with the same parameters. (only restart operation is supported here)
- */
- public JobExecutionInfoResponse restart(Long jobInstanceId, String jobName,
- JobOperationParams.JobRestartOperationParam operation) throws NoSuchJobException, JobParametersInvalidException,
- JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobExecutionException {
- if (JobOperationParams.JobRestartOperationParam.RESTART.equals(operation)) {
- Collection jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstanceId);
- JobExecution jobExecution = jobExecutions.iterator().next();
- Long jobExecutionId = jobExecution.getId();
- return new JobExecutionInfoResponse(jobService.restart(jobExecutionId), timeZone);
- } else {
- throw new UnsupportedOperationException("Unsupported operation (try: RESTART)");
- }
- }
-
- /**
- * Get all job details. (paged)
- */
- public List getAllJobs(int start, int pageSize) {
- List jobs = new ArrayList<>();
- Collection names = jobService.listJobs(start, pageSize);
- for (String name : names) {
- int count = 0;
- try {
- count = jobService.countJobExecutionsForJob(name);
- }
- catch (NoSuchJobException e) {
- // shouldn't happen
- }
- boolean launchable = jobService.isLaunchable(name);
- boolean incrementable = jobService.isIncrementable(name);
- jobs.add(new JobInfo(name, count, null, launchable, incrementable));
- }
- return jobs;
- }
-
- /**
- * Get all executions for unique job instance.
- */
- public List getExecutionsForJobInstance(String jobName, Long jobInstanceId) throws NoSuchJobInstanceException, NoSuchJobException {
- List result = Lists.newArrayList();
- JobInstance jobInstance = jobService.getJobInstance(jobInstanceId);
- Collection jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getInstanceId());
- for (JobExecution jobExecution : jobExecutions) {
- result.add(new JobExecutionInfoResponse(jobExecution, timeZone));
- }
- return result;
- }
-
- /**
- * Get job details for a specific job. (paged)
- */
- public JobDetailsResponse getJobDetails(String jobName, int page, int size) throws NoSuchJobException {
- List jobInstanceResponses = Lists.newArrayList();
- Collection jobInstances = jobService.listJobInstances(jobName, page, size);
-
- int count = jobService.countJobExecutionsForJob(jobName);
- boolean launchable = jobService.isLaunchable(jobName);
- boolean isIncrementable = jobService.isIncrementable(jobName);
-
- for (JobInstance jobInstance: jobInstances) {
- List executionInfos = Lists.newArrayList();
- Collection jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getId());
- if (jobExecutions != null) {
- for (JobExecution jobExecution : jobExecutions) {
- executionInfos.add(new JobExecutionInfoResponse(jobExecution, timeZone));
- }
- }
- jobInstanceResponses.add(new JobInstanceDetailsResponse(jobInstance, executionInfos));
- }
- return new JobDetailsResponse(new JobInfo(jobName, count, launchable, isIncrementable), jobInstanceResponses);
- }
-
- /**
- * Get step execution details based for job execution id and step execution id.
- */
- public StepExecutionInfoResponse getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
- StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
- return new StepExecutionInfoResponse(stepExecution, timeZone);
- }
-
- /**
- * Get step execution context details. (execution context can be shipped between steps)
- */
- public StepExecutionContextResponse getStepExecutionContext(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
- StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
- Map executionMap = new HashMap<>();
- for (Map.Entry entry : stepExecution.getExecutionContext().entrySet()) {
- executionMap.put(entry.getKey(), entry.getValue());
- }
- return new StepExecutionContextResponse(executionMap, jobExecutionId, stepExecutionId, stepExecution.getStepName());
- }
-
- /**
- * Get step execution progress status detauls.
- */
- public StepExecutionProgressResponse getStepExecutionProgress(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
- StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId);
- StepExecutionInfoResponse stepExecutionInfoResponse = new StepExecutionInfoResponse(stepExecution, timeZone);
- String stepName = stepExecution.getStepName();
- if (stepName.contains(":partition")) {
- stepName = stepName.replaceAll("(:partition).*", "$1*");
- }
- String jobName = stepExecution.getJobExecution().getJobInstance().getJobName();
- StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName);
- StepExecutionProgress stepExecutionProgress = new StepExecutionProgress(stepExecution, stepExecutionHistory);
-
- return new StepExecutionProgressResponse(stepExecutionProgress, stepExecutionHistory, stepExecutionInfoResponse);
-
- }
-
- private StepExecutionHistory computeHistory(String jobName, String stepName) {
- int total = jobService.countStepExecutionsForStep(jobName, stepName);
- StepExecutionHistory stepExecutionHistory = new StepExecutionHistory(stepName);
- for (int i = 0; i < total; i += 1000) {
- for (StepExecution stepExecution : jobService.listStepExecutionsForStep(jobName, stepName, i, 1000)) {
- stepExecutionHistory.append(stepExecution);
- }
- }
- return stepExecutionHistory;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java
deleted file mode 100644
index b2ca605b61b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.manager;
-
-import org.apache.ambari.infra.model.JobExecutionInfoResponse;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.launch.NoSuchJobException;
-import org.springframework.batch.core.launch.NoSuchJobExecutionException;
-import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
-import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
-import org.springframework.batch.core.repository.JobRestartException;
-
-import java.util.Optional;
-
-public interface Jobs {
- JobExecutionInfoResponse launchJob(String jobName, JobParameters params)
- throws JobParametersInvalidException, NoSuchJobException,
- JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException;
- void restart(Long jobExecutionId)
- throws JobInstanceAlreadyCompleteException, NoSuchJobException, JobExecutionAlreadyRunningException,
- JobParametersInvalidException, JobRestartException, NoSuchJobExecutionException;
-
- Optional lastRun(String jobName) throws NoSuchJobException, NoSuchJobExecutionException;
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
deleted file mode 100644
index 2d46c5475c2..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import java.util.Map;
-
-public class ExecutionContextResponse {
-
- private final Long jobExecutionId;
- private final Map executionContextMap;
-
- public ExecutionContextResponse(Long jobExecutionId, Map executionContextMap) {
- this.jobExecutionId = jobExecutionId;
- this.executionContextMap = executionContextMap;
- }
-
- public Long getJobExecutionId() {
- return jobExecutionId;
- }
-
- public Map getExecutionContextMap() {
- return executionContextMap;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java
deleted file mode 100644
index cd34fefdbc3..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import org.springframework.batch.admin.web.JobInfo;
-
-import java.util.List;
-
-public class JobDetailsResponse {
-
- private JobInfo jobInfo;
- private List jobInstanceDetailsResponseList;
-
- public JobDetailsResponse() {
- }
-
- public JobDetailsResponse(JobInfo jobInfo, List jobInstanceDetailsResponseList) {
- this.jobInfo = jobInfo;
- this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList;
- }
-
- public JobInfo getJobInfo() {
- return jobInfo;
- }
-
- public void setJobInfo(JobInfo jobInfo) {
- this.jobInfo = jobInfo;
- }
-
- public List getJobInstanceDetailsResponseList() {
- return jobInstanceDetailsResponseList;
- }
-
- public void setJobInstanceDetailsResponseList(List jobInstanceDetailsResponseList) {
- this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
deleted file mode 100644
index 695b57f0bc7..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import java.util.List;
-
-public class JobExecutionDetailsResponse {
-
- private JobExecutionInfoResponse jobExecutionInfoResponse;
-
- private List stepExecutionInfoList;
-
- public JobExecutionDetailsResponse(JobExecutionInfoResponse jobExecutionInfoResponse, List stepExecutionInfoList) {
- this.jobExecutionInfoResponse = jobExecutionInfoResponse;
- this.stepExecutionInfoList = stepExecutionInfoList;
- }
-
- public JobExecutionInfoResponse getJobExecutionInfoResponse() {
- return jobExecutionInfoResponse;
- }
-
- public void setJobExecutionInfoResponse(JobExecutionInfoResponse jobExecutionInfoResponse) {
- this.jobExecutionInfoResponse = jobExecutionInfoResponse;
- }
-
- public List getStepExecutionInfoList() {
- return stepExecutionInfoList;
- }
-
- public void setStepExecutionInfoList(List stepExecutionInfoList) {
- this.stepExecutionInfoList = stepExecutionInfoList;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
deleted file mode 100644
index a7e4a4f0c44..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import org.apache.ambari.infra.model.wrapper.JobExecutionData;
-import org.springframework.batch.admin.web.JobParametersExtractor;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInstance;
-import org.springframework.batch.core.converter.DefaultJobParametersConverter;
-import org.springframework.batch.core.converter.JobParametersConverter;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Properties;
-import java.util.TimeZone;
-
-public class JobExecutionInfoResponse {
- private Long id;
- private int stepExecutionCount;
- private Long jobId;
- private String jobName;
- private String startDate = "";
- private String startTime = "";
- private String duration = "";
- private JobExecutionData jobExecutionData;
- private Properties jobParameters;
- private String jobParametersString;
- private boolean restartable = false;
- private boolean abandonable = false;
- private boolean stoppable = false;
- private final TimeZone timeZone;
-
-
- public JobExecutionInfoResponse(JobExecution jobExecution, TimeZone timeZone) {
- JobParametersConverter converter = new DefaultJobParametersConverter();
- this.jobExecutionData = new JobExecutionData(jobExecution);
- this.timeZone = timeZone;
- this.id = jobExecutionData.getId();
- this.jobId = jobExecutionData.getJobId();
- this.stepExecutionCount = jobExecutionData.getStepExecutions().size();
- this.jobParameters = converter.getProperties(jobExecutionData.getJobParameters());
- this.jobParametersString = (new JobParametersExtractor()).fromJobParameters(jobExecutionData.getJobParameters());
- JobInstance jobInstance = jobExecutionData.getJobInstance();
- if(jobInstance != null) {
- this.jobName = jobInstance.getJobName();
- BatchStatus endTime = jobExecutionData.getStatus();
- this.restartable = endTime.isGreaterThan(BatchStatus.STOPPING) && endTime.isLessThan(BatchStatus.ABANDONED);
- this.abandonable = endTime.isGreaterThan(BatchStatus.STARTED) && endTime != BatchStatus.ABANDONED;
- this.stoppable = endTime.isLessThan(BatchStatus.STOPPING);
- } else {
- this.jobName = "?";
- }
-
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
- SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
-
- durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
- timeFormat.setTimeZone(timeZone);
- dateFormat.setTimeZone(timeZone);
- if(jobExecutionData.getStartTime() != null) {
- this.startDate = dateFormat.format(jobExecutionData.getStartTime());
- this.startTime = timeFormat.format(jobExecutionData.getStartTime());
- Date endTime1 = jobExecutionData.getEndTime() != null? jobExecutionData.getEndTime():new Date();
- this.duration = durationFormat.format(new Date(endTime1.getTime() - jobExecutionData.getStartTime().getTime()));
- }
- }
-
- public Long getId() {
- return id;
- }
-
- public int getStepExecutionCount() {
- return stepExecutionCount;
- }
-
- public Long getJobId() {
- return jobId;
- }
-
- public String getJobName() {
- return jobName;
- }
-
- public String getStartDate() {
- return startDate;
- }
-
- public String getStartTime() {
- return startTime;
- }
-
- public String getDuration() {
- return duration;
- }
-
- public JobExecutionData getJobExecutionData() {
- return jobExecutionData;
- }
-
- public Properties getJobParameters() {
- return jobParameters;
- }
-
- public String getJobParametersString() {
- return jobParametersString;
- }
-
- public boolean isRestartable() {
- return restartable;
- }
-
- public boolean isAbandonable() {
- return abandonable;
- }
-
- public boolean isStoppable() {
- return stoppable;
- }
-
- public TimeZone getTimeZone() {
- return timeZone;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java
deleted file mode 100644
index b4c20e9ffba..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.ws.rs.PathParam;
-
-public class JobExecutionRequest {
-
- @PathParam("jobName")
- private String jobName;
-
- @PathParam("jobInstanceId")
- private Long jobInstanceId;
-
- public String getJobName() {
- return jobName;
- }
-
- public Long getJobInstanceId() {
- return jobInstanceId;
- }
-
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
-
- public void setJobInstanceId(Long jobInstanceId) {
- this.jobInstanceId = jobInstanceId;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
deleted file mode 100644
index 3eab25f4e42..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-
-public class JobExecutionRestartRequest {
-
- @PathParam("jobName")
- @NotNull
- private String jobName;
-
- @PathParam("jobInstanceId")
- @NotNull
- private Long jobInstanceId;
-
- @QueryParam("operation")
- @NotNull
- private JobOperationParams.JobRestartOperationParam operation;
-
- public String getJobName() {
- return jobName;
- }
-
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
-
- public Long getJobInstanceId() {
- return jobInstanceId;
- }
-
- public void setJobExecutionId(Long jobExecutionId) {
- this.jobInstanceId = jobExecutionId;
- }
-
- public JobOperationParams.JobRestartOperationParam getOperation() {
- return operation;
- }
-
- public void setOperation(JobOperationParams.JobRestartOperationParam operation) {
- this.operation = operation;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
deleted file mode 100644
index b176f125fae..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-
-public class JobExecutionStopRequest {
-
- @PathParam("jobExecutionId")
- @NotNull
- private Long jobExecutionId;
-
- @QueryParam("operation")
- @NotNull
- private JobOperationParams.JobStopOrAbandonOperationParam operation;
-
- public Long getJobExecutionId() {
- return jobExecutionId;
- }
-
- public void setJobExecutionId(Long jobExecutionId) {
- this.jobExecutionId = jobExecutionId;
- }
-
- public JobOperationParams.JobStopOrAbandonOperationParam getOperation() {
- return operation;
- }
-
- public void setOperation(JobOperationParams.JobStopOrAbandonOperationParam operation) {
- this.operation = operation;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
deleted file mode 100644
index af886545d9d..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import org.springframework.batch.core.JobInstance;
-
-import java.util.List;
-
-public class JobInstanceDetailsResponse {
-
- private JobInstance jobInstance;
-
- private List jobExecutionInfoResponseList;
-
- public JobInstanceDetailsResponse() {
- }
-
- public JobInstanceDetailsResponse(JobInstance jobInstance, List jobExecutionInfoResponseList) {
- this.jobInstance = jobInstance;
- this.jobExecutionInfoResponseList = jobExecutionInfoResponseList;
- }
-
- public JobInstance getJobInstance() {
- return jobInstance;
- }
-
- public void setJobInstance(JobInstance jobInstance) {
- this.jobInstance = jobInstance;
- }
-
- public List getJobExecutionInfoResponseList() {
- return jobExecutionInfoResponseList;
- }
-
- public void setJobExecutionInfoResponseList(List jobExecutionInfoResponseList) {
- this.jobExecutionInfoResponseList = jobExecutionInfoResponseList;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
deleted file mode 100644
index 905a4fa6f67..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-
-public class JobInstanceStartRequest {
-
- @PathParam("jobName")
- @NotNull
- private String jobName;
-
- @QueryParam("params")
- String params;
-
- public String getJobName() {
- return jobName;
- }
-
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
-
- public String getParams() {
- return params;
- }
-
- public void setParams(String params) {
- this.params = params;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java
deleted file mode 100644
index e286debee1b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-public class JobOperationParams {
-
- public enum JobStopOrAbandonOperationParam {
- STOP, ABANDON;
- }
-
- public enum JobRestartOperationParam {
- RESTART;
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java
deleted file mode 100644
index b4fd4785e1a..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.PathParam;
-
-public class JobRequest extends PageRequest {
-
- @NotNull
- @PathParam("jobName")
- private String jobName;
-
- public String getJobName() {
- return jobName;
- }
-
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java
deleted file mode 100644
index 679d4fd8220..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.QueryParam;
-
-public class PageRequest {
-
- @QueryParam("page")
- @DefaultValue("0")
- private int page;
-
- @QueryParam("size")
- @DefaultValue("20")
- private int size;
-
- public int getPage() {
- return page;
- }
-
- public void setPage(int page) {
- this.page = page;
- }
-
- public int getSize() {
- return size;
- }
-
- public void setSize(int size) {
- this.size = size;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
deleted file mode 100644
index 0e67a8795e9..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import java.util.Map;
-
-public class StepExecutionContextResponse {
-
- private Map executionContextMap;
-
- private Long jobExecutionId;
-
- private Long stepExecutionId;
-
- private String stepName;
-
- public StepExecutionContextResponse() {
- }
-
- public StepExecutionContextResponse(Map executionContextMap, Long jobExecutionId, Long stepExecutionId, String stepName) {
- this.executionContextMap = executionContextMap;
- this.jobExecutionId = jobExecutionId;
- this.stepExecutionId = stepExecutionId;
- this.stepName = stepName;
- }
-
- public Map getExecutionContextMap() {
- return executionContextMap;
- }
-
- public Long getJobExecutionId() {
- return jobExecutionId;
- }
-
- public Long getStepExecutionId() {
- return stepExecutionId;
- }
-
- public String getStepName() {
- return stepName;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
deleted file mode 100644
index ed04767bd41..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import org.apache.ambari.infra.model.wrapper.StepExecutionData;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.StepExecution;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.TimeZone;
-
-public class StepExecutionInfoResponse {
- private Long id;
- private Long jobExecutionId;
- private String jobName;
- private String name;
- private String startDate = "-";
- private String startTime = "-";
- private String duration = "-";
- private StepExecutionData stepExecutionData;
- private long durationMillis;
-
- public StepExecutionInfoResponse(String jobName, Long jobExecutionId, String name, TimeZone timeZone) {
- this.jobName = jobName;
- this.jobExecutionId = jobExecutionId;
- this.name = name;
- this.stepExecutionData = new StepExecutionData(new StepExecution(name, new JobExecution(jobExecutionId)));
- }
-
- public StepExecutionInfoResponse(StepExecution stepExecution, TimeZone timeZone) {
- this.stepExecutionData = new StepExecutionData(stepExecution);
- this.id = stepExecutionData.getId();
- this.name = stepExecutionData.getStepName();
- this.jobName = stepExecutionData.getJobExecution() != null && stepExecutionData.getJobExecution().getJobInstance() != null? stepExecutionData.getJobExecution().getJobInstance().getJobName():"?";
- this.jobExecutionId = stepExecutionData.getJobExecutionId();
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
- SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
-
- durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
- timeFormat.setTimeZone(timeZone);
- dateFormat.setTimeZone(timeZone);
- if(stepExecutionData.getStartTime() != null) {
- this.startDate = dateFormat.format(stepExecutionData.getStartTime());
- this.startTime = timeFormat.format(stepExecutionData.getStartTime());
- Date endTime = stepExecutionData.getEndTime() != null? stepExecutionData.getEndTime():new Date();
- this.durationMillis = endTime.getTime() - stepExecutionData.getStartTime().getTime();
- this.duration = durationFormat.format(new Date(this.durationMillis));
- }
-
- }
-
- public Long getId() {
- return this.id;
- }
-
- public Long getJobExecutionId() {
- return this.jobExecutionId;
- }
-
- public String getName() {
- return this.name;
- }
-
- public String getJobName() {
- return this.jobName;
- }
-
- public String getStartDate() {
- return this.startDate;
- }
-
- public String getStartTime() {
- return this.startTime;
- }
-
- public String getDuration() {
- return this.duration;
- }
-
- public long getDurationMillis() {
- return this.durationMillis;
- }
-
- public String getStatus() {
- return this.id != null?this.stepExecutionData.getStatus().toString():"NONE";
- }
-
- public String getExitCode() {
- return this.id != null?this.stepExecutionData.getExitStatus().getExitCode():"NONE";
- }
-
- @JsonIgnore
- public StepExecutionData getStepExecution() {
- return this.stepExecutionData;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
deleted file mode 100644
index 26f9ed4f9ba..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import org.springframework.batch.admin.history.StepExecutionHistory;
-import org.springframework.batch.admin.web.StepExecutionProgress;
-
-public class StepExecutionProgressResponse {
-
- private StepExecutionProgress stepExecutionProgress;
-
- private StepExecutionHistory stepExecutionHistory;
-
- private StepExecutionInfoResponse stepExecutionInfoResponse;
-
- public StepExecutionProgressResponse() {
- }
-
- public StepExecutionProgressResponse(StepExecutionProgress stepExecutionProgress, StepExecutionHistory stepExecutionHistory,
- StepExecutionInfoResponse stepExecutionInfoResponse) {
- this.stepExecutionProgress = stepExecutionProgress;
- this.stepExecutionHistory = stepExecutionHistory;
- this.stepExecutionInfoResponse = stepExecutionInfoResponse;
- }
-
- public StepExecutionProgress getStepExecutionProgress() {
- return stepExecutionProgress;
- }
-
- public StepExecutionHistory getStepExecutionHistory() {
- return stepExecutionHistory;
- }
-
- public StepExecutionInfoResponse getStepExecutionInfoResponse() {
- return stepExecutionInfoResponse;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java
deleted file mode 100644
index 2228171271b..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model;
-
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.PathParam;
-
-public class StepExecutionRequest {
-
- @PathParam("jobExecutionId")
- @NotNull
- private Long jobExecutionId;
-
- @PathParam("stepExecutionId")
- @NotNull
- private Long stepExecutionId;
-
- public Long getJobExecutionId() {
- return jobExecutionId;
- }
-
- public void setJobExecutionId(Long jobExecutionId) {
- this.jobExecutionId = jobExecutionId;
- }
-
- public Long getStepExecutionId() {
- return stepExecutionId;
- }
-
- public void setStepExecutionId(Long stepExecutionId) {
- this.stepExecutionId = stepExecutionId;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java
deleted file mode 100644
index 28e262ae6db..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model.wrapper;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.google.common.collect.Lists;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInstance;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.item.ExecutionContext;
-
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-
-/**
- * Wrapper for #{{@link JobExecution}}
- */
-public class JobExecutionData {
-
- private JobExecution jobExecution;
-
- public JobExecutionData(JobExecution jobExecution) {
- this.jobExecution = jobExecution;
- }
-
- @JsonIgnore
- public JobExecution getJobExecution() {
- return jobExecution;
- }
-
- @JsonIgnore
- public Collection getStepExecutions() {
- return jobExecution.getStepExecutions();
- }
-
- public JobParameters getJobParameters() {
- return jobExecution.getJobParameters();
- }
-
- public JobInstance getJobInstance() {
- return jobExecution.getJobInstance();
- }
-
- public Collection getStepExecutionDataList() {
- List stepExecutionDataList = Lists.newArrayList();
- Collection stepExecutions = getStepExecutions();
- if (stepExecutions != null) {
- for (StepExecution stepExecution : stepExecutions) {
- stepExecutionDataList.add(new StepExecutionData(stepExecution));
- }
- }
- return stepExecutionDataList;
- }
-
- public BatchStatus getStatus() {
- return jobExecution.getStatus();
- }
-
- public Date getStartTime() {
- return jobExecution.getStartTime();
- }
-
- public Date getCreateTime() {
- return jobExecution.getCreateTime();
- }
-
- public Date getEndTime() {
- return jobExecution.getEndTime();
- }
-
- public Date getLastUpdated() {
- return jobExecution.getLastUpdated();
- }
-
- public ExitStatus getExitStatus() {
- return jobExecution.getExitStatus();
- }
-
- public ExecutionContext getExecutionContext() {
- return jobExecution.getExecutionContext();
- }
-
- public List getFailureExceptions() {
- return jobExecution.getFailureExceptions();
- }
-
- public String getJobConfigurationName() {
- return jobExecution.getJobConfigurationName();
- }
-
- public Long getId() {
- return jobExecution.getId();
- }
-
- public Long getJobId() {
- return jobExecution.getJobId();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java
deleted file mode 100644
index 26552ae6b85..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.model.wrapper;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.item.ExecutionContext;
-
-import java.util.Date;
-import java.util.List;
-
-/**
- * Wrapper for #{{@link StepExecution}}
- */
-public class StepExecutionData {
-
- @JsonIgnore
- private final JobExecution jobExecution;
-
- @JsonIgnore
- private final StepExecution stepExecution;
-
-
- public StepExecutionData(StepExecution stepExecution) {
- this.stepExecution = stepExecution;
- this.jobExecution = stepExecution.getJobExecution();
- }
-
- @JsonIgnore
- public JobExecution getJobExecution() {
- return jobExecution;
- }
-
- @JsonIgnore
- public StepExecution getStepExecution() {
- return stepExecution;
- }
-
- public String getStepName() {
- return stepExecution.getStepName();
- }
-
- public int getReadCount() {
- return stepExecution.getReadCount();
- }
-
- public BatchStatus getStatus() {
- return stepExecution.getStatus();
- }
-
- public int getWriteCount() {
- return stepExecution.getWriteCount();
- }
-
- public int getCommitCount() {
- return stepExecution.getCommitCount();
- }
-
- public int getRollbackCount() {
- return stepExecution.getRollbackCount();
- }
-
- public int getReadSkipCount() {
- return stepExecution.getReadSkipCount();
- }
-
- public int getProcessSkipCount() {
- return stepExecution.getProcessSkipCount();
- }
-
- public Date getStartTime() {
- return stepExecution.getStartTime();
- }
-
- public int getWriteSkipCount() {
- return stepExecution.getWriteSkipCount();
- }
-
- public Date getEndTime() {
- return stepExecution.getEndTime();
- }
-
- public Date getLastUpdated() {
- return stepExecution.getLastUpdated();
- }
-
- public ExecutionContext getExecutionContext() {
- return stepExecution.getExecutionContext();
- }
-
- public ExitStatus getExitStatus() {
- return stepExecution.getExitStatus();
- }
-
- public boolean isTerminateOnly() {
- return stepExecution.isTerminateOnly();
- }
-
- public int getFilterCount() {
- return stepExecution.getFilterCount();
- }
-
- public List getFailureExceptions() {
- return stepExecution.getFailureExceptions();
- }
-
- public Long getId() {
- return stepExecution.getId();
- }
-
- public Long getJobExecutionId() {
- return stepExecution.getJobExecutionId();
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java
deleted file mode 100644
index 18dfdd9d21c..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.rest;
-
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.infra.doc.InfraManagerApiDocStorage;
-import org.springframework.context.annotation.Scope;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-@Path("swagger.{type:json|yaml}")
-@Named
-@Scope("request")
-public class ApiDocResource {
-
- @Inject
- private InfraManagerApiDocStorage infraManagerApiDocStorage;
-
- @GET
- @Produces({MediaType.APPLICATION_JSON, "application/yaml"})
- @ApiOperation(value = "The swagger definition in either JSON or YAML", hidden = true)
- public Response swaggerDefinitionResponse(@PathParam("type") String type) {
- Response response = Response.status(404).build();
- if (infraManagerApiDocStorage.getSwagger() != null) {
- if ("yaml".equalsIgnoreCase(type)) {
- response = Response.ok().entity(infraManagerApiDocStorage.getSwaggerYaml()).type("application/yaml").build();
- } else {
- response = Response.ok().entity(infraManagerApiDocStorage.getSwagger()).build();
- }
- }
- return response;
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
deleted file mode 100644
index 079cce3e115..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.rest;
-
-
-import com.google.common.collect.Maps;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.admin.service.NoSuchStepExecutionException;
-import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.launch.JobExecutionNotFailedException;
-import org.springframework.batch.core.launch.JobExecutionNotRunningException;
-import org.springframework.batch.core.launch.JobExecutionNotStoppedException;
-import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException;
-import org.springframework.batch.core.launch.JobParametersNotFoundException;
-import org.springframework.batch.core.launch.NoSuchJobException;
-import org.springframework.batch.core.launch.NoSuchJobExecutionException;
-import org.springframework.batch.core.launch.NoSuchJobInstanceException;
-import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
-import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
-import org.springframework.batch.core.repository.JobRestartException;
-import org.springframework.batch.core.step.NoSuchStepException;
-import org.springframework.web.bind.MethodArgumentNotValidException;
-
-import javax.batch.operations.JobExecutionAlreadyCompleteException;
-import javax.inject.Named;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import java.util.Map;
-
-@Named
-@Provider
-public class JobExceptionMapper implements ExceptionMapper {
-
- private static final Logger LOG = LoggerFactory.getLogger(JobExceptionMapper.class);
-
- private static final Map exceptionStatusCodeMap = Maps.newHashMap();
-
- static {
- exceptionStatusCodeMap.put(MethodArgumentNotValidException.class, Response.Status.BAD_REQUEST);
- exceptionStatusCodeMap.put(NoSuchJobException.class, Response.Status.NOT_FOUND);
- exceptionStatusCodeMap.put(NoSuchStepException.class, Response.Status.NOT_FOUND);
- exceptionStatusCodeMap.put(NoSuchStepExecutionException.class, Response.Status.NOT_FOUND);
- exceptionStatusCodeMap.put(NoSuchJobExecutionException.class, Response.Status.NOT_FOUND);
- exceptionStatusCodeMap.put(NoSuchJobInstanceException.class, Response.Status.NOT_FOUND);
- exceptionStatusCodeMap.put(JobExecutionNotRunningException.class, Response.Status.INTERNAL_SERVER_ERROR);
- exceptionStatusCodeMap.put(JobExecutionNotStoppedException.class, Response.Status.INTERNAL_SERVER_ERROR);
- exceptionStatusCodeMap.put(JobInstanceAlreadyExistsException.class, Response.Status.ACCEPTED);
- exceptionStatusCodeMap.put(JobInstanceAlreadyCompleteException.class, Response.Status.ACCEPTED);
- exceptionStatusCodeMap.put(JobExecutionAlreadyRunningException.class, Response.Status.ACCEPTED);
- exceptionStatusCodeMap.put(JobExecutionAlreadyCompleteException.class, Response.Status.ACCEPTED);
- exceptionStatusCodeMap.put(JobParametersNotFoundException.class, Response.Status.NOT_FOUND);
- exceptionStatusCodeMap.put(JobExecutionNotFailedException.class, Response.Status.INTERNAL_SERVER_ERROR);
- exceptionStatusCodeMap.put(JobRestartException.class, Response.Status.INTERNAL_SERVER_ERROR);
- exceptionStatusCodeMap.put(JobParametersInvalidException.class, Response.Status.BAD_REQUEST);
- }
-
- @Override
- public Response toResponse(Throwable throwable) {
- LOG.error("REST Exception occurred:", throwable);
- Response.Status status = Response.Status.INTERNAL_SERVER_ERROR;
-
- for (Map.Entry entry : exceptionStatusCodeMap.entrySet()) {
- if (throwable.getClass().isAssignableFrom(entry.getKey())) {
- status = entry.getValue();
- LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode());
- break;
- }
- }
-
- return Response.status(status).entity(new StatusMessage(throwable.getMessage(), status.getStatusCode()))
- .type(MediaType.APPLICATION_JSON_TYPE).build();
- }
-
- private class StatusMessage {
- private String message;
- private int statusCode;
-
- StatusMessage(String message, int statusCode) {
- this.message = message;
- this.statusCode = statusCode;
- }
-
- public String getMessage() {
- return message;
- }
-
- public int getStatusCode() {
- return statusCode;
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
deleted file mode 100644
index 502057e078f..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.rest;
-
-import com.google.common.base.Splitter;
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.infra.manager.JobManager;
-import org.apache.ambari.infra.model.ExecutionContextResponse;
-import org.apache.ambari.infra.model.JobDetailsResponse;
-import org.apache.ambari.infra.model.JobExecutionDetailsResponse;
-import org.apache.ambari.infra.model.JobExecutionInfoResponse;
-import org.apache.ambari.infra.model.JobExecutionRequest;
-import org.apache.ambari.infra.model.JobExecutionRestartRequest;
-import org.apache.ambari.infra.model.JobExecutionStopRequest;
-import org.apache.ambari.infra.model.JobInstanceStartRequest;
-import org.apache.ambari.infra.model.JobRequest;
-import org.apache.ambari.infra.model.PageRequest;
-import org.apache.ambari.infra.model.StepExecutionContextResponse;
-import org.apache.ambari.infra.model.StepExecutionInfoResponse;
-import org.apache.ambari.infra.model.StepExecutionProgressResponse;
-import org.apache.ambari.infra.model.StepExecutionRequest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.batch.admin.service.NoSuchStepExecutionException;
-import org.springframework.batch.admin.web.JobInfo;
-import org.springframework.batch.core.JobParametersBuilder;
-import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.launch.JobExecutionNotRunningException;
-import org.springframework.batch.core.launch.NoSuchJobException;
-import org.springframework.batch.core.launch.NoSuchJobExecutionException;
-import org.springframework.batch.core.launch.NoSuchJobInstanceException;
-import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
-import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
-import org.springframework.batch.core.repository.JobRestartException;
-import org.springframework.context.annotation.Scope;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.validation.Valid;
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.BeanParam;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import java.util.List;
-import java.util.Set;
-
-@Api(value = "jobs", description = "Job operations")
-@Path("jobs")
-@Named
-@Scope("request")
-public class JobResource {
- private static final Logger LOG = LoggerFactory.getLogger(JobResource.class);
-
- @Inject
- private JobManager jobManager;
-
- @GET
- @Produces({"application/json"})
- @ApiOperation("Get all jobs")
- public List getAllJobs(@BeanParam @Valid PageRequest request) {
- return jobManager.getAllJobs(request.getPage(), request.getSize());
- }
-
- @POST
- @Produces({"application/json"})
- @Path("{jobName}")
- @ApiOperation("Start a new job instance by job name.")
- public JobExecutionInfoResponse startJob(@BeanParam @Valid JobInstanceStartRequest request)
- throws JobParametersInvalidException, NoSuchJobException, JobExecutionAlreadyRunningException,
- JobRestartException, JobInstanceAlreadyCompleteException {
-
- String jobName = request.getJobName();
- String params = request.getParams();
- JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
- if (params != null) {
- LOG.info("Parsing parameters of job {} '{}'", jobName, params);
- Splitter.on(',')
- .trimResults()
- .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
- .split(params).forEach(jobParametersBuilder::addString);
- }
-
- return jobManager.launchJob(jobName, jobParametersBuilder.toJobParameters());
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/info/names")
- @ApiOperation("Get all job names")
- public Set getAllJobNames() {
- return jobManager.getAllJobNames();
- }
-
- @GET
- @Produces({"application/json"})
- @Path("{jobName}/info")
- @ApiOperation("Get job details by job name.")
- public JobDetailsResponse getJobDetails(@BeanParam @Valid JobRequest jobRequest) throws NoSuchJobException {
- return jobManager.getJobDetails(jobRequest.getJobName(), jobRequest.getPage(), jobRequest.getSize());
- }
-
- @GET
- @Path("{jobName}/executions")
- @Produces({"application/json"})
- @ApiOperation("Get the id values of all the running job instances.")
- public Set getExecutionIdsByJobName(@PathParam("jobName") @NotNull @Valid String jobName) throws NoSuchJobException {
- return jobManager.getExecutionIdsByJobName(jobName);
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/executions/{jobExecutionId}")
- @ApiOperation("Get job and step details for job execution instance.")
- public JobExecutionDetailsResponse getExectionInfo(@PathParam("jobExecutionId") @Valid Long jobExecutionId) throws NoSuchJobExecutionException {
- return jobManager.getExecutionInfo(jobExecutionId);
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/executions/{jobExecutionId}/context")
- @ApiOperation("Get execution context for specific job.")
- public ExecutionContextResponse getExecutionContextByJobExecId(@PathParam("jobExecutionId") Long executionId) throws NoSuchJobExecutionException {
- return jobManager.getExecutionContextByJobExecutionId(executionId);
- }
-
-
- @DELETE
- @Produces({"application/json"})
- @Path("/executions/{jobExecutionId}")
- @ApiOperation("Stop or abandon a running job execution.")
- public JobExecutionInfoResponse stopOrAbandonJobExecution(@BeanParam @Valid JobExecutionStopRequest request)
- throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobExecutionAlreadyRunningException {
- return jobManager.stopOrAbandonJobByExecutionId(request.getJobExecutionId(), request.getOperation());
- }
-
- @DELETE
- @Produces({"application/json"})
- @Path("/executions")
- @ApiOperation("Stop all job executions.")
- public Integer stopAll() {
- return jobManager.stopAllJobs();
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/{jobName}/{jobInstanceId}/executions")
- @ApiOperation("Get execution for job instance.")
- public List getExecutionsForInstance(@BeanParam @Valid JobExecutionRequest request) throws
- NoSuchJobException, NoSuchJobInstanceException {
- return jobManager.getExecutionsForJobInstance(request.getJobName(), request.getJobInstanceId());
- }
-
- @POST
- @Produces({"application/json"})
- @Path("/{jobName}/{jobInstanceId}/executions")
- @ApiOperation("Restart job instance.")
- public JobExecutionInfoResponse restartJobInstance(@BeanParam @Valid JobExecutionRestartRequest request) throws JobInstanceAlreadyCompleteException,
- NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobParametersInvalidException, JobRestartException, NoSuchJobException {
- return jobManager.restart(request.getJobInstanceId(), request.getJobName(), request.getOperation());
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}")
- @ApiOperation("Get step execution details.")
- public StepExecutionInfoResponse getStepExecution(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
- return jobManager.getStepExecution(request.getJobExecutionId(), request.getStepExecutionId());
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}/execution-context")
- @ApiOperation("Get the execution context of step execution.")
- public StepExecutionContextResponse getStepExecutionContext(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
- return jobManager.getStepExecutionContext(request.getJobExecutionId(), request.getStepExecutionId());
- }
-
- @GET
- @Produces({"application/json"})
- @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}/progress")
- @ApiOperation("Get progress of step execution.")
- public StepExecutionProgressResponse getStepExecutionProgress(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException {
- return jobManager.getStepExecutionProgress(request.getJobExecutionId(), request.getStepExecutionId());
- }
-
-}
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt b/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt
deleted file mode 100644
index 41da7250cb6..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-f1,f2
-v1,v2
-v3,v4
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
deleted file mode 100644
index 9a371fd7cc0..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Extend with java options or system properties. e.g.: INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n"
-export INFRA_MANAGER_OPTS=""
-
-# Log Search debug options
-# export INFRA_MANAGER_DEBUG=true
-# export INFRA_MANAGER_DEBUG_SUSPEND=n
-export INFRA_MANAGER_DEBUG_PORT=5005
-
-# Log Search memory
-# export INFRA_MANAGER_JAVA_MEM="--Xmx1024m"
-
-# export LOG_PATH=/var/log/ambari-logsearch-logfeeder/
-# export LOG_FILE=logsearch.log
-
-# Pid file of the application
-# export INFRA_MANAGER_PID_DIR=/var/run/ambari-infra-manager
-# export INFRA_MANAGER_PID_FILE=infra-manager.pid
-
-# SSL settings"
-# export INFRA_MANAGER_SSL="true"
-# export INFRA_MANAGER_KEYSTORE_LOCATION="/my/path/keystore.jks"
-# export INFRA_MANAGER_KEYSTORE_TYPE="jks"
-# export INFRA_MANAGER_TRUSTSTORE_LOCATION="/my/path/trutstore.jks"
-# export INFRA_MANAGER_TRUSTSTORE_TYPE="jks"
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
deleted file mode 100644
index a0712bae047..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
+++ /dev/null
@@ -1,74 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-infra-manager.batch.db.file=job-repository.db
-infra-manager.batch.db.init=false
-infra-manager.batch.db.username=admin
-infra-manager.batch.db.password=admin
-management.security.enabled=false
-management.health.solr.enabled=false
-infra-manager.server.data.folder=/tmp/ambariInfraManager
-
-infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=zookeeper:2181
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logtime:[${start} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100
-infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=LOCAL
-infra-manager.jobs.solr_data_archiving.archive_service_logs.local_destination_directory=/tmp/ambariInfraManager
-infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime
-infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=true
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.cron=0 * * * * ?
-infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.intervalEndDelta=PT24H
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.enabled=true
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.collection=audit_logs
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=logtime:[${start} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=logtime
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=S3
-# TODO: logtime may not be enough: The same filename can be generated when more than write_block_size count docs has the same logtime value
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=logtime
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/test_audit_logs
-#infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_access_file=.csv
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_key_prefix=solr_archive_
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_bucket_name=testbucket
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_endpoint=http://fakes3:4569
-# TODO: configure ranger audit logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.zoo_keeper_connection_string=zookeeper:2181
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.read_block_size=100
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.write_block_size=150
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.file_name_suffix_column=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.destination_directory_path=/tmp/ambariInfraManager
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.collection=hadoop_logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.query_text=logtime:[* TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.filter_query_text=(logtime:"${logtime}" AND id:{"${id}" TO *]) OR logtime:{"${logtime}" TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[0]=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[1]=id
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.enabled=true
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.zoo_keeper_connection_string=zookeeper:2181
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.collection=audit_logs
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=logtime
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
deleted file mode 100644
index 5ac32e30bbb..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [ "$INFRA_MANAGER_JAVA_MEM" = "" ]; then
- INFRA_MANAGER_JAVA_MEM="-Xmx1g"
-fi
-
-readlinkf(){
- # get real path on mac OSX
- perl -MCwd -e 'print Cwd::abs_path shift' "$1";
-}
-
-if [ "$(uname -s)" = 'Linux' ]; then
- SCRIPT_DIR="`dirname "$(readlink -f "$0")"`"
-else
- SCRIPT_DIR="`dirname "$(readlinkf "$0")"`"
-fi
-
-INFRA_MANAGER_ROOT_DIR="`dirname \"$SCRIPT_DIR\"`"
-INFRA_MANAGER_LIBS_DIR="$INFRA_MANAGER_ROOT_DIR/libs"
-
-if [ "$INFRA_MANAGER_CONF_DIR" = "" ]; then
- if [ -d "$INFRA_MANAGER_ROOT_DIR/conf" ]; then
- INFRA_MANAGER_CONF_DIR="$INFRA_MANAGER_ROOT_DIR/conf"
- fi
-fi
-
-if [ -f "$INFRA_MANAGER_CONF_DIR/infra-manager-env.sh" ]; then
- source $INFRA_MANAGER_CONF_DIR/infra-manager-env.sh
-fi
-
-JVM="java"
-
-if [ -x $JAVA_HOME/bin/java ]; then
- JVM=$JAVA_HOME/bin/java
-fi
-
-if [ ! -z "$INFRA_MANAGER_SOLR_CLIENT_SSL_INCLUDE" ]; then
- source $INFRA_MANAGER_SOLR_CLIENT_SSL_INCLUDE
-fi
-
-if [ -z "$INFRA_MANAGER_PID_FILE" ]; then
- INFRA_MANAGER_PID_DIR=$HOME
- export INFRA_MANAGER_PID_FILE=$INFRA_MANAGER_PID_DIR/infra-manager.pid
-fi
-
-if [ -z "$LOG_FILE" ]; then
- export LOG_FILE="infra-manager.log"
-fi
-
-INFRA_MANAGER_GC_LOGFILE="infra-manager-gc.log"
-
-if [ -z "$LOG_PATH" ]; then
- LOG_FILE="$HOME/$LOG_FILE"
- INFRA_MANAGER_GC_LOGFILE="$HOME/$INFRA_MANAGER_GC_LOGFILE"
-else
- LOG_PATH_WITHOUT_SLASH=${LOG_PATH%/}
- LOG_FILE="$LOG_PATH_WITHOUT_SLASH/$LOG_FILE"
- INFRA_MANAGER_GC_LOGFILE="$LOG_PATH_WITHOUT_SLASH/$INFRA_MANAGER_GC_LOGFILE"
-fi
-
-INFRA_MANAGER_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$INFRA_MANAGER_GC_LOGFILE"
-
-function print_usage() {
- cat << EOF
-
- Usage: [] []
-
- commands:
- start Start Infra Manager
- stop Stop Infra Manager
- status Check Infra Manager is running or not (pid file)
- help Print usage
-
-
- start command arguments:
- -d, --debug Start java process in debug mode
- -f, --foreground Start java process in foreground
-
-EOF
-}
-
-function spinner() {
- local pid=$1
- local delay=0.5
- local spinstr='|/-\'
- while [ "$(ps aux | awk '{print $2}' | grep -w $pid)" ]; do
- local temp=${spinstr#?}
- printf " [%c] " "$spinstr"
- local spinstr=$temp${spinstr%"$temp"}
- sleep $delay
- printf "\b\b\b\b\b\b"
- done
- printf " \b\b\b\b"
-}
-
-function status() {
- echo "Checking Infra Manager status ..." >&2
- if [ -f "$INFRA_MANAGER_PID_FILE" ]; then
- INFRA_MANAGER_PID=`cat "$INFRA_MANAGER_PID_FILE"`
- else
- echo "Infra Manager pid not exists. (probably the process is not running)" >&2
- return 1
- fi
-
- if ps -p $INFRA_MANAGER_PID > /dev/null
- then
- echo "Infra Manager process is running. (pid: $INFRA_MANAGER_PID)" >&2
- return 0
- else
- echo "Infra Manager process is not running." >&2
- return 1
- fi
-}
-
-function start() {
- exit_status=$(status; echo $?)
- if [ "$exit_status" = "0" ]; then
- echo "Skipping start process."
- exit 0
- fi
-
- FG="false"
- INFRA_MANAGER_DEBUG_SUSPEND=${INFRA_MANAGER_DEBUG_SUSPEND:-n}
- INFRA_MANAGER_DEBUG_PORT=${INFRA_MANAGER_DEBUG_PORT:-"5005"}
-
- if [ "$INFRA_MANAGER_DEBUG" = "true" ]; then
- INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$INFRA_MANAGER_DEBUG_PORT,server=y,suspend=$INFRA_MANAGER_DEBUG_SUSPEND "
- fi
-
- if [ "$INFRA_MANAGER_SSL" = "true" ]; then
- INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Djavax.net.ssl.keyStore=$INFRA_MANAGER_KEYSTORE_LOCATION -Djavax.net.ssl.keyStoreType=$INFRA_MANAGER_KEYSTORE_TYPE -Djavax.net.ssl.trustStore=$INFRA_MANAGER_TRUSTSTORE_LOCATION -Djavax.net.ssl.trustStoreType=$INFRA_MANAGER_TRUSTSTORE_TYPE"
- fi
-
- if [ "$INFRA_MANAGER_JMX" = "true" ]; then
- INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=2099"
- fi
-
- if [ $# -gt 0 ]; then
- while true; do
- case "$1" in
- -f|--foreground)
- FG="true"
- shift
- ;;
- -d|--debug)
- if [ "$INFRA_MANAGER_DEBUG" != "true" ]; then
- INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$INFRA_MANAGER_DEBUG_PORT,server=y,suspend=$INFRA_MANAGER_DEBUG_SUSPEND "
- fi
- shift
- ;;
- *)
- if [ "${1:0:2}" == "-D" ]; then
- # pass thru any opts that begin with -D (java system props)
- INFRA_MANAGER_JAVA_OPTS+=("$1")
- echo "$INFRA_MANAGER_JAVA_OPTS"
- shift
- else
- if [ "$1" != "" ]; then
- print_usage
- exit 1
- else
- break
- fi
- fi
- ;;
- esac
- done
- fi
-
- if [ $FG == "true" ]; then
- echo "Starting Infra Manager... (foreground) pid_file=$INFRA_MANAGER_PID_FILE"
- echo "Run command $JVM -cp '$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*' $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager"
- $JVM -cp "$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*" $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager
- else
- echo "Starting Infra Manager... Output file=$LOG_FILE pid_file=$INFRA_MANAGER_PID_FILE"
- echo "Run command nohup $JVM -cp '$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*' $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager"
- nohup $JVM -cp "$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*" $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager > $LOG_FILE 2>&1 &
- fi
-}
-
-function stop() {
- INFRA_MANAGER_STOP_WAIT=3
- if [ -f "$INFRA_MANAGER_PID_FILE" ]; then
- INFRA_MANAGER_PID=`cat "$INFRA_MANAGER_PID_FILE"`
- fi
-
- if [ "$INFRA_MANAGER_PID" != "" ]; then
- echo -e "Sending stop command to Infra Manager... Checking PID: $INFRA_MANAGER_PID."
- kill $INFRA_MANAGER_PID
- (loops=0
- while true
- do
- CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $INFRA_MANAGER_PID | sort -r | tr -d ' '`
- if [ "$CHECK_PID" != "" ]; then
- slept=$((loops * 2))
- if [ $slept -lt $INFRA_MANAGER_STOP_WAIT ]; then
- sleep 2
- loops=$[$loops+1]
- else
- exit # subshell!
- fi
- else
- exit # subshell!
- fi
- done) &
- spinner $!
- rm -f "$INFRA_MANAGER_PID_FILE"
- else
- echo -e "No Infra Manager process found to stop."
- exit 0
- fi
-
- CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $INFRA_MANAGER_PID | sort -r | tr -d ' '`
- if [ "$CHECK_PID" != "" ]; then
- echo -e "Infra Manager process $INFRA_MANAGER_PID is still running; forcefully killing it now."
- kill -9 $INFRA_MANAGER_PID
- echo "Killed process $INFRA_MANAGER_PID"
- rm -f "$INFRA_MANAGER_PID_FILE"
- sleep 1
- else
- echo "Infra Manager is stopped."
- fi
-
- CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $INFRA_MANAGER_PID | sort -r | tr -d ' '`
- if [ "$CHECK_PID" != "" ]; then
- echo "ERROR: Failed to kill Infra Manager Java process $INFRA_MANAGER_PID ... script fails."
- exit 1
- fi
-}
-
-if [ $# -gt 0 ]; then
- SCRIPT_CMD="$1"
- shift
-else
- print_usage
- exit 1
-fi
-
-case $SCRIPT_CMD in
- start)
- start ${1+"$@"}
- ;;
- stop)
- stop
- ;;
- status)
- status
- ;;
- help)
- print_usage
- exit 0
- ;;
- *)
- print_usage
- exit 1
- ;;
-
-esac
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml b/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
deleted file mode 100644
index d3db3d7ed36..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,44 +0,0 @@
-
-
-
-
- target/log/infra-manager.log
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/static/index.html b/ambari-infra/ambari-infra-manager/src/main/resources/static/index.html
deleted file mode 100644
index 3e648674c8c..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/static/index.html
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
-
-
-
Welcome!
-
-
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html b/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
deleted file mode 100644
index 4d261e7a279..00000000000
--- a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html
+++ /dev/null
@@ -1,136 +0,0 @@
-
-
-
-
-
- Infra Manager REST API
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java
deleted file mode 100644
index 26a6953d54a..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package org.apache.ambari.infra.conf.security;
-
-import org.junit.Test;
-
-import java.util.Optional;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class CompositePasswordStoreTest {
- @Test
- public void testGetPasswordReturnNullIfNoPasswordStoresWereAdded() {
- assertThat(new CompositePasswordStore().getPassword("any").isPresent(), is(false));
- }
-
- @Test
- public void testGetPasswordReturnNullIfPasswordNotFoundInAnyStore() {
- assertThat(new CompositePasswordStore((prop) -> Optional.empty(), (prop) -> Optional.empty()).getPassword("any").isPresent(), is(false));
- }
-
- @Test
- public void testGetPasswordReturnPasswordFromFirstStoreIfExists() {
- assertThat(new CompositePasswordStore((prop) -> Optional.of("Pass"), (prop) -> Optional.empty()).getPassword("any").get(), is("Pass"));
- }
-
- @Test
- public void testGetPasswordReturnPasswordFromSecondStoreIfNotExistsInFirst() {
- assertThat(new CompositePasswordStore((prop) -> Optional.empty(), (prop) -> Optional.of("Pass")).getPassword("any").get(), is("Pass"));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java
deleted file mode 100644
index 3b7caabd02e..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package org.apache.ambari.infra.job;
-
-import org.apache.ambari.infra.job.archive.DocumentArchivingProperties;
-import org.apache.ambari.infra.job.archive.SolrProperties;
-import org.junit.Test;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class JobPropertiesTest {
- @Test
- public void testDeepCopy() throws Exception {
- DocumentArchivingProperties documentArchivingProperties = new DocumentArchivingProperties();
- documentArchivingProperties.setLocalDestinationDirectory("/tmp");
- documentArchivingProperties.setFileNameSuffixColumn(".json");
- documentArchivingProperties.setReadBlockSize(10);
- documentArchivingProperties.setWriteBlockSize(20);
- SolrProperties solr = new SolrProperties();
- solr.setZooKeeperConnectionString("localhost:2181");
- solr.setFilterQueryText("id:1167");
- solr.setQueryText("name:'Joe'");
- solr.setCollection("Users");
- solr.setSortColumn(new String[] {"name"});
- documentArchivingProperties.setSolr(solr);
-
- DocumentArchivingProperties parsed = documentArchivingProperties.deepCopy();
-
- assertThat(parsed.getLocalDestinationDirectory(), is(documentArchivingProperties.getLocalDestinationDirectory()));
- assertThat(parsed.getFileNameSuffixColumn(), is(documentArchivingProperties.getFileNameSuffixColumn()));
- assertThat(parsed.getReadBlockSize(), is(documentArchivingProperties.getReadBlockSize()));
- assertThat(parsed.getWriteBlockSize(), is(documentArchivingProperties.getWriteBlockSize()));
- assertThat(parsed.getSolr().getZooKeeperConnectionString(), is(documentArchivingProperties.getSolr().getZooKeeperConnectionString()));
- assertThat(parsed.getSolr().getQueryText(), is(solr.getQueryText()));
- assertThat(parsed.getSolr().getFilterQueryText(), is(solr.getFilterQueryText()));
- assertThat(parsed.getSolr().getCollection(), is(solr.getCollection()));
- assertThat(parsed.getSolr().getSortColumn(), is(solr.getSortColumn()));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java
deleted file mode 100644
index ba1150f77ad..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java
+++ /dev/null
@@ -1,114 +0,0 @@
-package org.apache.ambari.infra.job;
-
-import org.apache.ambari.infra.manager.Jobs;
-import org.easymock.EasyMockRunner;
-import org.easymock.EasyMockSupport;
-import org.easymock.Mock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-
-import javax.batch.operations.NoSuchJobException;
-import java.util.Optional;
-import java.util.concurrent.ScheduledFuture;
-
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.isA;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-@RunWith(EasyMockRunner.class)
-public class JobSchedulerTest extends EasyMockSupport {
-
- @Mock
- private TaskScheduler taskScheduler;
- @Mock
- private Jobs jobs;
- @Mock
- private ScheduledFuture scheduledFuture;
- private JobScheduler jobScheduler;
-
- @Before
- public void setUp() throws Exception {
- jobScheduler = new JobScheduler(taskScheduler, jobs);
- }
-
- @After
- public void tearDown() throws Exception {
- verifyAll();
- }
-
- @Test(expected = NoSuchJobException.class)
- public void testScheduleWhenJobNotExistsThrowsException() throws Exception {
- String jobName = "notFoundJob";
- expect(jobs.lastRun(jobName)).andThrow(new NoSuchJobException());
- replayAll();
-
- jobScheduler.schedule(jobName, null);
- }
-
- @Test
- public void testScheduleWhenNoPreviousExecutionExistsJobIsScheduled() throws Exception {
- String jobName = "job0";
- SchedulingProperties schedulingProperties = new SchedulingProperties();
- schedulingProperties.setCron("* * * * * ?");
- expect(jobs.lastRun(jobName)).andReturn(Optional.empty());
- expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture);
- replayAll();
-
- jobScheduler.schedule(jobName, schedulingProperties);
- }
-
- @Test
- public void testScheduleWhenPreviousExecutionWasSuccessfulJobIsScheduled() throws Exception {
- String jobName = "job0";
- SchedulingProperties schedulingProperties = new SchedulingProperties();
- schedulingProperties.setCron("* * * * * ?");
- JobExecution jobExecution = new JobExecution(1L, new JobParameters());
- jobExecution.setExitStatus(ExitStatus.COMPLETED);
- expect(jobs.lastRun(jobName)).andReturn(Optional.of(jobExecution));
- expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture);
- replayAll();
-
- jobScheduler.schedule(jobName, schedulingProperties);
- }
-
- @Test
- public void testScheduleWhenPreviousExecutionFailedJobIsRestartedAndScheduled() throws Exception {
- String jobName = "job0";
- SchedulingProperties schedulingProperties = new SchedulingProperties();
- schedulingProperties.setCron("* * * * * ?");
- JobExecution jobExecution = new JobExecution(1L, new JobParameters());
- jobExecution.setExitStatus(ExitStatus.FAILED);
- expect(jobs.lastRun(jobName)).andReturn(Optional.of(jobExecution));
- jobs.restart(1L); expectLastCall();
- expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture);
- replayAll();
-
- jobScheduler.schedule(jobName, schedulingProperties);
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
deleted file mode 100644
index b31110cf84a..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.JobContextRepository;
-import org.easymock.EasyMockRunner;
-import org.easymock.EasyMockSupport;
-import org.easymock.Mock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.scope.context.ChunkContext;
-import org.springframework.batch.core.scope.context.StepContext;
-import org.springframework.batch.item.ExecutionContext;
-import org.springframework.batch.item.ItemStreamReader;
-import org.springframework.batch.repeat.RepeatStatus;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.HashMap;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.Is.is;
-
-@RunWith(EasyMockRunner.class)
-public class DocumentExporterTest extends EasyMockSupport {
-
- private static final long JOB_EXECUTION_ID = 1L;
- private static final long STEP_EXECUTION_ID = 1L;
- private static final Document DOCUMENT_2 = new Document(new HashMap() {{
- put("id", "2");
- }});
- private static final Document DOCUMENT_3 = new Document(new HashMap() {{
- put("id", "3");
- }});
- private DocumentExporter documentExporter;
- @Mock
- private ItemStreamReader reader;
- @Mock
- private DocumentDestination documentDestination;
- @Mock
- private DocumentItemWriter documentItemWriter;
- @Mock
- private DocumentItemWriter documentItemWriter2;
- @Mock
- private DocumentItemWriter documentItemWriter3;
- @Mock
- private JobContextRepository jobContextRepository;
-
-// private ExecutionContext executionContext;
- private ChunkContext chunkContext;
- private static final Document DOCUMENT = new Document(new HashMap() {{ put("id", "1"); }});
-
- @Before
- public void setUp() throws Exception {
- chunkContext = chunkContext(BatchStatus.STARTED);
- documentExporter = documentExporter(2);
- }
-
- private DocumentExporter documentExporter(int writeBlockSize) {
- return new DocumentExporter(reader, documentDestination, writeBlockSize, jobContextRepository);
- }
-
- private ChunkContext chunkContext(BatchStatus batchStatus) {
- StepExecution stepExecution = new StepExecution("exportDoc", new JobExecution(JOB_EXECUTION_ID));
- stepExecution.setId(STEP_EXECUTION_ID);
- stepExecution.getJobExecution().setStatus(batchStatus);
- return new ChunkContext(new StepContext(stepExecution));
- }
-
- @After
- public void tearDown() throws Exception {
- verifyAll();
- }
-
- @Test
- public void testNothingToRead() throws Exception {
- reader.open(executionContext(chunkContext)); expectLastCall();
- expect(reader.read()).andReturn(null);
- reader.close(); expectLastCall();
- replayAll();
-
- documentExporter.execute(null, chunkContext);
- }
-
- private ExecutionContext executionContext(ChunkContext chunkContext) {
- return chunkContext.getStepContext().getStepExecution().getExecutionContext();
- }
-
- @Test
- public void testWriteLessDocumentsThanWriteBlockSize() throws Exception {
- reader.open(executionContext(chunkContext)); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT);
- expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
- documentItemWriter.write(DOCUMENT); expectLastCall();
- expect(reader.read()).andReturn(null);
- reader.close(); expectLastCall();
- documentItemWriter.close(); expectLastCall();
- replayAll();
-
- assertThat(documentExporter.execute(null, chunkContext), is(RepeatStatus.FINISHED));
- }
-
- @Test
- public void testWriteMoreDocumentsThanWriteBlockSize() throws Exception {
- reader.open(executionContext(chunkContext)); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT);
- expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
- documentItemWriter.write(DOCUMENT); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT_2);
- documentItemWriter.write(DOCUMENT_2); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT_3);
- documentItemWriter.close(); expectLastCall();
- jobContextRepository.updateExecutionContext(chunkContext.getStepContext().getStepExecution());
- expect(jobContextRepository.getStepExecution(JOB_EXECUTION_ID, STEP_EXECUTION_ID)).andReturn(chunkContext.getStepContext().getStepExecution());
- expect(documentDestination.open(DOCUMENT_3)).andReturn(documentItemWriter2);
- documentItemWriter2.write(DOCUMENT_3); expectLastCall();
- expect(reader.read()).andReturn(null);
- reader.update(executionContext(chunkContext));
- reader.close(); expectLastCall();
- documentItemWriter2.close(); expectLastCall();
- replayAll();
-
- assertThat(documentExporter.execute(null, chunkContext), is(RepeatStatus.FINISHED));
- }
-
- @Test(expected = IOException.class)
- public void testReadError() throws Exception {
- reader.open(executionContext(chunkContext)); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT);
- expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
- documentItemWriter.write(DOCUMENT); expectLastCall();
- expect(reader.read()).andThrow(new IOException("TEST"));
- documentItemWriter.revert(); expectLastCall();
- reader.close(); expectLastCall();
- replayAll();
-
- documentExporter.execute(null, chunkContext);
- }
-
- @Test(expected = UncheckedIOException.class)
- public void testWriteError() throws Exception {
- reader.open(executionContext(chunkContext)); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT);
- expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
- documentItemWriter.write(DOCUMENT); expectLastCall().andThrow(new UncheckedIOException(new IOException("TEST")));
- documentItemWriter.revert(); expectLastCall();
- reader.close(); expectLastCall();
- replayAll();
-
- documentExporter.execute(null, chunkContext);
- }
-
- @Test
- public void testStopAndRestartExportsAllDocuments() throws Exception {
- ChunkContext stoppingChunkContext = chunkContext(BatchStatus.STOPPING);
- DocumentExporter documentExporter = documentExporter(1);
-
- reader.open(executionContext(chunkContext)); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT);
-
- expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
- documentItemWriter.write(DOCUMENT); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT_2);
- expect(jobContextRepository.getStepExecution(JOB_EXECUTION_ID, STEP_EXECUTION_ID)).andReturn(chunkContext.getStepContext().getStepExecution());
- documentItemWriter.close(); expectLastCall();
- reader.update(executionContext(this.chunkContext));
- jobContextRepository.updateExecutionContext(this.chunkContext.getStepContext().getStepExecution());
-
- expect(documentDestination.open(DOCUMENT_2)).andReturn(documentItemWriter2);
- documentItemWriter2.write(DOCUMENT_2); expectLastCall();
- expect(reader.read()).andReturn(DOCUMENT_3);
- expect(jobContextRepository.getStepExecution(JOB_EXECUTION_ID, STEP_EXECUTION_ID)).andReturn(stoppingChunkContext.getStepContext().getStepExecution());
- documentItemWriter2.revert(); expectLastCall();
- reader.close(); expectLastCall();
-
- reader.open(executionContext(chunkContext));
- expect(reader.read()).andReturn(DOCUMENT_3);
- expect(documentDestination.open(DOCUMENT_3)).andReturn(documentItemWriter3);
- documentItemWriter3.write(DOCUMENT_3); expectLastCall();
- documentItemWriter3.close(); expectLastCall();
-
- expect(reader.read()).andReturn(null);
- reader.close(); expectLastCall();
- replayAll();
-
- RepeatStatus repeatStatus = documentExporter.execute(null, this.chunkContext);
- assertThat(repeatStatus, is(RepeatStatus.CONTINUABLE));
- repeatStatus = documentExporter.execute(null, this.chunkContext);
- assertThat(repeatStatus, is(RepeatStatus.FINISHED));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
deleted file mode 100644
index 0776c3cf182..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.ambari.infra.job.CloseableIterator;
-import org.apache.ambari.infra.job.ObjectSource;
-import org.easymock.EasyMockRunner;
-import org.easymock.EasyMockSupport;
-import org.easymock.Mock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.batch.item.ExecutionContext;
-
-import java.util.HashMap;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.hamcrest.core.Is.is;
-import static org.hamcrest.core.IsNull.nullValue;
-import static org.junit.Assert.assertThat;
-
-@RunWith(EasyMockRunner.class)
-public class DocumentItemReaderTest extends EasyMockSupport {
- private static final Document DOCUMENT = new Document(new HashMap() {{ put("id", "1"); }});
- private static final Document DOCUMENT_2 = new Document(new HashMap() {{ put("id", "2"); }});
- private static final Document DOCUMENT_3 = new Document(new HashMap() {{ put("id", "3"); }});
- private static final int READ_BLOCK_SIZE = 2;
-
- private DocumentItemReader documentItemReader;
- @Mock
- private ObjectSource documentSource;
- @Mock
- private CloseableIterator documentIterator;
- @Mock
- private CloseableIterator documentIterator2;
-
- @Before
- public void setUp() throws Exception {
- documentItemReader = new DocumentItemReader(documentSource, READ_BLOCK_SIZE);
- }
-
- @After
- public void tearDown() throws Exception {
- verifyAll();
- }
-
- @Test
- public void testReadWhenCollectionIsEmpty() throws Exception {
- expect(documentSource.open(null, 2)).andReturn(documentIterator);
- expect(documentIterator.next()).andReturn(null);
- documentIterator.close(); expectLastCall();
- replayAll();
-
- assertThat(documentItemReader.read(), is(nullValue()));
- assertThat(documentItemReader.isComplete(null), is(true));
- assertThat(documentItemReader.isComplete(null, null), is(true));
- }
-
- @Test
- public void testReadWhenCollectionContainsLessElementsThanReadBlockSize() throws Exception {
- expect(documentSource.open(null, 2)).andReturn(documentIterator);
- expect(documentIterator.next()).andReturn(DOCUMENT);
- expect(documentIterator.next()).andReturn(null);
- documentIterator.close(); expectLastCall();
- replayAll();
-
- assertThat(documentItemReader.read(), is(DOCUMENT));
- assertThat(documentItemReader.isComplete(null), is(false));
- assertThat(documentItemReader.isComplete(null, null), is(false));
- assertThat(documentItemReader.read(), is(nullValue()));
- assertThat(documentItemReader.isComplete(null), is(true));
- assertThat(documentItemReader.isComplete(null, null), is(true));
- }
-
- @Test
- public void testReadWhenCollectionContainsExactlySameCountElementsAsReadBlockSize() throws Exception {
- expect(documentSource.open(null, 2)).andReturn(documentIterator);
- expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2);
- expect(documentIterator.next()).andReturn(DOCUMENT);
- expect(documentIterator.next()).andReturn(DOCUMENT_2);
- expect(documentIterator.next()).andReturn(null);
- documentIterator.close(); expectLastCall();
-
- expect(documentIterator2.next()).andReturn(null);
- documentIterator2.close(); expectLastCall();
- replayAll();
-
- assertThat(documentItemReader.read(), is(DOCUMENT));
- assertThat(documentItemReader.isComplete(null), is(false));
- assertThat(documentItemReader.isComplete(null, null), is(false));
- assertThat(documentItemReader.read(), is(DOCUMENT_2));
- assertThat(documentItemReader.isComplete(null), is(false));
- assertThat(documentItemReader.isComplete(null, null), is(false));
- assertThat(documentItemReader.read(), is(nullValue()));
- assertThat(documentItemReader.isComplete(null), is(true));
- assertThat(documentItemReader.isComplete(null, null), is(true));
- }
-
- @Test
- public void testReadWhenCollectionContainsMoreElementsThanReadBlockSize() throws Exception {
- Document document3 = new Document(new HashMap() {{ put("id", "2"); }});
-
- expect(documentSource.open(null, 2)).andReturn(documentIterator);
- expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2);
- expect(documentIterator.next()).andReturn(DOCUMENT);
- expect(documentIterator.next()).andReturn(DOCUMENT_2);
- expect(documentIterator.next()).andReturn(null);
- documentIterator.close(); expectLastCall();
- expect(documentIterator2.next()).andReturn(document3);
- expect(documentIterator2.next()).andReturn(null);
- documentIterator2.close(); expectLastCall();
-
- replayAll();
-
- assertThat(documentItemReader.read(), is(DOCUMENT));
- assertThat(documentItemReader.isComplete(null), is(false));
- assertThat(documentItemReader.isComplete(null, null), is(false));
-
- assertThat(documentItemReader.read(), is(DOCUMENT_2));
- assertThat(documentItemReader.isComplete(null), is(false));
- assertThat(documentItemReader.isComplete(null, null), is(false));
-
- assertThat(documentItemReader.read(), is(document3));
- assertThat(documentItemReader.isComplete(null), is(false));
- assertThat(documentItemReader.isComplete(null, null), is(false));
-
- assertThat(documentItemReader.read(), is(nullValue()));
- assertThat(documentItemReader.isComplete(null), is(true));
- assertThat(documentItemReader.isComplete(null, null), is(true));
- }
-
- @Test
- public void testContinueWhenOnlyFirstElementWasRead() throws Exception {
- expect(documentSource.open(null, 2)).andReturn(documentIterator);
- expect(documentIterator.next()).andReturn(DOCUMENT);
- documentIterator.close(); expectLastCall();
- expect(documentSource.open(null, 2)).andReturn(documentIterator2);
- expect(documentIterator2.next()).andReturn(DOCUMENT);
- documentIterator2.close(); expectLastCall();
- replayAll();
-
- ExecutionContext executionContext = new ExecutionContext();
- documentItemReader.open(executionContext);
- assertThat(documentItemReader.read(), is(DOCUMENT));
- documentItemReader.update(executionContext);
- assertThat(executionContext.containsKey(DocumentItemReader.POSITION), is(false));
- documentItemReader.close();
-
- documentItemReader.open(executionContext);
- assertThat(documentItemReader.read(), is(DOCUMENT));
- documentItemReader.close();
- }
-
- @Test
- public void testContinueWhenMoreThanOneElementWasRead() throws Exception {
- expect(documentSource.open(null, 2)).andReturn(documentIterator);
- expect(documentIterator.next()).andReturn(DOCUMENT);
- expect(documentIterator.next()).andReturn(DOCUMENT_2);
- documentIterator.close(); expectLastCall();
- expect(documentSource.open(DOCUMENT, 2)).andReturn(documentIterator2);
- expect(documentIterator2.next()).andReturn(DOCUMENT_2);
- expect(documentIterator2.next()).andReturn(DOCUMENT_3);
- documentIterator2.close(); expectLastCall();
-
- replayAll();
-
- ExecutionContext executionContext = new ExecutionContext();
- documentItemReader.open(executionContext);
- assertThat(documentItemReader.read(), is(DOCUMENT));
- assertThat(documentItemReader.read(), is(DOCUMENT_2));
- documentItemReader.update(executionContext);
- assertThat(executionContext.get(DocumentItemReader.POSITION), is(DOCUMENT));
- documentItemReader.close();
-
- documentItemReader.open(executionContext);
- assertThat(documentItemReader.read(), is(DOCUMENT_2));
- assertThat(documentItemReader.read(), is(DOCUMENT_3));
- documentItemReader.close();
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
deleted file mode 100644
index cca2c1a503e..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import org.junit.Test;
-
-import java.util.HashMap;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-public class FileNameSuffixFormatterTest {
-
- private FileNameSuffixFormatter formatter = new FileNameSuffixFormatter("logtime", "yyyy-MM-dd'T'hh-mm-ss-SSSX");
-
- @Test(expected = NullPointerException.class)
- public void testFormatWhenDocumentIsNullThrowingException() throws Exception {
- formatter.format((Document) null);
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testFormatWhenSpecifiedColumnDoesNotExistsInTheDocumentThrowingException() throws Exception {
- formatter.format(new Document(new HashMap<>()));
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testFormatWhenSpecifiedColumnContainsBlankValueThrowingException() throws Exception {
- formatter.format(new Document(new HashMap() {{ put("logtime", " "); }}));
- }
-
- @Test
- public void testFormatWhenNoDateFormatSpecifiedRawColumnValueReturned() throws Exception {
- FileNameSuffixFormatter formatter = new FileNameSuffixFormatter("logtime", null);
- assertThat(formatter.format(new Document(new HashMap() {{ put("logtime", "Monday"); }})), is("Monday"));
- }
-
- @Test
- public void testFormatWhenDateFormatIsSpecifiedAFormattedValueReturned() throws Exception {
- assertThat(formatter.format(new Document(new HashMap() {{ put("logtime", "2017-12-15T10:12:33.453Z"); }})), is("2017-12-15T10-12-33-453Z"));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
deleted file mode 100644
index 85e79e18277..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.job.archive;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.FileUtils;
-import org.easymock.EasyMockRunner;
-import org.easymock.EasyMockSupport;
-import org.easymock.Mock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.easymock.EasyMock.cmp;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.LogicalOperator.EQUAL;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
-
-@RunWith(EasyMockRunner.class)
-public class LocalDocumentItemWriterTest extends EasyMockSupport {
-
- private static final Document DOCUMENT = new Document(new HashMap() {{ put("id", "1"); }});
- private static final Document DOCUMENT2 = new Document(new HashMap() {{ put("id", "2"); }});
- private static final Document DOCUMENT3 = new Document(new HashMap() {{ put("id", "3"); }});
- private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
- private LocalDocumentItemWriter localDocumentItemWriter;
- private File outFile;
- @Mock
- private ItemWriterListener itemWriterListener;
-
- @Before
- public void setUp() throws Exception {
- outFile = File.createTempFile("LocalDocumentItemWriterTest", "json.tmp");
- localDocumentItemWriter = new LocalDocumentItemWriter(outFile, itemWriterListener);
- }
-
- @After
- public void tearDown() throws Exception {
- outFile.delete();
- verifyAll();
- }
-
- @Test
- public void testWrite() throws Exception {
- itemWriterListener.onCompleted(
- cmp(new WriteCompletedEvent(outFile, DOCUMENT, DOCUMENT3), writeCompletedEventEqualityComparator(), EQUAL)); expectLastCall();
- replayAll();
-
- localDocumentItemWriter.write(DOCUMENT);
- localDocumentItemWriter.write(DOCUMENT2);
- localDocumentItemWriter.write(DOCUMENT3);
- localDocumentItemWriter.close();
-
- List documentList = readBack(outFile);
- assertThat(documentList.size(), is(3));
- assertThat(documentList.get(0).get("id"), is(DOCUMENT.get("id")));
- assertThat(documentList.get(1).get("id"), is(DOCUMENT2.get("id")));
- assertThat(documentList.get(2).get("id"), is(DOCUMENT3.get("id")));
- }
-
- private Comparator writeCompletedEventEqualityComparator() {
- return (o1, o2) -> {
- if (o1.getOutFile().equals(o2.getOutFile()) &&
- o1.getFirstDocument().equals(o2.getFirstDocument()) &&
- o1.getLastDocument().equals(o2.getLastDocument()))
- return 0;
- return 1;
- };
- }
-
- private List readBack(File file) throws IOException {
- List documentList = new ArrayList<>();
- for (String line : FileUtils.readLines(file)) {
- documentList.add(OBJECT_MAPPER.readValue(line, Document.class));
- }
- return documentList;
- }
-
- @Test
- public void testRevert() throws Exception {
- replayAll();
-
- localDocumentItemWriter.write(DOCUMENT);
- localDocumentItemWriter.revert();
-
- assertThat(outFile.exists(), is(false));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java
deleted file mode 100644
index e34a222cd70..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import org.junit.Test;
-
-import java.io.StringReader;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class S3AccessCsvTest {
-
- private static final String VALID_ACCESS_FILE = "Access key ID,Secret access key\n" +
- "someKey,someSecret\n";
-
- private static final String ANY_CSV_FILE = "Column1,Column2\n" +
- "Foo,Bar\n";
-
- @Test
- public void testGetPasswordReturnsNullIfInputIsEmpty() {
- S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(""));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
- }
-
- @Test
- public void testGetPasswordReturnsAccessAndSecretKeyIfInputIsAValidS3AccessFile() {
- S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(VALID_ACCESS_FILE));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).get(), is("someKey"));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).get(), is("someSecret"));
- }
-
- @Test
- public void testGetPasswordReturnsNullIfNotAValidS3AccessFileProvided() {
- S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(ANY_CSV_FILE));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
- }
-
- @Test
- public void testGetPasswordReturnsNullIfAHeaderOnlyS3AccessFileProvided() {
- S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Secret access key\n"));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
- }
-
- @Test
- public void testGetPasswordReturnsNullIfOnlyOneValidColumnProvided() {
- S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Column\n"));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false));
- assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
deleted file mode 100644
index 018c993b429..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class SolrParametrizedStringTest {
-
- private static final Map PARAMETERS_1 = new HashMap() {{ put("id", "1"); put("name", "User"); put("product", "Computer"); }};
- private static final Map PARAMETERS_START = new HashMap() {{ put("price", "1000"); }};
- private static final Map PARAMETERS_END = new HashMap() {{ put("price", "2000"); }};
-
- @Test
- public void testToStringEmptyStringResultsEmptyString() {
- assertThat(new SolrParametrizedString("").set(PARAMETERS_1).toString(), is(""));
- }
-
- @Test
- public void testParameterlessStringResultsItself() {
- assertThat(new SolrParametrizedString("Hello World!").set(PARAMETERS_1).toString(), is("Hello World!"));
- }
-
- @Test
- public void testParametersAreReplacedIfFoundInString() {
- assertThat(new SolrParametrizedString("Hello ${name}!").set(PARAMETERS_1).toString(), is("Hello User!"));
- }
-
- @Test
- public void testWhenStringContainsPrefixedParamtersOnlyPrefixedParametersAreSet() {
- assertThat(new SolrParametrizedString("The ${product} price is between $${start.price} and $${end.price}.")
- .set(PARAMETERS_1)
- .set("start", PARAMETERS_START)
- .set("end", PARAMETERS_END).toString(), is("The Computer price is between $1000 and $2000."));
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
deleted file mode 100644
index be8a2260f96..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import org.junit.Test;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.JobParametersBuilder;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class SolrPropertiesTest {
- @Test
- public void testApplySortColumns() throws Exception {
- JobParameters jobParameters = new JobParametersBuilder()
- .addString("sortColumn[0]", "logtime")
- .addString("sortColumn[1]", "id")
- .toJobParameters();
-
- SolrProperties solrProperties = new SolrProperties();
- solrProperties.setSortColumn(new String[] {"testColumn"});
- solrProperties.apply(jobParameters);
- assertThat(solrProperties.getSortColumn().length, is(2));
- assertThat(solrProperties.getSortColumn()[0], is("logtime"));
- assertThat(solrProperties.getSortColumn()[1], is("id"));
- }
-
- @Test
- public void testApplyWhenNoSortIsDefined() throws Exception {
- JobParameters jobParameters = new JobParametersBuilder()
- .toJobParameters();
-
- SolrProperties solrProperties = new SolrProperties();
- solrProperties.setSortColumn(new String[] {"testColumn"});
- solrProperties.apply(jobParameters);
- assertThat(solrProperties.getSortColumn().length, is(1));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
deleted file mode 100644
index ee0827965c2..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.job.archive;
-
-import org.apache.solr.client.solrj.SolrQuery;
-import org.junit.Test;
-
-import java.util.HashMap;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.junit.Assert.assertThat;
-
-public class SolrQueryBuilderTest {
- private static final Document DOCUMENT = new Document(new HashMap() {{
- put("logtime", "2017-10-02'T'10:00:11.634Z");
- put("id", "1");
- }});
-
- @Test
- public void testDefaultQuery() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder()
- .build();
- assertThat(solrQuery.getQuery(), is("*:*"));
- }
-
- @Test
- public void testSetQueryReplacesTheDefaultQueryTextAndParameterPlaceholdersAreReplacedToValues() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder()
- .setQueryText("logtime:[* TO ${end}]")
- .setInterval(null, "2017-11-27'T'10:12:11.372Z")
- .build();
- assertThat(solrQuery.getQuery(), is("logtime:[* TO 2017\\-11\\-27'T'10\\:12\\:11.372Z]"));
- }
-
- @Test
- public void testSetFilterQueryAddsAFilterQueryAndParameterPlaceholdersAreReplacedToValues() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder()
- .setFilterQueryText("(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]")
- .setDocument(DOCUMENT)
- .setInterval(null, "2017-11-27'T'10:12:11.372Z")
- .build();
- assertThat(solrQuery.getFilterQueries()[0], is( "(logtime:2017\\-10\\-02'T'10\\:00\\:11.634Z AND id:{1 TO *]) OR logtime:{2017\\-10\\-02'T'10\\:00\\:11.634Z TO 2017\\-11\\-27'T'10\\:12\\:11.372Z]"));
- }
-
- @Test
- public void testSetFilterQueryWhenDocumentIsNull() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder()
- .setFilterQueryText("(logtime:\"${logtime}\" AND id:{\"${id}\" TO *]) OR logtime:{\"${logtime}\" TO \"${end}\"]")
- .setInterval(null, "2017-11-27'T'10:12:11.372Z")
- .build();
- assertThat(solrQuery.getFilterQueries(), is(nullValue()));
- }
-
- @Test
- public void testNullEndValueDoesNotAffectFilterQuery() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder()
- .setFilterQueryText("logtime:${logtime} AND id:{${id} TO *]")
- .setDocument(DOCUMENT)
- .build();
- assertThat(solrQuery.getFilterQueries()[0], is("logtime:2017\\-10\\-02'T'10\\:00\\:11.634Z AND id:{1 TO *]"));
- }
-
- @Test
- public void testSetFilterQueryWhenQueryFilterIsNullButDocumentIsNot() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder()
- .setDocument(DOCUMENT)
- .build();
- assertThat(solrQuery.getFilterQueries(), is(nullValue()));
- }
-
- @Test
- public void testSort() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder().addSort("logtime", "id").build();
- assertThat(solrQuery.getSorts().get(0).getItem(), is("logtime"));
- assertThat(solrQuery.getSorts().get(1).getItem(), is("id"));
- }
-
- @Test
- public void test_start_and_end_values_are_given() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder().setQueryText("id:[\"${start}\" TO \"${end}\"]").setInterval("10", "13").build();
- assertThat(solrQuery.getQuery(), is("id:[\"10\" TO \"13\"]"));
- }
-
- @Test
- public void test_start_and_end_values_are_null() throws Exception {
- SolrQuery solrQuery = new SolrQueryBuilder().setQueryText("id:[${start} TO ${end}]").build();
- assertThat(solrQuery.getQuery(), is("id:[* TO *]"));
- }
-}
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryPropertiesTest.java
deleted file mode 100644
index 322775e54b5..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryPropertiesTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package org.apache.ambari.infra.job.archive;
-
-import org.junit.Test;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.JobParametersBuilder;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-public class SolrQueryPropertiesTest {
- @Test
- public void testApplySortColumns() throws Exception {
- JobParameters jobParameters = new JobParametersBuilder()
- .addString("sortColumn[0]", "logtime")
- .addString("sortColumn[1]", "id")
- .toJobParameters();
-
- SolrQueryProperties solrQueryProperties = new SolrQueryProperties();
- solrQueryProperties.setSortColumn(new String[] {"testColumn"});
- solrQueryProperties.apply(jobParameters);
- assertThat(solrQueryProperties.getSortColumn().length, is(2));
- assertThat(solrQueryProperties.getSortColumn()[0], is("logtime"));
- assertThat(solrQueryProperties.getSortColumn()[1], is("id"));
- }
-
- @Test
- public void testApplyWhenNoSortIsDefined() throws Exception {
- JobParameters jobParameters = new JobParametersBuilder()
- .toJobParameters();
-
- SolrQueryProperties solrQueryProperties = new SolrQueryProperties();
- solrQueryProperties.setSortColumn(new String[] {"testColumn"});
- solrQueryProperties.apply(jobParameters);
- assertThat(solrQueryProperties.getSortColumn().length, is(1));
- }
-}
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-manager/src/test/resoruces/vagrant-infra-manager.properties.sample b/ambari-infra/ambari-infra-manager/src/test/resoruces/vagrant-infra-manager.properties.sample
deleted file mode 100644
index d722f0ef6c7..00000000000
--- a/ambari-infra/ambari-infra-manager/src/test/resoruces/vagrant-infra-manager.properties.sample
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-infra-manager.batch.db.file=job-repository.db
-infra-manager.batch.db.init=true
-infra-manager.batch.db.username=admin
-infra-manager.batch.db.password=admin
-management.security.enabled=false
-management.health.solr.enabled=false
-infra-manager.server.data.folder=/tmp/ambariInfraManager
-
-infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=c6401.ambari.apache.org:2181/infra-solr
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logtime:[${start} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
-infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=2000
-infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=1000
-infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=HDFS
-infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime
-infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
-infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_endpoint=hdfs://c6401.ambari.apache.org:8020
-infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_destination_directory=/archived_service_logs
-# Note: set hdfs user using the HADOOP_USER_NAME environmental variable. Value: hdfs
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-solr-client/README.md b/ambari-infra/ambari-infra-solr-client/README.md
deleted file mode 100644
index 2b6d004cfa5..00000000000
--- a/ambari-infra/ambari-infra-solr-client/README.md
+++ /dev/null
@@ -1,1060 +0,0 @@
-
-
-## Ambari Infra Solr Client
-
-CLI helper tool(s) for Ambari Infra Solr.
-
-### Post Ambari Server Upgrade (Ambari 2.7.x)
-
-Ambari Infra Solr uses Solr 7 from Ambari 2.7.0, therefore it is required migrate Solr 5 index (Ambari Infra 2.6.x), if you want to keep your old data. (otherwise backup part can be skipped)
-
-#### Contents:
-- [I. Upgrade Ambari Infra Solr Clients](#i.-upgrade-ambari-infra-solr-client)
-- [II. Gather required Ambari and Solr parameters](#0-gather-params)
-- [III. Backup Solr Collections](#ii.-backup-collections-(ambari-2.6.x-to-ambari-2.7.x))
- - a.) If you have Ranger Ambari service with Solr audits:
- - [1. Backup Ranger collection](#ii/1.-backup-ranger-collection)
- - [2. Backup Ranger configs on Solr ZNode](#ii/2.-backup-ranger-configs-on-solr-znode)
- - [3. Delete Ranger collection](#ii/3.-delete-ranger-collection)
- - [4. Upgrade Ranger Solr schema](#ii/4.-upgrade-ranger-solr-schema)
- - b.) If you have Atlas Ambari service:
- - [5. Backup Atlas collections](#ii/5.-backup-atlas-collections)
- - [6. Delete Atlas collections](#ii/6.-delete-atlas-collections)
- - c.) If you have Log Search Ambari service:
- - [7. Delete Log Search collections](#ii/7.-delete-log-search-collections)
- - [8. Delete Log Search Solr configs](#ii/8.-delete-log-search-solr-configs)
-- [IV. Upgrade Ambari Infra Solr package](#iii.-upgrade-infra-solr-packages)
-- [V. Re-create Solr Collections](#iv.-re-create-collections)
-- [VI. Migrate Solr Collections](#v.-migrate-solr-collections)
- - a.) If you have Ranger Ambari service with Solr audits:
- - [1. Migrate Ranger Solr collection](#v/1.-migrate-ranger-collections)
- - b.) If you have Atlas Ambari service:
- - [2. Migrate Atlas Solr collections](#v/2.-migrate-atlas-collections)
-- [VII. Restore Solr Collections](#vi.-restore-collections)
- - a.) If you have Ranger Ambari service with Solr audits:
- - [1. Restore old Ranger collection](#vi/1.-restore-old-ranger-collection)
- - b.) If you have Atlas Ambari service:
- - [4. Restore old Atlas collections](#vi/4.-restore-old-atlas-collections)
-- [VIII. Restart Solr Instances](#vii.-restart-infra-solr-instances)
-- [IX. Transport old data to new collections](#viii.-transport-old-data-to-new-collections)
- - a.) If you have Ranger Ambari service with Solr audits:
- - [1. Transport old data to Ranger collection](#viii/1.-transport-old-data-to-ranger-collection)
- - b.) If you have Atlas Ambari service:
- - [2. Transport old data to Atlas collections](#viii/2.-transport-old-data-to-atlas-collections)
-- [Happy Path](#happy-path)
-- [APPENDIX](#appendix)
-
-### I. Upgrade Ambari Infra Solr Client
-
-##### Prerequisites:
-- Upgrade Ambari server
-- Make sure Solrs are up and running
-- Do NOT restart Infra Solr after Ambari server upgrade (if you do, see [this](#if-solr-restarted))
-- There will be a small time window between backup collections and deleting collections - Ranger plugins will operate during that time, which means you can lose data during that period. If that is a serious problem, you can avoid it by enabling audit to HDFS for that time.
-
-First make sure `ambari-infra-solr-client` is the latest. (If its before 2.7.x) It will contain the migrationHelper.py script at `/usr/lib/ambari-infra-solr-client` location.
-Also make sure you won't upgrade `ambari-infra-solr` until the migration is done. (all of this should happen after `ambari-server` upgrade, also make sure to not restart `INFRA_SOLR` instances).
-
-For upgrading `ambari-infra-solr-client` ssh into a host (where there is an `ambari-infra-solr` located as well):
-
-```bash
-# For RHEL/CentOS/Oracle Linux:
-
-yum clean all
-yum upgrade ambari-infra-solr-client
-
-# For SLES:
-
-zypper clean
-zypper up ambari-infra-solr-client
-
-# For Ubuntu/Debian:
-
-apt-get clean all
-apt-get update
-apt-get install ambari-infra-solr-client
-```
-
-You will need to repeat that step on every other host where `ambari-infra-solr-client` is installed or optionally you can skip ambari-infra-solr-client upgrade on all host, and you can do that after the end of the next step, see [here](#automatic-upgrade-ambari-infra-solr-client).
-
-### II. Gather required Ambari and Solr parameters
-
-At the start, it is required to create a proper configuration input for the migration helper script. That can be done with [/usr/lib/ambari-infra-solr-client/migrationConfigGenerator.py](#migration-config-generator) script. Choose one of the Solr server host, and ssh there and run (with proper ambari-server configurations as flags):
-
-```bash
-# use a sudoer user for running the script !!
-CONFIG_INI_LOCATION=ambari_solr_migration.ini # output of the script with required parameters for migrationHelper.py
-# note 1: use -s if ambari-server uses https
-# note 2: use --shared-driver if the backup location is shared for different hosts
-# note 3: use --hdfs-base-path if the index data is located on hdfs (or --ranger-hdfs-base-path if only ranger collection is located there), e.g.: /user/infra-solr
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationConfigGenerator.py --ini-file $CONFIG_INI_LOCATION --host c7401.ambari.apache.org --port 8080 --cluster cl1 --username admin --password admin --backup-base-path=/my/path --java-home /usr/jdk64/jdk1.8.0_112
-```
-
-Some important flags that can be added at this point;
-- `--shared-drive` : Use this flag if the location of the backup is shared between hosts (it will generate the index location as , therefore migration commands can be parallel on different hosts)
-- `--backup-base-path`: base path of the backup. e.g. if you provide `/my/path`, the backup locations will be `/my/path/ranger` and `/my/path/atlas`, if the base path won't be the same for these, you can provide Ranger or Atlas specific ones with `--ranger-backup-base-path` and `--atlas-backup-base-path`
-- `--hdfs-base-path`: use this if index is stored on hdfs (that does not mean that the backup is stored on hdfs, it is only the index location), that is applied for all indices, most of the time that is only used for ranger, so if that is the case use `--ranger-hdfs-base-path` instead of this option, the value is mostly `/user/infra-solr` which means the collection itself could be at `hdfs:///user/infra-solr/ranger_audits` location
-(IMPORTANT NOTE: if ranger index is stored on hdfs, make sure to use the proper `-Dsolr.hdfs.security.kerberos.principal` in `infra-solr-env/content` config, by default it points to the Infra Solr principal, but if it was set to something else before, that needs to be changed to that)
-
-The generated config file output could be something like that:
-```ini
-[ambari_server]
-host = c7401.ambari.apache.org
-port = 8080
-cluster = cl1
-protocol = http
-username = admin
-password = admin
-
-[local]
-java_home = /usr/jdk64/jdk1.8.0_112/
-hostname = c7402.ambari.apache.org
-shared_drive = false
-
-[cluster]
-kerberos_enabled = true
-
-[infra_solr]
-protocol = http
-hosts = c7402.ambari.apache.org,c7403.ambari.apache.org
-port = 8886
-zk_connect_string = c7401.ambari.apache.org:2181
-znode = /infra-solr
-user = infra-solr
-keytab = /etc/security/keytabs/ambari-infra-solr.service.keytab
-principal = infra-solr/c7402.ambari.apache.org
-zk_principal_user = zookeeper
-
-[ranger_collection]
-enabled = true
-ranger_config_set_name = ranger_audits
-ranger_collection_name = ranger_audits
-ranger_collection_shards = 2
-ranger_collection_max_shards_per_node = 4
-backup_ranger_config_set_name = old_ranger_audits
-backup_ranger_collection_name = old_ranger_audits
-backup_path = /my/path/ranger
-
-[atlas_collections]
-enabled = true
-config_set = atlas_configs
-fulltext_index_name = fulltext_index
-fulltext_index_shards = 2
-fulltext_index_max_shards_per_node = 4
-edge_index_name = edge_index
-edge_index_shards = 2
-edge_index_max_shards_per_node = 4
-vertex_index_name = vertex_index
-vertex_index_shards = 2
-vertex_index_max_shards_per_node = 4
-backup_fulltext_index_name = old_fulltext_index
-backup_edge_index_name = old_edge_index
-backup_vertex_index_name = old_vertex_index
-backup_path = /my/path/atlas
-
-[logsearch_collections]
-enabled = true
-hadoop_logs_collection_name = hadoop_logs
-audit_logs_collection_name = audit_logs
-history_collection_name = history
-```
-(NOTE: if Infra Solr is external from Ranger perspective and the Solr instances are not even located in the cluster, migrationConfigGenerator.py needs to be executed on the Infra Solr cluster, then it won't find the Ranger service, so you will need to fill the Ranger parameters in the configuration ini file manually.)
-
-After the file has been created successfully by the script, review the configuration (e.g.: if 1 of the Solr instances is not up yet, and you do not want to use its REST API for operations, you can remove its host from the hosts of the infra_solr section, or you can change backup locations for different collections etc.). Also if it's not required to backup e.g. Atlas collections (so you are ok to drop those), you can change the `enabled` config of the collections section to `false`.
-
-[![asciicast](https://asciinema.org/a/188260.png)](https://asciinema.org/a/188260?speed=2)
-
-##### (Optional) Upgrade All ambari-infra-solr packages
-
-If you did not upgrade ambari-infra-solr-client packages on all host, you can do that from the host where you are, to send a command to Ambari to do that on every host where there is an `INFRA_SOLR_CLIENT` component located:
-
-```bash
-CONFIG_INI_LOCATION=ambari_solr_migration.ini
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-clients
-```
-
-### III. Backup collections (Ambari 2.6.x to Ambari 2.7.x)
-
-##### Prerequisites:
-- Check the Solr instances are running and also make sure you have stable shards (at least one core is up and running)
-- Have enough space on the disks to store Solr backup data
-
-The backup process contains a few steps: backup ranger configs on znode, backup collections, delete Log Search znodes, then upgrade `managed-schema` znode for Ranger.
-These tasks can be done with 1 [migrationHelper.py](#solr-migration-helper-script) command:
-
-```bash
-# use a sudoer user for running the script !!
-# first (optionally) you can check that there are ACTIVE replicas for all the shards
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action check-shards
-# then run backup-and-cleanup ... you can run these actions separately with these action: 'backup','delete-collections', 'cleanup-znodes'
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup-and-cleanup
-```
-
-If the script finished successfully and everything looks green on Ambari UI as well, you can go ahead with [Infra Solr package upgrade](#iii.-upgrade-infra-solr-packages). Otherwise (or if you want to go step by step instead of the command above) you have the option to run tasks step by step (or manually as well). Those tasks are found in the next sections.
-
-[![asciicast](https://asciinema.org/a/187421.png)](https://asciinema.org/a/187421?speed=2)
-
-#### III/1. Backup Ranger collection
-
-The [migrationHelper.py](#solr-migration-helper-script) script can be used to backup only Ranger collection (use `-s` option to filter on services)
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup -s RANGER
-```
-
-Also you can do the backup manually on every Solr node, by using [backup API of Solr](https://lucene.apache.org/solr/guide/6_6/making-and-restoring-backups.html). (use against core names, not collection name, it works as expected only if you have 1 shard on every node)
-
-Example:
-```bash
-
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-# collection parameters
-BACKUP_PATH=... # backup location, e.g.: /tmp/ranger-backup
-
-# RUN THIS FOR EVERY CORE ON SPECIFIC HOSTS !!!
-BACKUP_CORE=... # specific core on a host
-BACKUP_CORE_NAME=... # core names for backup -> /
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-mkdir -p $BACKUP_PATH
-
-curl --negotiate -k -u : "$SOLR_URL/$BACKUP_CORE/replication?command=BACKUP&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-
-(help: [get core names](#get-core-/-shard-names-with-hosts))
-
-#### III/2. Backup Ranger configs on Solr ZNode
-
-Next you can copy `ranger_audits` configs to a different znode, in order to keep the old schema.
-
-```bash
-export JAVA_HOME=/usr/jdk64/1.8.0_112 # or other jdk8 location
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-# note 1: --transfer-mode copyToLocal or --transfer-mode copyFromLocal can be used if you want to use the local filesystem
-# note 2: use --jaas-file option only if the cluster is kerberized
-infra-solr-cloud-cli --transfer-znode -z $ZK_CONN_STR --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf --copy-src /infra-solr/configs/ranger_audits --copy-dest /infra-solr/configs/old_ranger_audits
-```
-
-#### III/3. Delete Ranger collection
-
-At this point you can delete the actual Ranger collection with this command:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections -s RANGER
-```
-
-Or do it manually by the Solr API:
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-COLLECTION_NAME=ranger_audits
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-```
-
-#### III/4. Upgrade Ranger Solr schema
-
-Before creating the new Ranger collection, it is required to upgrade `managed-schema` configs.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action cleanup-znodes -s RANGER
-```
-
-It can be done manually by `infra-solr-cloud-cli` as well:
-
-```bash
-sudo -u infra-solr -i
-
-# If kerberos enabled
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-## BACKUP OLD CONFIG
-export JAVA_HOME=/usr/jdk64/1.8.0_112 # or other jdk8 location
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-# note: --transfer-mode copyToLocal or --transfer-mode copyFromLocal can be used if you want to use the local filesystem
-infra-solr-cloud-cli --transfer-znode -z $ZK_CONN_STR --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf --copy-src /infra-solr/configs/ranger_audits --copy-dest /infra-solr/configs/old_ranger_audits
-## UPLOAD NEW SCHEMA
-# Setup env for zkcli.sh
-source /etc/ambari-infra-solr/conf/infra-solr-env.sh
-# Run that command only if kerberos is enabled.
-export SOLR_ZK_CREDS_AND_ACLS="${SOLR_AUTHENTICATION_OPTS}"
-
-# Upload the new schema
-/usr/lib/ambari-infra-solr/server/scripts/cloud-scripts/zkcli.sh --zkhost "${ZK_HOST}" -cmd putfile /configs/ranger_audits/managed-schema /usr/lib/ambari-infra-solr-client/migrate/managed-schema
-```
-
-#### III/5. Backup Atlas collections
-
-Atlas has 3 collections: fulltext_index, edge_index, vertex_index.
-You will need to do similar steps that you did for Ranger, only difference is you will need to filter ATLAS service.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup -s ATLAS
-```
-
-Also you can do the backup manually on every Solr node, by using [backup API of Solr](https://lucene.apache.org/solr/guide/6_6/making-and-restoring-backups.html). (use against core names, not collection name, it works as expected only if you have 1 shard on every node)
-
-Example:
-```bash
-
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-# collection parameters
-BACKUP_PATH=... # backup location, e.g.: /tmp/fulltext_index_backup
-
-# RUN THIS FOR EVERY CORE ON SPECIFIC HOSTS !!!
-BACKUP_CORE=... # specific core on a host
-BACKUP_CORE_NAME=... # core names for backup -> /
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-mkdir -p $BACKUP_PATH
-
-curl --negotiate -k -u : "$SOLR_URL/$BACKUP_CORE/replication?command=BACKUP&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-(help: [get core names](#get-core-/-shard-names-with-hosts))
-
-#### III/6. Delete Atlas collections
-
-Next step for Atlas is to delete all 3 old collections. It can be done by `delete-collections` action with ATLAS filter.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections -s ATLAS
-```
-
-Or manually run DELETE operation with 3 Solr API call on all 3 Atlas collections:
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-COLLECTION_NAME=fulltext_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=edge_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=vertex_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-```
-
-#### III/7. Delete Log Search collections
-
-For Log Search, it is a must to delete all the old collections. Can be done similar way as for Ranger or Atlas:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections -s LOGSEARCH
-```
-Or manually run Solr API DELETE commands here as well:
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-COLLECTION_NAME=hadoop_logs
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=audit_logs
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=history
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-```
-
-#### III/8. Delete Log Search Solr configs
-
-Log Search configs are changed a lot between Ambari 2.6.x and Ambari 2.7.x, so it is required to delete those as well. (configs will be regenerated during Log Search startup)
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action cleanup-znodes -s LOGSEARCH
-```
-You can delete the znodes by zookeeper-client as well:
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-# ZOOKEEPER CONNECTION STRING from zookeeper servers
-export ZK_CONN_STR=... # without znode,e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-zookeeper-client -server $ZK_CONN_STR rmr /infra-solr/configs/hadoop_logs
-zookeeper-client -server $ZK_CONN_STR rmr /infra-solr/configs/audit_logs
-zookeeper-client -server $ZK_CONN_STR rmr /infra-solr/configs/history
-```
-
-### IV. Upgrade Infra Solr packages
-
-At this step, you will need to upgrade `ambari-infra-solr` packages. (also make sure ambari-logsearch* packages are upgraded as well)
-
-You can do that through ambari commands with the migrationHelper.py script (that means you won't need to ssh into every Infra Solr instance host):
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-instances
-# same can be done for logfeeders and logsearch portals if required:
-# just use '--action upgrade-logsearch-portal' or '--action upgrade-logfeeders'
-```
-That runs a package remove and a package install.
-
-Or the usual way is to run these commands on every host where `ambari-infra-solr` packages are located:
-
-```bash
-# For RHEL/CentOS/Oracle Linux:
-
-yum clean all
-yum upgrade ambari-infra-solr
-
-# For SLES:
-
-zypper clean
-zypper up ambari-infra-solr
-
-# For Ubuntu/Debian:
-
-apt-get clean all
-apt-get update
-apt-get install ambari-infra-solr
-```
-
-After the packages are updated, Solr instances can be restarted. It can be done from the UI or from command line as well:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-solr
-```
-
-### V. Re-create collections
-
-Restart Ranger Admin / Atlas / Log Search Ambari service, as the collections were deleted before, during startup, new collections will be created (as a Solr 7 collection). This can be done through the UI or with the following commands:
-
-```bash
-# if Ranger installed on the cluster
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-ranger
-# if Atlas installed on the cluster
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-atlas
-# If LogSearch installed on the cluster
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-logsearch
-```
-
-At this point you can stop and do the migration / restore later (as long as you keep the backup), and go ahead with e.g. the HDP upgrade. (The migration part can take a long time - ~1GB/min.)
-
-### VI. Migrate Solr Collections
-
-From this point, you can migrate your old index in the background. On every host where a backup is located, you can run the lucene index migration tool (packaged with ambari-infra-solr-client). For lucene index migration, [migrationHelper.py](#solr-migration-helper-script) can be used, or `/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh` directly. That script uses the [IndexMigrationTool](https://lucene.apache.org/solr/guide/7_3/indexupgrader-tool.html).
-The whole migration can be done by executing 1 command:
-```bash
-# use a sudoer user for running the script !!
-# you can use this command with nohup in the background, like: `nohup > nohup2.out&`, as migration can take so much time (~1GB/min)
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate
-```
-If the script finished successfully and everything looks green on Ambari UI as well, you can go ahead with [Restore collections](#vi.-restore-collections). Otherwise (or if you want to go step by step instead of the command above) you have the option to run the tasks step by step (or manually as well). Those tasks are found in the next sections.
-
-[![asciicast](https://asciinema.org/a/187125.png)](https://asciinema.org/a/187125?speed=2)
-
-#### VI/1. Migrate Ranger collections
-
-Migration for `ranger_audits` collection (cores):
-
-```bash
-# by default, you will migrate to Lucene 6.6.2, if you want to migrate again to Solr 7 (not required), you can use --version 7.4.0 flag
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate -s RANGER
-```
-
-Or you can run commands manually on nodes where your backups are located:
-```bash
-
-export JAVA_HOME=/usr/jdk64/1.8.0_112
-
-# if /tmp/ranger-backup is your backup location
-infra-lucene-index-tool upgrade-index -d /tmp/ranger-backup -f -b -g
-
-# with 'infra-lucene-index-tool help' command you can checkout the command line options
-```
-
-By default, the tool will migrate from lucene version 5 to lucene version 6.6.2. (that's ok for Solr 7) If you want a lucene 7 index, you will need to re-run the migration tool command with `-v 7.4.0` option.
-
-#### VI/2. Migrate Atlas collections
-
-As Atlas has 3 collections, you will need similar steps to those required for Ranger, just for all 3 collections.
-(fulltext_index, edge_index, vertex_index)
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate -s ATLAS
-```
-
-Or you can run commands manually on nodes where your backups are located:
-```bash
-
-export JAVA_HOME=/usr/jdk64/1.8.0_112
-
-# if /tmp/fulltext_index_backup is your backup location
-infra-lucene-index-tool upgrade-index -d /tmp/fulltext_index_backup -f -b -g
-
-# with 'infra-lucene-index-tool help' command you can checkout the command line options
-```
-
-By default, the tool will migrate from lucene version 5 to lucene version 6.6.2. (that's ok for Solr 7) If you want a lucene 7 index, you will need to re-run the migration tool command with `-v 7.4.0` option.
-
-### VII. Restore Collections
-
-For restoring the old collections, first you will need to create them. As those collections might not be listed in the security.json of Infra Solr, you can get 403 errors if you try to access them later; for the time while you are doing the restore + transporting Solr data to other collections, you can [turn off](#turn-off-infra-solr-authorization) the Solr authorization plugin.
-
-The collection creation and restore part can be done with 1 command:
-
-```bash
-# use a sudoer user for running the script !!
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore --keep-backup
-```
-
-If the script finished successfully and everything looks green on Ambari UI as well, you can go ahead with [Restart Solr Instances](#vii.-restart-infra-solr-instances). Otherwise (or if you want to go step by step instead of the command above) you have the option to run the tasks step by step (or manually as well). Those tasks are found in the next sections.
-
-[![asciicast](https://asciinema.org/a/187423.png)](https://asciinema.org/a/187423?speed=2)
-
-#### VII/1. Restore Old Ranger collection
-
-After lucene data migration is finished, you can restore your replicas on every host where you have the backups. But we need to restore the old data to a new collection, so first you will need to create that (on a host where you have an installed Infra Solr component). For Ranger, use the old_ranger_audits config set that you backed up during the Solr schema config upgrade step (set this as CONFIG_NAME); to make that collection work with Solr 7, you need to copy your solrconfig.xml as well.
-That can be done with executing the following command:
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore -s RANGER
-```
-
-Or you can manually create a collection for restoring the backup (`old_ranger_audits`)
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-NUM_SHARDS=... # use that number that was used for the old collection - important to use at least that many that you have originally before backup
-NUM_REP=1 # can be more, but 1 is recommended for that temp collection
-MAX_SHARDS_PER_NODE=... # use that number that was used for the old collection
-CONFIG_NAME=old_ranger_audits
-OLD_DATA_COLLECTION=old_ranger_audits
-
-# kinit only if kerberos is enabled for the cluster
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-export JAVA_HOME=/usr/jdk64/1.8.0_112 # or other jdk8 location
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-
-# note 1: jaas-file option required only if kerberos is enabled for the cluster
-# note 2: copy new solrconfig.xml as the old one won't be compatible with solr 7
-infra-solr-cloud-cli --transfer-znode -z $ZK_CONN_STR --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf --copy-src /infra-solr/configs/ranger_audits/solrconfig.xml --copy-dest /infra-solr/configs/old_ranger_audits/solrconfig.xml
-
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-```
-
-Then restore the cores with Solr REST API: ([get core names](#get-core-/-shard-names-with-hosts))
-
-```bash
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-BACKUP_PATH=... # backup location, e.g.: /tmp/ranger-backup
-
-OLD_BACKUP_COLLECTION_CORE=... # choose a core to restore
-BACKUP_CORE_NAME=... # choose a core from backup cores - you can find these names as : /snapshot.$BACKUP_CORE_NAME
-
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-curl --negotiate -k -u : "$SOLR_URL/$OLD_BACKUP_COLLECTION_CORE/replication?command=RESTORE&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-
-Or use simple `cp` or `hdfs dfs -put` commands to copy the migrated cores to the right places.
-
-#### VII/2. Restore Old Atlas collections
-
-For Atlas, use the `old_` prefix for all 3 collections that you need to create and use the `atlas_configs` config set, then use those to restore the backups:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore -s ATLAS
-```
-
-Or you can do the create collection and restore collections (cores) step by step:
-
-Create a collection for restoring the backup (`old_fulltext_index`, `old_vertex_index`, `old_edge_index`)
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-NUM_SHARDS=... # use that number that was used for the old collection - important to use at least that many that you have originally before backup
-NUM_REP=1 # use 1!
-MAX_SHARDS_PER_NODE=... # use that number that was used for the old collection
-CONFIG_NAME=atlas_configs
-
-# kinit only if kerberos is enabled for the cluster
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-OLD_DATA_COLLECTION=old_fulltext_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-OLD_DATA_COLLECTION=old_edge_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-OLD_DATA_COLLECTION=old_vertex_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-```
-
-Also you can manually run restore commands: ([get core names](#get-core-/-shard-names-with-hosts))
-
-```bash
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-BACKUP_PATH=... # backup location, e.g.: /tmp/fulltext_index-backup
-
-OLD_BACKUP_COLLECTION_CORE=... # choose a core to restore
-BACKUP_CORE_NAME=... # choose a core from backup cores - you can find these names as : /snapshot.$BACKUP_CORE_NAME
-
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-curl --negotiate -k -u : "$SOLR_URL/$OLD_BACKUP_COLLECTION_CORE/replication?command=RESTORE&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-
-Or use simple `cp` or `hdfs dfs -put` commands to copy the migrated cores to the right places.
-
-### VIII. Restart Infra Solr instances
-
-Next step is to restart Solr instances. That can be done on the Ambari UI, or optionally you can use the migrationHelper script for that as well (rolling restart)
-```bash
-# --batch-interval -> interval between restart solr tasks
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action rolling-restart-solr --batch-interval 60
-```
-
-### IX. Transport old data to new collections
-
-Last step (that can be done any time, as you already have your data in Solr) is to transport all data from the backup collections to the live ones.
-It can be done by running `transport-old-data` action by migration helper script:
-
-```bash
-# working directory is under '/tmp/solrDataManager' folder
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action transport-old-data
-```
-
-Or in the next few steps, you can see what needs to be done manually to transport old Ranger and Atlas Solr data to active collections.
-
-#### IX/1. Transport old data to Ranger collection
-
-In the end, you end up with 2 collections (ranger_audits and old_ranger_audits), in order to drop the restored one, you will need to transfer your old data to the new collection. To achieve this, you can use [solrDataManager.py](#solr-data-manager-script), which is located next to the `migrationHelper.py` script
-
-```bash
-# Init values:
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-END_DATE=... # example: 2018-02-18T12:00:00.000Z , date until you export data
-
-OLD_COLLECTION=old_ranger_audits
-ACTIVE_COLLECTION=ranger_audits
-EXCLUDE_FIELDS=_version_ # comma separated exclude fields, at least _version_ is required
-
-# provide these with -k and -n options only if kerberos is enabled for Infra Solr !!!
-INFRA_SOLR_KEYTAB=... # example: /etc/security/keytabs/ambari-infra-solr.service.keytab
-INFRA_SOLR_PRINCIPAL=... # example: infra-solr/$(hostname -f)@EXAMPLE.COM
-
-DATE_FIELD=evtTime
-# infra-solr-data-manager is a symlink points to /usr/lib/ambari-infra-solr-client/solrDataManager.py
-infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 -f $DATE_FIELD -e $END_DATE --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS
-
-# Or if you want to run the command in the background (with log and pid file):
-nohup infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 -f $DATE_FIELD -e $END_DATE --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS > /tmp/solr-data-mgr.log 2>&1 & echo $! > /tmp/solr-data-mgr.pid
-```
-[![asciicast](https://asciinema.org/a/188396.png)](https://asciinema.org/a/188396?speed=2)
-
-#### IX/2. Transport old data to Atlas collections
-
-In the end, you end up with 6 Atlas collections (vertex_index, old_vertex_index, edge_index, old_edge_index, fulltext_index, old_fulltext_index ... old_* collections will only exist if there was a restore against a non-empty collection, which means you won't need to transfer data if there is no old_* pair for a specific collection); in order to drop the restored one, you will need to transfer your old data to the new collection. To achieve this, you can use [solrDataManager.py](#solr-data-manager-script), which is located next to the `migrationHelper.py` script. Here, the script usage will be a bit different as we cannot provide a proper date/timestamp field, so during the data transfer, the records will be sorted only by id. (To do this you will need to use the `--skip-date-usage` flag.)
-
-Example: (with vertex_index; do the same with edge_index and fulltext_index, most likely at least edge_index will be empty)
-```bash
-# Init values:
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-OLD_COLLECTION=old_vertex_index
-ACTIVE_COLLECTION=vertex_index
-EXCLUDE_FIELDS=_version_ # comma separated exclude fields, at least _version_ is required
-
-# provide these with -k and -n options only if kerberos is enabled for Infra Solr !!!
-INFRA_SOLR_KEYTAB=... # example: /etc/security/keytabs/ambari-infra-solr.service.keytab
-INFRA_SOLR_PRINCIPAL=... # example: infra-solr/$(hostname -f)@EXAMPLE.COM
-
-# infra-solr-data-manager is a symlink points to /usr/lib/ambari-infra-solr-client/solrDataManager.py
-infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 --skip-date-usage --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS
-
-# Or if you want to run the command in the background (with log and pid file):
-nohup infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 --skip-date-usage --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS > /tmp/solr-data-mgr.log 2>&1 & echo $! > /tmp/solr-data-mgr.pid
-```
-
-[![asciicast](https://asciinema.org/a/188402.png)](https://asciinema.org/a/188402?speed=2)
-
-### Happy path
-
-Happy path steps are mainly for automation.
-
-##### 1. Generate migration config
-
-Generate ini config first for the migration, after running the following script, review the ini file content.
-
-```bash
-CONFIG_INI_LOCATION=ambari_migration.ini
-BACKUP_BASE_PATH=/tmp
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationConfigGenerator.py --ini-file $CONFIG_INI_LOCATION --host c7401.ambari.apache.org -port 8080 --cluster cl1 --username admin --password admin --backup-base-path=$BACKUP_BASE_PATH --java-home /usr/jdk64/jdk1.8.0_112
-```
-##### 2.a) Do backup-migrate-restore
-
-For doing a backup + cleanup, then later migrate + restore, use the following commands:
-
-```bash
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode backup
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode delete --skip-solr-client-upgrade
-# go ahead with HDP upgrade or anything else, then if you have resource / time (recommended to use nohup as migrate part can take a lot of time):
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode migrate-restore # you can use --keep-backup option, it will keep the backup data, it's safer but you need enough space for that
-```
-
-Or you can execute these commands together (if you won't go with HDP upgrade after backup):
-```bash
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode all
-```
-
-Which is equivalent to executing the following migrationHelper.py commands:
-
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-clients
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup-and-cleanup
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-instances
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logsearch-portal
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logfeeders
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-solr
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-logsearch
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-ranger
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-atlas
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action rolling-restart-solr
-```
-
-##### 2.b) Do delete only if backup is not required
-
-For only cleanup collections, execute this script:
-```bash
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode delete
-```
-
-Which is equivalent to executing the following migrationHelper.py commands:
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-clients
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-instances
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logsearch-portal
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logfeeders
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-solr
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-logsearch
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-ranger
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-atlas
-```
-
-##### 3. Transport Solr data from old collections to active collections (optional)
-
-Run this command to transport old data to active collections:
-```bash
-# recommended to use with nohup as that command can take long time as well
-# working directory is under '/tmp/solrDataManager' folder
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode transport
-```
-
-Or see [transport old data to new collections](#viii.-transport-old-data-to-new-collections) step
-
-### APPENDIX
-
-#### Additional filters for migrationHelper.py script
-
-- `--service-filter` or `-s`: you can filter on services for migration commands (like run against only ATLAS or RANGER), possible values: ATLAS,RANGER,LOGSEARCH
-- `--skip-cores`: skip specific cores from migration (can be useful if just one of it failed during restore etc.)
-- `--collection` or `-c`: run migration commands on just a specific collection (like: `ranger_audits`, or `old_ranger_audits` for restore)
-- `--core-filter`: can be used only for index migration, that will work as a regex filter on the snapshot core folder e.g.: "mycore" means it will be applied only on "/index/location/mycore_folder" but not on "/index/location/myother_folder"
-
-#### What to do if Solr instances restarted right after Ambari upgrade but before upgrade Solr instance packages?
-
-If you restarted Solr before backup or upgrade Solr server packages, you can fix the Solr config with the following command:
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action fix-solr5-kerberos-config
-```
-
-That basically adds `SOLR_KERB_NAME_RULES` back to `infra-solr-env/content` and disables authorization for Solr. (It uploads a /security.json to the /infra-solr znode without the authorization config, then turns manually managed /security.json on in order to not override /security.json again on Solr restart.) After the command has finished successfully, you will need to restart the Solr instances.
-
-But if you added the `SOLR_KERB_NAME_RULES` config to `infra-solr-env/content`, you will need to delete that after you have upgraded the Solr package (and before restarting the instances). You can do that with the `fix-solr7-kerberos-config` action:
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action fix-solr7-kerberos-config
-```
-
-#### Get core / shard names with hosts
-
-To see which hosts are related to your collections, you can check the Solr UI (using SPNEGO), or use a zookeeper-client or the Solr zookeeper API to get the state.json details of the collection (`/solr/admin/zookeeper?detail=true&path=/collections//state.json`)
-
-#### Turn off Infra Solr Authorization
-
-You can turn off Solr authorization plugin with the `disable-solr-authorization` action (can be executed after config generation [step](#0-gather-params)):
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action disable-solr-authorization
-```
-
-You can re-enable it with the following command: (or set `infra-solr-security-json/infra_solr_security_manually_managed` configuration to `false`, then restart Solr)
-
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action enable-solr-authorization
-```
-
-#### Solr Migration Helper Script
-
-`/usr/lib/ambari-infra-solr-client/migrationHelper.py --help`
-
-```text
-Usage: migrationHelper.py [options]
-
-Options:
- -h, --help show this help message and exit
- -a ACTION, --action=ACTION
- delete-collections | backup | cleanup-znodes | backup-
- and-cleanup | migrate | restore |' '
- rolling-restart-solr | rolling-restart-atlas |
- rolling-restart-ranger | check-shards | check-backup-
- shards | enable-solr-authorization | disable-solr-
- authorization |' ' fix-solr5-kerberos-
- config | fix-solr7-kerberos-config | upgrade-solr-
- clients | upgrade-solr-instances | upgrade-logsearch-
- portal | upgrade-logfeeders | stop-logsearch |'
- ' restart-solr |restart-logsearch | restart-ranger |
- restart-atlas | transport-old-data
- -i INI_FILE, --ini-file=INI_FILE
- Config ini file to parse (required)
- -f, --force force index upgrade even if it's the right version
- -v, --verbose use for verbose logging
- -s SERVICE_FILTER, --service-filter=SERVICE_FILTER
- run commands only selected services (comma separated:
- LOGSEARCH,ATLAS,RANGER)
- -c COLLECTION, --collection=COLLECTION
- selected collection to run an operation
- --async async Ambari operations (backup | restore | migrate)
- --index-location=INDEX_LOCATION
- location of the index backups. add ranger/atlas prefix
- after the path. required only if no backup path in the
- ini file
- --atlas-index-location=ATLAS_INDEX_LOCATION
- location of the index backups (for atlas). required
- only if no backup path in the ini file
- --ranger-index-location=RANGER_INDEX_LOCATION
- location of the index backups (for ranger). required
- only if no backup path in the ini file
- --version=INDEX_VERSION
- lucene index version for migration (6.6.2 or 7.4.0)
- --solr-async-request-tries=SOLR_ASYNC_REQUEST_TRIES
- number of max tries for async Solr requests (e.g.:
- delete operation)
- --request-tries=REQUEST_TRIES
- number of tries for BACKUP/RESTORE status api calls in
- the request
- --request-time-interval=REQUEST_TIME_INTERVAL
- time interval between BACKUP/RESTORE status api calls
- in the request
- --request-async skip BACKUP/RESTORE status api calls from the command
- --transport-read-block-size=TRANSPORT_READ_BLOCK_SIZE
- block size to use for reading from solr during
- transport
- --transport-write-block-size=TRANSPORT_WRITE_BLOCK_SIZE
- number of records in the output files during transport
- --include-solr-hosts=INCLUDE_SOLR_HOSTS
- comma separated list of included solr hosts
- --exclude-solr-hosts=EXCLUDE_SOLR_HOSTS
- comma separated list of excluded solr hosts
- --disable-solr-host-check
- Disable to check solr hosts are good for the
- collection backups
- --core-filter=CORE_FILTER
- core filter for replica folders
- --skip-cores=SKIP_CORES
- specific cores to skip (comma separated)
- --hdfs-base-path=HDFS_BASE_PATH
- hdfs base path where the collections are located
- (e.g.: /user/infrasolr). Use if both atlas and ranger
- collections are on hdfs.
- --ranger-hdfs-base-path=RANGER_HDFS_BASE_PATH
- hdfs base path where the ranger collection is located
- (e.g.: /user/infra-solr). Use if only ranger
- collection is on hdfs.
- --atlas-hdfs-base-path=ATLAS_HDFS_BASE_PATH
- hdfs base path where the atlas collections are located
- (e.g.: /user/infra-solr). Use if only atlas
- collections are on hdfs.
- --keep-backup If it is turned on, Snapshot Solr data will not be
- deleted from the filesystem during restore.
- --batch-interval=BATCH_INTERVAL
- batch time interval (seconds) between requests (for
- restarting INFRA SOLR, default: 60)
- --batch-fault-tolerance=BATCH_FAULT_TOLERANCE
- fault tolerance of tasks for batch request (for
- restarting INFRA SOLR, default: 0)
- --shared-drive Use if the backup location is shared between hosts.
- (override config from config ini file)
- --skip-json-dump-files=SKIP_JSON_DUMP_FILES
- comma separated list of files that won't be download
- during collection dump (could be useful if it is
- required to change something in manually in the
- already downloaded file)
- --skip-index-size Skip index size check for check-shards or check-
- backup-shards
- --skip-warnings Pass check-shards or check-backup-shards even if there
- are warnings
-```
-
-#### Solr Migration Config Generator Script
-
-```text
-Usage: migrationConfigGenerator.py [options]
-
-Options:
- -h, --help show this help message and exit
- -H HOST, --host=HOST hostname for ambari server
- -P PORT, --port=PORT port number for ambari server
- -c CLUSTER, --cluster=CLUSTER
- name cluster
- -f, --force-ranger force to get Ranger details - can be useful if Ranger
- is configured to use external Solr (but points to
- internal Sols)
- -s, --ssl use if ambari server using https
- -v, --verbose use for verbose logging
- -u USERNAME, --username=USERNAME
- username for accessing ambari server
- -p PASSWORD, --password=PASSWORD
- password for accessing ambari server
- -j JAVA_HOME, --java-home=JAVA_HOME
- local java_home location
- -i INI_FILE, --ini-file=INI_FILE
- Filename of the generated ini file for migration
- (default: ambari_solr_migration.ini)
- --backup-base-path=BACKUP_BASE_PATH
- base path for backup, e.g.: /tmp/backup, then
- /tmp/backup/ranger/ and /tmp/backup/atlas/ folders
- will be generated
- --backup-ranger-base-path=BACKUP_RANGER_BASE_PATH
- base path for ranger backup (override backup-base-path
- for ranger), e.g.: /tmp/backup/ranger
- --backup-atlas-base-path=BACKUP_ATLAS_BASE_PATH
- base path for atlas backup (override backup-base-path
- for atlas), e.g.: /tmp/backup/atlas
- --hdfs-base-path=HDFS_BASE_PATH
- hdfs base path where the collections are located
- (e.g.: /user/infrasolr). Use if both atlas and ranger
- collections are on hdfs.
- --ranger-hdfs-base-path=RANGER_HDFS_BASE_PATH
- hdfs base path where the ranger collection is located
- (e.g.: /user/infra-solr). Use if only ranger
- collection is on hdfs.
- --atlas-hdfs-base-path=ATLAS_HDFS_BASE_PATH
- hdfs base path where the atlas collections are located
- (e.g.: /user/infra-solr). Use if only atlas
- collections are on hdfs.
- --skip-atlas skip to gather Atlas service details
- --skip-ranger skip to gather Ranger service details
- --retry=RETRY number of retries during accessing random solr urls
- --delay=DELAY delay (seconds) between retries during accessing
- random solr urls
- --shared-drive Use if the backup location is shared between hosts.
-```
-
-#### Solr Data Manager Script
-
-`/usr/lib/ambari-infra-solr-client/solrDataManager.py --help`
-
-```text
-Usage: solrDataManager.py [options]
-
-Options:
- --version show program's version number and exit
- -h, --help show this help message and exit
- -m MODE, --mode=MODE archive | delete | save
- -s SOLR_URL, --solr-url=SOLR_URL
- the url of the solr server including the port and
- protocol
- -c COLLECTION, --collection=COLLECTION
- the name of the solr collection
- -f FILTER_FIELD, --filter-field=FILTER_FIELD
- the name of the field to filter on
- -r READ_BLOCK_SIZE, --read-block-size=READ_BLOCK_SIZE
- block size to use for reading from solr
- -w WRITE_BLOCK_SIZE, --write-block-size=WRITE_BLOCK_SIZE
- number of records in the output files
- -i ID_FIELD, --id-field=ID_FIELD
- the name of the id field
- -o DATE_FORMAT, --date-format=DATE_FORMAT
- the date format to use for --days
- -q ADDITIONAL_FILTER, --additional-filter=ADDITIONAL_FILTER
- additional solr filter
- -j NAME, --name=NAME name included in result files
- -g, --ignore-unfinished-uploading
- --json-file create a json file instead of line delimited json
- -z COMPRESSION, --compression=COMPRESSION
- none | tar.gz | tar.bz2 | zip | gz
- -k SOLR_KEYTAB, --solr-keytab=SOLR_KEYTAB
- the keytab for a kerberized solr
- -n SOLR_PRINCIPAL, --solr-principal=SOLR_PRINCIPAL
- the principal for a kerberized solr
- -a HDFS_KEYTAB, --hdfs-keytab=HDFS_KEYTAB
- the keytab for a kerberized hdfs
- -l HDFS_PRINCIPAL, --hdfs-principal=HDFS_PRINCIPAL
- the principal for a kerberized hdfs
- -u HDFS_USER, --hdfs-user=HDFS_USER
- the user for accessing hdfs
- -p HDFS_PATH, --hdfs-path=HDFS_PATH
- the hdfs path to upload to
- -t KEY_FILE_PATH, --key-file-path=KEY_FILE_PATH
- the file that contains S3 ,
- -b BUCKET, --bucket=BUCKET
- the bucket name for S3 upload
- -y KEY_PREFIX, --key-prefix=KEY_PREFIX
- the key prefix for S3 upload
- -x LOCAL_PATH, --local-path=LOCAL_PATH
- the local path to save the files to
- -v, --verbose
- --solr-output-collection=SOLR_OUTPUT_COLLECTION
- target output solr collection for archive
- --solr-output-url=SOLR_OUTPUT_URL
- the url of the output solr server including the port
- and protocol
- --exclude-fields=EXCLUDE_FIELDS
- Comma separated list of excluded fields from json
- response
- --skip-date-usage datestamp field won't be used for queries (sort based
- on id field)
-
- specifying the end of the range:
- -e END, --end=END end of the range
- -d DAYS, --days=DAYS
- number of days to keep
-```
-
-#### Ambari Solr Migration script
-
-`/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --help`
-
-```text
-Usage: /usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --mode --ini-file [additional options]
-
- -m, --mode available migration modes: delete-only | backup-only | migrate-restore | all | transport
- -i, --ini-file ini-file location (used by migrationHelper.py)
- -s, --migration-script-location migrateHelper.py location (default: /usr/lib/ambari-infra-solr-client/migrationHelper.py)
- -w, --wait-between-steps wait between different migration steps in seconds (default: 15)
- -p, --python-path python location, default: /usr/bin/python
- -b, --batch-interval seconds between batch tasks for rolling restart solr at last step (default: 60)
- -k, --keep-backup keep backup data (more secure, useful if you have enough space for that)
- --skip-solr-client-upgrade skip ambari-infra-solr-client package upgrades
- --skip-solr-server-upgrade skip ambari-infra-solr package upgrades
- --skip-logsearch-upgrade skip ambari-logsearch-portal and ambari-logsearch-logfeeder package upgrades
- --skip-warnings skip warnings at check-shards step
- -h, --help print help
-```
diff --git a/ambari-infra/ambari-infra-solr-client/build.xml b/ambari-infra/ambari-infra-solr-client/build.xml
deleted file mode 100644
index f7b2633978b..00000000000
--- a/ambari-infra/ambari-infra-solr-client/build.xml
+++ /dev/null
@@ -1,79 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ambari-infra/ambari-infra-solr-client/pom.xml b/ambari-infra/ambari-infra-solr-client/pom.xml
deleted file mode 100644
index a4abc9aaf73..00000000000
--- a/ambari-infra/ambari-infra-solr-client/pom.xml
+++ /dev/null
@@ -1,178 +0,0 @@
-
-
-
-
- ambari-infra
- org.apache.ambari
- 2.0.0.0-SNAPSHOT
-
- 4.0.0
- http://maven.apache.org
- Ambari Infra Solr Client
-
- ambari-infra-solr-client
-
-
- 6.6.2
- lucene-core-${lucene6.version}.jar
- http://central.maven.org/maven2/org/apache/lucene/lucene-core/${lucene6.version}/${lucene6-core-jar.name}
- lucene-backward-codecs-${lucene6.version}.jar
- http://central.maven.org/maven2/org/apache/lucene/lucene-backward-codecs/${lucene6.version}/${lucene6-backward-codecs-jar.name}
-
-
-
-
- org.apache.solr
- solr-solrj
- ${solr.version}
-
-
- org.apache.lucene
- lucene-core
- ${solr.version}
-
-
- org.apache.lucene
- lucene-backward-codecs
- ${solr.version}
-
-
- org.apache.zookeeper
- zookeeper
-
-
- commons-cli
- commons-cli
-
-
- org.codehaus.jackson
- jackson-mapper-asl
- 1.9.13
-
-
- commons-codec
- commons-codec
- 1.8
-
-
- commons-lang
- commons-lang
-
-
- org.slf4j
- slf4j-api
- 1.7.20
-
-
- org.slf4j
- slf4j-log4j12
- 1.7.20
-
-
- log4j
- log4j
- 1.2.17
-
-
- com.sun.jdmk
- jmxtools
-
-
- com.sun.jmx
- jmxri
-
-
- javax.mail
- mail
-
-
- javax.jms
- jmx
-
-
- javax.jms
- jms
-
-
-
-
- com.amazonaws
- aws-java-sdk-s3
- 1.11.5
-
-
- junit
- junit
- test
-
-
- org.easymock
- easymock
- 3.4
- test
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-dependency-plugin
- 2.8
-
-
- copy-dependencies
- package
-
- copy-dependencies
-
-
- true
- ${basedir}/target/libs
- false
- false
- true
-
-
-
-
-
- org.apache.maven.plugins
- maven-antrun-plugin
- 1.7
-
-
- package
-
-
-
-
-
-
-
-
- run
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
deleted file mode 100644
index b0c778154f4..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java
+++ /dev/null
@@ -1,680 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.solr;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class AmbariSolrCloudCLI {
-
- private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudCLI.class);
-
- private static final int ZK_CLIENT_TIMEOUT = 60000; // 1 minute
- private static final int ZK_CLIENT_CONNECT_TIMEOUT = 60000; // 1 minute
- private static final String CREATE_COLLECTION_COMMAND = "create-collection";
- private static final String UPLOAD_CONFIG_COMMAND = "upload-config";
- private static final String DOWNLOAD_CONFIG_COMMAND = "download-config";
- private static final String CONFIG_CHECK_COMMAND = "check-config";
- private static final String CREATE_SHARD_COMMAND = "create-shard";
- private static final String CREATE_ZNODE = "create-znode";
- private static final String SET_CLUSTER_PROP = "cluster-prop";
- private static final String SETUP_KERBEROS_PLUGIN = "setup-kerberos-plugin";
- private static final String CHECK_ZNODE = "check-znode";
- private static final String SECURE_ZNODE_COMMAND = "secure-znode";
- private static final String UNSECURE_ZNODE_COMMAND = "unsecure-znode";
- private static final String SECURE_SOLR_ZNODE_COMMAND = "secure-solr-znode";
- private static final String SECURITY_JSON_LOCATION = "security-json-location";
- private static final String REMOVE_ADMIN_HANDLERS = "remove-admin-handlers";
- private static final String TRANSFER_ZNODE_COMMAND = "transfer-znode";
- private static final String DELETE_ZNODE_COMMAND = "delete-znode";
- private static final String DUMP_COLLECTIONS_DATA_COMMAND = "dump-collections";
- private static final String CMD_LINE_SYNTAX =
- "\n./solrCloudCli.sh --create-collection -z host1:2181,host2:2181/ambari-solr -c collection -cs conf_set"
- + "\n./solrCloudCli.sh --upload-config -z host1:2181,host2:2181/ambari-solr -d /tmp/myconfig_dir -cs config_set"
- + "\n./solrCloudCli.sh --download-config -z host1:2181,host2:2181/ambari-solr -cs config_set -d /tmp/myonfig_dir"
- + "\n./solrCloudCli.sh --check-config -z host1:2181,host2:2181/ambari-solr -cs config_set"
- + "\n./solrCloudCli.sh --create-shard -z host1:2181,host2:2181/ambari-solr -c collection -sn myshard"
- + "\n./solrCloudCli.sh --remove-admin-handlers -z host1:2181,host2:2181/ambari-solr -c collection"
- + "\n./solrCloudCli.sh --dump-collections -z host1:2181,host2:2181/ambari-solr -o collection-data.json"
- + "\n./solrCloudCli.sh --create-znode -z host1:2181,host2:2181 -zn /ambari-solr"
- + "\n./solrCloudCli.sh --check-znode -z host1:2181,host2:2181 -zn /ambari-solr"
- + "\n./solrCloudCli.sh --delete-znode -z host1:2181,host2:2181 -zn /ambari-solr"
- + "\n./solrCloudCli.sh --transfer-znode -z host1:2181,host2:2181 -cps /ambari-solr -cpd /ambari-solr-backup"
- + "\n./solrCloudCli.sh --cluster-prop -z host1:2181,host2:2181/ambari-solr -cpn urlScheme -cpn http"
- + "\n./solrCloudCli.sh --secure-znode -z host1:2181,host2:2181 -zn /ambari-solr -su logsearch,atlas,ranger --jaas-file /etc/myconf/jaas_file"
- + "\n./solrCloudCli.sh --unsecure-znode -z host1:2181,host2:2181 -zn /ambari-solr --jaas-file /etc/myconf/jaas_file"
- + "\n./solrCloudCli.sh --secure-solr-znode -z host1:2181,host2:2181 -zn /ambari-solr -su logsearch,atlas,ranger --jaas-file /etc/myconf/jaas_file"
- + "\n./solrCloudCli.sh --setup-kerberos-plugin -z host1:2181,host2:2181 -zn /ambari-solr --security-json-location /etc/infra-solr/conf/security.json\n ";
-
- public static void main(String[] args) {
- Options options = new Options();
- HelpFormatter helpFormatter = new HelpFormatter();
- helpFormatter.setDescPadding(10);
- helpFormatter.setWidth(200);
-
- final Option helpOption = Option.builder("h")
- .longOpt("help")
- .desc("Print commands")
- .build();
-
- final Option createCollectionOption = Option.builder("cc")
- .longOpt(CREATE_COLLECTION_COMMAND)
- .desc("Create collection in Solr (command)")
- .build();
-
- final Option uploadConfigurationOption = Option.builder("uc")
- .longOpt(UPLOAD_CONFIG_COMMAND)
- .desc("Upload configuration set to Zookeeper (command)")
- .build();
-
- final Option downloadConfigOption = Option.builder("dc")
- .longOpt(DOWNLOAD_CONFIG_COMMAND)
- .desc("Download configuration set from Zookeeper (command)")
- .build();
-
- final Option checkConfigOption = Option.builder("chc")
- .longOpt(CONFIG_CHECK_COMMAND)
- .desc("Check configuration exists in Zookeeper (command)")
- .build();
-
- final Option checkZnodeOption = Option.builder("chz")
- .longOpt(CHECK_ZNODE)
- .desc("Check znode exists in Zookeeper (command)")
- .build();
-
- final Option createShardOption = Option.builder("csh")
- .longOpt(CREATE_SHARD_COMMAND)
- .desc("Create shard in Solr (command)")
- .build();
-
- final Option setClusterPropOption = Option.builder("cp")
- .longOpt(SET_CLUSTER_PROP)
- .desc("Set cluster property (command)")
- .build();
-
- final Option createZnodeOption = Option.builder("cz")
- .longOpt(CREATE_ZNODE)
- .desc("Create Znode (command)")
- .build();
-
- final Option setupKerberosPluginOption = Option.builder("skp")
- .longOpt(SETUP_KERBEROS_PLUGIN)
- .desc("Setup kerberos plugin in security.json (command)")
- .build();
-
- final Option secureSolrZnodeOption = Option.builder("ssz")
- .longOpt(SECURE_SOLR_ZNODE_COMMAND)
- .desc("Set acls for solr znode (command)")
- .build();
-
- final Option secureZnodeOption = Option.builder("sz")
- .longOpt(SECURE_ZNODE_COMMAND)
- .desc("Set acls for znode (command)")
- .build();
-
- final Option unsecureZnodeOption = Option.builder("uz")
- .longOpt(UNSECURE_ZNODE_COMMAND)
- .desc("Disable security for znode (command)")
- .build();
-
- final Option removeAdminHandlerOption = Option.builder("rah")
- .longOpt(REMOVE_ADMIN_HANDLERS)
- .desc("Remove AdminHandlers request handler from solrconfig.xml (command)")
- .build();
-
- final Option transferZnodeOption = Option.builder("tz")
- .longOpt(TRANSFER_ZNODE_COMMAND)
- .desc("Transfer znode (copy from/to local or to another znode)")
- .build();
-
- final Option deleteZnodeOption = Option.builder("dz")
- .longOpt(DELETE_ZNODE_COMMAND)
- .desc("Delete znode")
- .build();
-
- final Option dumpCollectionsOption = Option.builder("dcd")
- .longOpt(DUMP_COLLECTIONS_DATA_COMMAND)
- .desc("Dump collections data")
- .build();
-
- final Option shardNameOption = Option.builder("sn")
- .longOpt("shard-name")
- .desc("Name of the shard for create-shard command")
- .numberOfArgs(1)
- .argName("my_new_shard")
- .build();
-
- final Option implicitRoutingOption = Option.builder("ir")
- .longOpt("implicit-routing")
- .desc("Use implicit routing when creating a collection")
- .build();
-
- final Option zkConnectStringOption = Option.builder("z")
- .longOpt("zookeeper-connect-string")
- .desc("Zookeeper quorum [and Znode (optional)]")
- .numberOfArgs(1)
- .argName("host:port,host:port[/ambari-solr]")
- .build();
-
- final Option znodeOption = Option.builder("zn")
- .longOpt("znode")
- .desc("Zookeeper ZNode")
- .numberOfArgs(1)
- .argName("/ambari-solr")
- .build();
-
- final Option collectionOption = Option.builder("c")
- .longOpt("collection")
- .desc("Collection name")
- .numberOfArgs(1)
- .argName("collection name")
- .build();
-
- final Option configSetOption = Option.builder("cs")
- .longOpt("config-set")
- .desc("Configuration set")
- .numberOfArgs(1)
- .argName("config_set")
- .build();
-
- final Option configDirOption = Option.builder("d")
- .longOpt("config-dir")
- .desc("Configuration directory")
- .numberOfArgs(1)
- .argName("config_dir")
- .build();
-
- final Option shardsOption = Option.builder("s")
- .longOpt("shards")
- .desc("Number of shards")
- .numberOfArgs(1)
- .argName("shard number")
- .type(Integer.class)
- .build();
-
- final Option replicationOption = Option.builder("r")
- .longOpt("replication")
- .desc("Replication factor")
- .numberOfArgs(1)
- .argName("replication factor")
- .type(Integer.class)
- .build();
-
- final Option retryOption = Option.builder("rt")
- .longOpt("retry")
- .desc("Number of retries for access Solr [default:10]")
- .numberOfArgs(1)
- .argName("number of retries")
- .type(Integer.class)
- .build();
-
- final Option intervalOption = Option.builder("i")
- .longOpt("interval")
- .desc("Interval for retry logic in sec [default:5]")
- .numberOfArgs(1)
- .argName("interval")
- .type(Integer.class)
- .build();
-
- final Option maxShardsOption = Option.builder("m")
- .longOpt("max-shards")
- .desc("Max number of shards per node (default: replication * shards)")
- .numberOfArgs(1)
- .argName("max number of shards")
- .build();
-
- final Option routerNameOption = Option.builder("rn")
- .longOpt("router-name")
- .desc("Router name for collection [default:implicit]")
- .numberOfArgs(1)
- .argName("router_name")
- .build();
-
- final Option routerFieldOption = Option.builder("rf")
- .longOpt("router-field")
- .desc("Router field for collection [default:_router_field_]")
- .numberOfArgs(1)
- .argName("router_field")
- .build();
-
- final Option jaasFileOption = Option.builder("jf")
- .longOpt("jaas-file")
- .desc("Location of the jaas-file to communicate with kerberized Solr")
- .numberOfArgs(1)
- .argName("jaas_file")
- .build();
-
- final Option keyStoreLocationOption = Option.builder("ksl")
- .longOpt("key-store-location")
- .desc("Location of the key store used to communicate with Solr using SSL")
- .numberOfArgs(1)
- .argName("key store location")
- .build();
-
- final Option keyStorePasswordOption = Option.builder("ksp")
- .longOpt("key-store-password")
- .desc("Key store password used to communicate with Solr using SSL")
- .numberOfArgs(1)
- .argName("key store password")
- .build();
-
- final Option keyStoreTypeOption = Option.builder("kst")
- .longOpt("key-store-type")
- .desc("Type of the key store used to communicate with Solr using SSL")
- .numberOfArgs(1)
- .argName("key store type")
- .build();
-
- final Option trustStoreLocationOption = Option.builder("tsl")
- .longOpt("trust-store-location")
- .desc("Location of the trust store used to communicate with Solr using SSL")
- .numberOfArgs(1)
- .argName("trust store location")
- .build();
-
- final Option trustStorePasswordOption = Option.builder("tsp")
- .longOpt("trust-store-password")
- .desc("Trust store password used to communicate with Solr using SSL")
- .numberOfArgs(1)
- .argName("trust store password")
- .build();
-
- final Option trustStoreTypeOption = Option.builder("tst")
- .longOpt("trust-store-type")
- .desc("Type of the trust store used to communicate with Solr using SSL")
- .numberOfArgs(1)
- .argName("trust store type")
- .build();
-
- final Option propNameOption = Option.builder("cpn")
- .longOpt("property-name")
- .desc("Cluster property name")
- .numberOfArgs(1)
- .argName("cluster prop name")
- .build();
-
- final Option propValueOption = Option.builder("cpv")
- .longOpt("property-value")
- .desc("Cluster property value")
- .numberOfArgs(1)
- .argName("cluster prop value")
- .build();
-
- final Option saslUsersOption = Option.builder("su")
- .longOpt("sasl-users")
- .desc("Sasl users (comma separated list)")
- .numberOfArgs(1)
- .argName("atlas,ranger,logsearch-solr")
- .build();
-
- final Option copyScrOption = Option.builder("cps")
- .longOpt("copy-src")
- .desc("ZNode or local source (used for ZNode transfer)")
- .numberOfArgs(1)
- .argName("/myznode | /my/path")
- .build();
-
- final Option copyDestOption = Option.builder("cpd")
- .longOpt("copy-dest")
- .desc("ZNode or local destination (used for ZNode transfer)")
- .numberOfArgs(1)
- .argName("/myznode | /my/path")
- .build();
-
- final Option transferModeOption = Option.builder("tm")
- .longOpt("transfer-mode")
- .desc("Transfer mode, if not used copy znode to znode.")
- .numberOfArgs(1)
- .argName("copyFromLocal | copyToLocal")
- .build();
-
- final Option securityJsonLocationOption = Option.builder("sjl")
- .longOpt(SECURITY_JSON_LOCATION)
- .desc("Local security.json path")
- .numberOfArgs(1)
- .argName("security.json location")
- .build();
-
- final Option secureOption = Option.builder("sec")
- .longOpt("secure")
- .desc("Flag for enable/disable kerberos (with --setup-kerberos or --setup-kerberos-plugin)")
- .build();
-
- final Option outputOption = Option.builder("o")
- .longOpt("output")
- .desc("File output for collections dump")
- .numberOfArgs(1)
- .build();
-
- final Option includeDocNumberOption = Option.builder("idn")
- .longOpt("include-doc-number")
- .desc("Include the number of docs as well in collection dump")
- .build();
-
- options.addOption(helpOption);
- options.addOption(retryOption);
- options.addOption(removeAdminHandlerOption);
- options.addOption(intervalOption);
- options.addOption(zkConnectStringOption);
- options.addOption(configSetOption);
- options.addOption(configDirOption);
- options.addOption(collectionOption);
- options.addOption(secureZnodeOption);
- options.addOption(unsecureZnodeOption);
- options.addOption(secureSolrZnodeOption);
- options.addOption(transferZnodeOption);
- options.addOption(shardsOption);
- options.addOption(replicationOption);
- options.addOption(maxShardsOption);
- options.addOption(routerNameOption);
- options.addOption(routerFieldOption);
- options.addOption(shardNameOption);
- options.addOption(implicitRoutingOption);
- options.addOption(createCollectionOption);
- options.addOption(downloadConfigOption);
- options.addOption(uploadConfigurationOption);
- options.addOption(checkConfigOption);
- options.addOption(createShardOption);
- options.addOption(jaasFileOption);
- options.addOption(keyStoreLocationOption);
- options.addOption(keyStorePasswordOption);
- options.addOption(keyStoreTypeOption);
- options.addOption(trustStoreLocationOption);
- options.addOption(trustStorePasswordOption);
- options.addOption(trustStoreTypeOption);
- options.addOption(setClusterPropOption);
- options.addOption(propNameOption);
- options.addOption(propValueOption);
- options.addOption(createZnodeOption);
- options.addOption(znodeOption);
- options.addOption(secureOption);
- options.addOption(transferModeOption);
- options.addOption(copyScrOption);
- options.addOption(copyDestOption);
- options.addOption(saslUsersOption);
- options.addOption(checkZnodeOption);
- options.addOption(deleteZnodeOption);
- options.addOption(dumpCollectionsOption);
- options.addOption(setupKerberosPluginOption);
- options.addOption(securityJsonLocationOption);
- options.addOption(outputOption);
- options.addOption(includeDocNumberOption);
-
- AmbariSolrCloudClient solrCloudClient = null;
-
- try {
- CommandLineParser cmdLineParser = new DefaultParser();
- CommandLine cli = cmdLineParser.parse(options, args);
-
- if(cli.hasOption('h')) {
- helpFormatter.printHelp("sample", options);
- exit(0, null);
- }
- String command = "";
- if (cli.hasOption("cc")) {
- command = CREATE_COLLECTION_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption, configSetOption);
- } else if (cli.hasOption("uc")) {
- command = UPLOAD_CONFIG_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption, configDirOption);
- } else if (cli.hasOption("dc")) {
- command = DOWNLOAD_CONFIG_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption, configDirOption);
- } else if (cli.hasOption("csh")) {
- command = CREATE_SHARD_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption, shardNameOption);
- } else if (cli.hasOption("chc")) {
- command = CONFIG_CHECK_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption);
- } else if (cli.hasOption("cp")) {
- command = SET_CLUSTER_PROP;
- validateRequiredOptions(cli, command, zkConnectStringOption, propNameOption, propValueOption);
- } else if (cli.hasOption("cz")) {
- command = CREATE_ZNODE;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption);
- } else if (cli.hasOption("chz")){
- command = CHECK_ZNODE;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption);
- } else if (cli.hasOption("skp")) {
- command = SETUP_KERBEROS_PLUGIN;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption);
- } else if (cli.hasOption("sz")) {
- command = SECURE_ZNODE_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption, jaasFileOption, saslUsersOption);
- } else if (cli.hasOption("ssz")) {
- command = SECURE_SOLR_ZNODE_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption, jaasFileOption, saslUsersOption);
- } else if (cli.hasOption("uz")) {
- command = UNSECURE_ZNODE_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption, jaasFileOption);
- } else if (cli.hasOption("rah")) {
- command = REMOVE_ADMIN_HANDLERS;
- validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption);
- } else if (cli.hasOption("tz")) {
- command = TRANSFER_ZNODE_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, copyScrOption, copyDestOption);
- } else if (cli.hasOption("dz")) {
- command = DELETE_ZNODE_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption);
- } else if (cli.hasOption("dcd")) {
- command = DUMP_COLLECTIONS_DATA_COMMAND;
- validateRequiredOptions(cli, command, zkConnectStringOption, outputOption);
- } else {
- List commands = Arrays.asList(CREATE_COLLECTION_COMMAND, CREATE_SHARD_COMMAND, UPLOAD_CONFIG_COMMAND,
- DOWNLOAD_CONFIG_COMMAND, CONFIG_CHECK_COMMAND, SET_CLUSTER_PROP, CREATE_ZNODE, SECURE_ZNODE_COMMAND, UNSECURE_ZNODE_COMMAND,
- SECURE_SOLR_ZNODE_COMMAND, CHECK_ZNODE, SETUP_KERBEROS_PLUGIN, REMOVE_ADMIN_HANDLERS, TRANSFER_ZNODE_COMMAND, DELETE_ZNODE_COMMAND,
- DUMP_COLLECTIONS_DATA_COMMAND);
- helpFormatter.printHelp(CMD_LINE_SYNTAX, options);
- exit(1, String.format("One of the supported commands is required (%s)", StringUtils.join(commands, "|")));
- }
-
- String zkConnectString = cli.getOptionValue('z');
- String collection = cli.getOptionValue('c');
- String configSet = cli.getOptionValue("cs");
- String configDir = cli.getOptionValue("d");
- int shards = cli.hasOption('s') ? Integer.parseInt(cli.getOptionValue('s')) : 1;
- int replication = cli.hasOption('r') ? Integer.parseInt(cli.getOptionValue('r')) : 1;
- int retry = cli.hasOption("rt") ? Integer.parseInt(cli.getOptionValue("rt")) : 5;
- int interval = cli.hasOption('i') ? Integer.parseInt(cli.getOptionValue('i')) : 10;
- int maxShards = cli.hasOption('m') ? Integer.parseInt(cli.getOptionValue('m')) : shards * replication;
- String routerName = cli.hasOption("rn") ? cli.getOptionValue("rn") : null;
- String routerField = cli.hasOption("rf") ? cli.getOptionValue("rf") : null;
- String shardName = cli.hasOption("sn") ? cli.getOptionValue("sn") : null;
- boolean implicitRouting = cli.hasOption("ir");
- String jaasFile = cli.hasOption("jf") ? cli.getOptionValue("jf") : null;
- String keyStoreLocation = cli.hasOption("ksl") ? cli.getOptionValue("ksl") : null;
- String keyStorePassword = cli.hasOption("ksp") ? cli.getOptionValue("ksp") : null;
- String keyStoreType = cli.hasOption("kst") ? cli.getOptionValue("kst") : null;
- String trustStoreLocation = cli.hasOption("tsl") ? cli.getOptionValue("tsl") : null;
- String trustStorePassword = cli.hasOption("tsp") ? cli.getOptionValue("tsp") : null;
- String trustStoreType = cli.hasOption("tst") ? cli.getOptionValue("tst") : null;
- String clusterPropName = cli.hasOption("cpn") ? cli.getOptionValue("cpn") : null;
- String clusterPropValue = cli.hasOption("cpv") ? cli.getOptionValue("cpv") : null;
- String znode = cli.hasOption("zn") ? cli.getOptionValue("zn") : null;
- boolean isSecure = cli.hasOption("sec");
- String saslUsers = cli.hasOption("su") ? cli.getOptionValue("su") : "";
- String securityJsonLocation = cli.hasOption("sjl") ? cli.getOptionValue("sjl") : "";
- String copySrc = cli.hasOption("cps") ? cli.getOptionValue("cps") : null;
- String copyDest = cli.hasOption("cpd") ? cli.getOptionValue("cpd") : null;
- String transferMode = cli.hasOption("tm") ? cli.getOptionValue("tm") : "NONE";
- String output = cli.hasOption("o") ? cli.getOptionValue("o") : null;
- boolean includeDocNumber = cli.hasOption("idn");
-
- AmbariSolrCloudClientBuilder clientBuilder = new AmbariSolrCloudClientBuilder()
- .withZkConnectString(zkConnectString)
- .withCollection(collection)
- .withConfigSet(configSet)
- .withShards(shards)
- .withReplication(replication)
- .withMaxShardsPerNode(maxShards)
- .withRetry(retry)
- .withInterval(interval)
- .withRouterName(routerName)
- .withRouterField(routerField)
- .withJaasFile(jaasFile) // call before creating SolrClient
- .isImplicitRouting(implicitRouting)
- .withSolrZkClient(ZK_CLIENT_TIMEOUT, ZK_CLIENT_CONNECT_TIMEOUT)
- .withKeyStoreLocation(keyStoreLocation)
- .withKeyStorePassword(keyStorePassword)
- .withKeyStoreType(keyStoreType)
- .withTrustStoreLocation(trustStoreLocation)
- .withTrustStorePassword(trustStorePassword)
- .withTrustStoreType(trustStoreType)
- .withClusterPropName(clusterPropName)
- .withClusterPropValue(clusterPropValue)
- .withTransferMode(transferMode)
- .withCopySrc(copySrc)
- .withCopyDest(copyDest)
- .withOutput(output)
- .withIncludeDocNumber(includeDocNumber)
- .withSecurityJsonLocation(securityJsonLocation)
- .withZnode(znode)
- .withSecure(isSecure)
- .withSaslUsers(saslUsers);
-
- switch (command) {
- case CREATE_COLLECTION_COMMAND:
- solrCloudClient = clientBuilder
- .withSolrCloudClient()
- .build();
- solrCloudClient.createCollection();
- break;
- case UPLOAD_CONFIG_COMMAND:
- solrCloudClient = clientBuilder
- .withConfigDir(configDir)
- .build();
- solrCloudClient.uploadConfiguration();
- break;
- case DOWNLOAD_CONFIG_COMMAND:
- solrCloudClient = clientBuilder
- .withConfigDir(configDir)
- .build();
- solrCloudClient.downloadConfiguration();
- break;
- case CONFIG_CHECK_COMMAND:
- solrCloudClient = clientBuilder.build();
- boolean configExists = solrCloudClient.configurationExists();
- if (!configExists) {
- exit(1, null);
- }
- break;
- case CREATE_SHARD_COMMAND:
- solrCloudClient = clientBuilder
- .withSolrCloudClient()
- .build();
- solrCloudClient.createShard(shardName);
- break;
- case SET_CLUSTER_PROP:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.setClusterProp();
- break;
- case CREATE_ZNODE:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.createZnode();
- break;
- case CHECK_ZNODE:
- solrCloudClient = clientBuilder.build();
- boolean znodeExists = solrCloudClient.isZnodeExists(znode);
- if (!znodeExists) {
- exit(1, String.format("'%s' znode does not exist. Solr is responsible to create the ZNode, " +
- "check Solr started successfully or not", znode));
- }
- break;
- case SETUP_KERBEROS_PLUGIN:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.setupKerberosPlugin();
- break;
- case SECURE_ZNODE_COMMAND:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.secureZnode();
- break;
- case UNSECURE_ZNODE_COMMAND:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.unsecureZnode();
- break;
- case SECURE_SOLR_ZNODE_COMMAND:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.secureSolrZnode();
- case REMOVE_ADMIN_HANDLERS:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.removeAdminHandlerFromCollectionConfig();
- break;
- case TRANSFER_ZNODE_COMMAND:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.transferZnode();
- break;
- case DELETE_ZNODE_COMMAND:
- solrCloudClient = clientBuilder.build();
- solrCloudClient.deleteZnode();
- break;
- case DUMP_COLLECTIONS_DATA_COMMAND:
- solrCloudClient = clientBuilder
- .withSolrCloudClient().build();
- solrCloudClient.outputCollectionData();
- break;
- default:
- throw new AmbariSolrCloudClientException(String.format("Not found command: '%s'", command));
- }
- } catch (Exception e) {
- helpFormatter.printHelp(
- CMD_LINE_SYNTAX, options);
- exit(1, e.getMessage());
- } finally {
- if (solrCloudClient != null && solrCloudClient.getSolrZkClient() != null) {
- solrCloudClient.getSolrZkClient().close();
- }
- }
- exit(0, null);
- }
-
- private static void validateRequiredOptions(CommandLine cli, String command, Option... optionsToValidate)
- throws AmbariSolrCloudClientException {
- List requiredOptions = new ArrayList<>();
- for (Option opt : optionsToValidate) {
- if (!cli.hasOption(opt.getOpt())) {
- requiredOptions.add(opt.getOpt());
- }
- }
- if (!requiredOptions.isEmpty()) {
- throw new AmbariSolrCloudClientException(
- String.format("The following options required for '%s' : %s",
- command, StringUtils.join(requiredOptions, ",")));
- }
- }
-
- private static void exit(int exitCode, String message) {
- if (message != null){
- LOG.error(message);
- }
- LOG.info("Return code: {}", exitCode);
- System.exit(exitCode);
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
deleted file mode 100644
index 7571c99b1dc..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr;
-
-import org.apache.ambari.infra.solr.commands.CheckConfigZkCommand;
-import org.apache.ambari.infra.solr.commands.CreateCollectionCommand;
-import org.apache.ambari.infra.solr.commands.CreateShardCommand;
-import org.apache.ambari.infra.solr.commands.CreateSolrZnodeZkCommand;
-import org.apache.ambari.infra.solr.commands.DeleteZnodeZkCommand;
-import org.apache.ambari.infra.solr.commands.DownloadConfigZkCommand;
-import org.apache.ambari.infra.solr.commands.DumpCollectionsCommand;
-import org.apache.ambari.infra.solr.commands.EnableKerberosPluginSolrZkCommand;
-import org.apache.ambari.infra.solr.commands.GetShardsCommand;
-import org.apache.ambari.infra.solr.commands.GetSolrHostsCommand;
-import org.apache.ambari.infra.solr.commands.ListCollectionCommand;
-import org.apache.ambari.infra.solr.commands.RemoveAdminHandlersCommand;
-import org.apache.ambari.infra.solr.commands.SecureSolrZNodeZkCommand;
-import org.apache.ambari.infra.solr.commands.SecureZNodeZkCommand;
-import org.apache.ambari.infra.solr.commands.SetClusterPropertyZkCommand;
-import org.apache.ambari.infra.solr.commands.TransferZnodeZkCommand;
-import org.apache.ambari.infra.solr.commands.UnsecureZNodeZkCommand;
-import org.apache.ambari.infra.solr.commands.UploadConfigZkCommand;
-import org.apache.ambari.infra.solr.commands.CheckZnodeZkCommand;
-import org.apache.ambari.infra.solr.util.ShardUtils;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * Client for communicate with Solr (and Zookeeper)
- */
-public class AmbariSolrCloudClient {
-
- private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudClient.class);
-
- private final String zkConnectString;
- private final String collection;
- private final String configSet;
- private final String configDir;
- private final int shards;
- private final int replication;
- private final int retryTimes;
- private final int interval;
- private final CloudSolrClient solrCloudClient;
- private final SolrZkClient solrZkClient;
- private final int maxShardsPerNode;
- private final String routerName;
- private final String routerField;
- private final boolean implicitRouting;
- private final String jaasFile;
- private final String znode;
- private final String saslUsers;
- private final String propName;
- private final String propValue;
- private final String securityJsonLocation;
- private final boolean secure;
- private final String transferMode;
- private final String copySrc;
- private final String copyDest;
- private final String output;
- private final boolean includeDocNumber;
-
- public AmbariSolrCloudClient(AmbariSolrCloudClientBuilder builder) {
- this.zkConnectString = builder.zkConnectString;
- this.collection = builder.collection;
- this.configSet = builder.configSet;
- this.configDir = builder.configDir;
- this.shards = builder.shards;
- this.replication = builder.replication;
- this.retryTimes = builder.retryTimes;
- this.interval = builder.interval;
- this.jaasFile = builder.jaasFile;
- this.solrCloudClient = builder.solrCloudClient;
- this.solrZkClient = builder.solrZkClient;
- this.maxShardsPerNode = builder.maxShardsPerNode;
- this.routerName = builder.routerName;
- this.routerField = builder.routerField;
- this.implicitRouting = builder.implicitRouting;
- this.znode = builder.znode;
- this.saslUsers = builder.saslUsers;
- this.propName = builder.propName;
- this.propValue = builder.propValue;
- this.securityJsonLocation = builder.securityJsonLocation;
- this.secure = builder.secure;
- this.transferMode = builder.transferMode;
- this.copySrc = builder.copySrc;
- this.copyDest = builder.copyDest;
- this.output = builder.output;
- this.includeDocNumber = builder.includeDocNumber;
- }
-
- /**
- * Get Solr collections
- */
- public List listCollections() throws Exception {
- return new ListCollectionCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Create Solr collection if exists
- */
- public String createCollection() throws Exception {
- List collections = listCollections();
- if (!collections.contains(getCollection())) {
- String collection = new CreateCollectionCommand(getRetryTimes(), getInterval()).run(this);
- LOG.info("Collection '{}' creation request sent.", collection);
- } else {
- LOG.info("Collection '{}' already exits.", getCollection());
- if (this.isImplicitRouting()) {
- createShard(null);
- }
- }
- return getCollection();
- }
-
- public String outputCollectionData() throws Exception {
- List collections = listCollections();
- String result = new DumpCollectionsCommand(getRetryTimes(), getInterval(), collections).run(this);
- LOG.info("Dump collections response: {}", result);
- return result;
- }
-
- /**
- * Set cluster property in clusterprops.json.
- */
- public void setClusterProp() throws Exception {
- LOG.info("Set cluster prop: '{}'", this.getPropName());
- String newPropValue = new SetClusterPropertyZkCommand(getRetryTimes(), getInterval()).run(this);
- LOG.info("Set cluster prop '{}' successfully to '{}'", this.getPropName(), newPropValue);
- }
-
- /**
- * Create a znode only if it does not exist. Return 0 code if it exists.
- */
- public void createZnode() throws Exception {
- boolean znodeExists = isZnodeExists(this.znode);
- if (znodeExists) {
- LOG.info("Znode '{}' already exists.", this.znode);
- } else {
- LOG.info("Znode '{}' does not exist. Creating...", this.znode);
- String newZnode = new CreateSolrZnodeZkCommand(getRetryTimes(), getInterval()).run(this);
- LOG.info("Znode '{}' is created successfully.", newZnode);
- }
- }
-
- /**
- * Check znode exists or not based on the zookeeper connect string.
- * E.g.: localhost:2181 and znode: /ambari-solr, checks existance of localhost:2181/ambari-solr
- */
- public boolean isZnodeExists(String znode) throws Exception {
- LOG.info("Check '{}' znode exists or not", znode);
- boolean result = new CheckZnodeZkCommand(getRetryTimes(), getInterval(), znode).run(this);
- if (result) {
- LOG.info("'{}' znode exists", znode);
- } else {
- LOG.info("'{}' znode does not exist", znode);
- }
- return result;
- }
-
- public void setupKerberosPlugin() throws Exception {
- LOG.info("Setup kerberos plugin in security.json");
- new EnableKerberosPluginSolrZkCommand(getRetryTimes(), getInterval()).run(this);
- LOG.info("KerberosPlugin is set in security.json");
- }
-
- /**
- * Secure solr znode
- */
- public void secureSolrZnode() throws Exception {
- new SecureSolrZNodeZkCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Secure znode
- */
- public void secureZnode() throws Exception {
- new SecureZNodeZkCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Unsecure znode
- */
- public void unsecureZnode() throws Exception {
- LOG.info("Disable security for znode - ", this.getZnode());
- new UnsecureZNodeZkCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Upload config set to zookeeper
- */
- public String uploadConfiguration() throws Exception {
- String configSet = new UploadConfigZkCommand(getRetryTimes(), getInterval()).run(this);
- LOG.info("'{}' is uploaded to zookeeper.", configSet);
- return configSet;
- }
-
- /**
- * Download config set from zookeeper
- */
- public String downloadConfiguration() throws Exception {
- String configDir = new DownloadConfigZkCommand(getRetryTimes(), getInterval()).run(this);
- LOG.info("Config set is download from zookeeper. ({})", configDir);
- return configDir;
- }
-
- /**
- * Get configuration if exists in zookeeper
- */
- public boolean configurationExists() throws Exception {
- boolean configExits = new CheckConfigZkCommand(getRetryTimes(), getInterval()).run(this);
- if (configExits) {
- LOG.info("Config {} exits", configSet);
- } else {
- LOG.info("Configuration '{}' does not exist", configSet);
- }
- return configExits;
- }
-
- /**
- * Create shard in collection - create a new one if shard name specified, if
- * not create based on the number of shards logic (with shard_# suffix)
- *
- * @param shard
- * name of the created shard
- */
- public Collection createShard(String shard) throws Exception {
- Collection existingShards = getShardNames();
- if (shard != null) {
- new CreateShardCommand(shard, getRetryTimes(), getInterval()).run(this);
- existingShards.add(shard);
- } else {
- List shardList = ShardUtils.generateShardList(getMaxShardsPerNode());
- for (String shardName : shardList) {
- if (!existingShards.contains(shardName)) {
- new CreateShardCommand(shardName, getRetryTimes(), getInterval()).run(this);
- LOG.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName);
- existingShards.add(shardName);
- }
- }
- }
- return existingShards;
- }
-
- /**
- * Get shard names
- */
- public Collection getShardNames() throws Exception {
- Collection slices = new GetShardsCommand(getRetryTimes(), getInterval()).run(this);
- return ShardUtils.getShardNamesFromSlices(slices, this.getCollection());
- }
-
- /**
- * Get Solr Hosts
- */
- public Collection getSolrHosts() throws Exception {
- return new GetSolrHostsCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Remove solr.admin.AdminHandlers requestHandler from solrconfi.xml
- */
- public boolean removeAdminHandlerFromCollectionConfig() throws Exception {
- return new RemoveAdminHandlersCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Transfer znode data (cannot be both scr and dest local)
- */
- public boolean transferZnode() throws Exception {
- return new TransferZnodeZkCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- /**
- * Delete znode path (and all sub nodes)
- */
- public boolean deleteZnode() throws Exception {
- return new DeleteZnodeZkCommand(getRetryTimes(), getInterval()).run(this);
- }
-
- public String getZkConnectString() {
- return zkConnectString;
- }
-
- public String getCollection() {
- return collection;
- }
-
- public String getConfigSet() {
- return configSet;
- }
-
- public String getConfigDir() {
- return configDir;
- }
-
- public int getShards() {
- return shards;
- }
-
- public int getReplication() {
- return replication;
- }
-
- public int getRetryTimes() {
- return retryTimes;
- }
-
- public int getInterval() {
- return interval;
- }
-
- public CloudSolrClient getSolrCloudClient() {
- return solrCloudClient;
- }
-
- public SolrZkClient getSolrZkClient() {
- return solrZkClient;
- }
-
- public int getMaxShardsPerNode() {
- return maxShardsPerNode;
- }
-
- public String getRouterName() {
- return routerName;
- }
-
- public String getRouterField() {
- return routerField;
- }
-
- public boolean isImplicitRouting() {
- return implicitRouting;
- }
-
- public String getJaasFile() {
- return jaasFile;
- }
-
- public String getSaslUsers() {
- return saslUsers;
- }
-
- public String getZnode() {
- return znode;
- }
-
- public String getPropName() {
- return propName;
- }
-
- public String getPropValue() {
- return propValue;
- }
-
- public boolean isSecure() {
- return secure;
- }
-
- public String getSecurityJsonLocation() {
- return securityJsonLocation;
- }
-
- public String getTransferMode() {
- return transferMode;
- }
-
- public String getCopySrc() {
- return copySrc;
- }
-
- public String getCopyDest() {
- return copyDest;
- }
-
- public String getOutput() {
- return output;
- }
-
- public boolean isIncludeDocNumber() {
- return includeDocNumber;
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientBuilder.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientBuilder.java
deleted file mode 100644
index db4396b5819..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientBuilder.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.solr;
-
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
-import org.apache.solr.common.cloud.SolrZkClient;
-
-public class AmbariSolrCloudClientBuilder {
- private static final String KEYSTORE_LOCATION_ARG = "javax.net.ssl.keyStore";
- private static final String KEYSTORE_PASSWORD_ARG = "javax.net.ssl.keyStorePassword";
- private static final String KEYSTORE_TYPE_ARG = "javax.net.ssl.keyStoreType";
- private static final String TRUSTSTORE_LOCATION_ARG = "javax.net.ssl.trustStore";
- private static final String TRUSTSTORE_PASSWORD_ARG = "javax.net.ssl.trustStorePassword";
- private static final String TRUSTSTORE_TYPE_ARG = "javax.net.ssl.trustStoreType";
- private static final String JAVA_SECURITY_AUTH_LOGIN_CONFIG = "java.security.auth.login.config";
- private static final String SOLR_HTTPCLIENT_BUILDER_FACTORY = "solr.httpclient.builder.factory";
-
- String zkConnectString;
- String collection;
- String configSet;
- String configDir;
- int shards = 1;
- int replication = 1;
- int retryTimes = 10;
- int interval = 5;
- int maxShardsPerNode = replication * shards;
- String routerName = "implicit";
- String routerField = "_router_field_";
- CloudSolrClient solrCloudClient;
- SolrZkClient solrZkClient;
- boolean implicitRouting;
- String jaasFile;
- String znode;
- String saslUsers;
- String propName;
- String propValue;
- String securityJsonLocation;
- boolean secure;
- String transferMode;
- String copySrc;
- String copyDest;
- String output;
- public boolean includeDocNumber;
-
- public AmbariSolrCloudClient build() {
- return new AmbariSolrCloudClient(this);
- }
-
- public AmbariSolrCloudClientBuilder withZkConnectString(String zkConnectString) {
- this.zkConnectString = zkConnectString;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withCollection(String collection) {
- this.collection = collection;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withConfigSet(String configSet) {
- this.configSet = configSet;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withConfigDir(String configDir) {
- this.configDir = configDir;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withShards(int shards) {
- this.shards = shards;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withReplication(int replication) {
- this.replication = replication;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withRetry(int retryTimes) {
- this.retryTimes = retryTimes;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withInterval(int interval) {
- this.interval = interval;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withMaxShardsPerNode(int maxShardsPerNode) {
- this.maxShardsPerNode = maxShardsPerNode;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withRouterName(String routerName) {
- this.routerName = routerName;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withRouterField(String routerField) {
- this.routerField = routerField;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder isImplicitRouting(boolean implicitRouting) {
- this.implicitRouting = implicitRouting;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withJaasFile(String jaasFile) {
- this.jaasFile = jaasFile;
- setupSecurity(jaasFile);
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withSolrCloudClient() {
- this.solrCloudClient = new CloudSolrClient.Builder().withZkHost(this.zkConnectString).build();
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withSolrZkClient(int zkClientTimeout, int zkClientConnectTimeout) {
- this.solrZkClient = new SolrZkClient(this.zkConnectString, zkClientTimeout, zkClientConnectTimeout);
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withKeyStoreLocation(String keyStoreLocation) {
- if (keyStoreLocation != null) {
- System.setProperty(KEYSTORE_LOCATION_ARG, keyStoreLocation);
- }
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withKeyStorePassword(String keyStorePassword) {
- if (keyStorePassword != null) {
- System.setProperty(KEYSTORE_PASSWORD_ARG, keyStorePassword);
- }
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withKeyStoreType(String keyStoreType) {
- if (keyStoreType != null) {
- System.setProperty(KEYSTORE_TYPE_ARG, keyStoreType);
- }
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withTrustStoreLocation(String trustStoreLocation) {
- if (trustStoreLocation != null) {
- System.setProperty(TRUSTSTORE_LOCATION_ARG, trustStoreLocation);
- }
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withTrustStorePassword(String trustStorePassword) {
- if (trustStorePassword != null) {
- System.setProperty(TRUSTSTORE_PASSWORD_ARG, trustStorePassword);
- }
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withTrustStoreType(String trustStoreType) {
- if (trustStoreType != null) {
- System.setProperty(TRUSTSTORE_TYPE_ARG, trustStoreType);
- }
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withSaslUsers(String saslUsers) {
- this.saslUsers = saslUsers;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withZnode(String znode) {
- this.znode = znode;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withClusterPropName(String clusterPropName) {
- this.propName = clusterPropName;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withClusterPropValue(String clusterPropValue) {
- this.propValue = clusterPropValue;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withTransferMode(String transferMode) {
- this.transferMode = transferMode;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withCopySrc(String copySrc) {
- this.copySrc = copySrc;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withCopyDest(String copyDest) {
- this.copyDest = copyDest;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withOutput(String output) {
- this.output = output;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withIncludeDocNumber(boolean includeDocNumber) {
- this.includeDocNumber = includeDocNumber;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withSecurityJsonLocation(String securityJson) {
- this.securityJsonLocation = securityJson;
- return this;
- }
-
- public AmbariSolrCloudClientBuilder withSecure(boolean isSecure) {
- this.secure = isSecure;
- return this;
- }
-
- private void setupSecurity(String jaasFile) {
- if (jaasFile != null) {
- System.setProperty(JAVA_SECURITY_AUTH_LOGIN_CONFIG, jaasFile);
- System.setProperty(SOLR_HTTPCLIENT_BUILDER_FACTORY, Krb5HttpClientBuilder.class.getCanonicalName());
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientException.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientException.java
deleted file mode 100644
index d339a77b431..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr;
-
-public class AmbariSolrCloudClientException extends Exception{
- public AmbariSolrCloudClientException(String message) {
- super(message);
- }
- public AmbariSolrCloudClientException(String message, Throwable throwable) {
- super(message, throwable);
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/S3Uploader.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/S3Uploader.java
deleted file mode 100644
index 60b4e0af940..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/S3Uploader.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.infra.solr;
-
-import java.io.File;
-
-import org.apache.commons.io.FileUtils;
-
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.s3.AmazonS3Client;
-
-/**
- * Uploads a file to S3, meant to be used by solrDataManager.py
- */
-public class S3Uploader {
- public static void main(String[] args) {
- try {
- String keyFilePath = args[0];
- String bucketName = args[1];
- String keyPrefix = args[2];
- String filePath = args[3];
-
- String keyFileContent = FileUtils.readFileToString(new File(keyFilePath)).trim();
- String[] keys = keyFileContent.split(",");
- String accessKey = keys[0];
- String secretKey = keys[1];
-
- BasicAWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
- AmazonS3Client client = new AmazonS3Client(credentials);
-
- File file = new File(filePath);
- String key = keyPrefix + file.getName();
-
- if (client.doesObjectExist(bucketName, key)) {
- System.out.println("Object '" + key + "' already exists");
- System.exit(0);
- }
-
- client.putObject(bucketName, key, file);
- } catch (Exception e) {
- e.printStackTrace(System.err);
- System.exit(1);
- }
-
- System.exit(0);
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
deleted file mode 100644
index 5e87859590f..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.AmbariSolrCloudClientException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class AbstractRetryCommand {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractRetryCommand.class);
-
- private final int interval;
- private final int maxRetries;
-
- public AbstractRetryCommand(int maxRetries, int interval) {
- this.maxRetries = maxRetries;
- this.interval = interval;
- }
-
- public abstract RESPONSE createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception;
-
- public RESPONSE run(AmbariSolrCloudClient solrCloudClient) throws Exception {
- return retry(0, solrCloudClient);
- }
-
- private RESPONSE retry(int tries, AmbariSolrCloudClient solrCloudClient) throws Exception {
- try {
- return createAndProcessRequest(solrCloudClient);
- } catch (Exception ex) {
- LOG.error(ex.getMessage(), ex);
- tries++;
- LOG.info("Command failed, tries again (tries: {})", tries);
- if (maxRetries == tries) {
- throw new AmbariSolrCloudClientException(String.format("Maximum retries exceeded: %d", tries), ex);
- } else {
- Thread.sleep(interval * 1000);
- return retry(tries, solrCloudClient);
- }
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractSolrRetryCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractSolrRetryCommand.java
deleted file mode 100644
index fdf26a728b6..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractSolrRetryCommand.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.AmbariSolrCloudClientException;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-import org.apache.solr.client.solrj.response.SolrResponseBase;
-
-public abstract class AbstractSolrRetryCommand
- extends AbstractRetryCommand {
-
- public AbstractSolrRetryCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- public abstract RESPONSE handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception;
-
- public abstract REQUEST createRequest(AmbariSolrCloudClient client);
-
- public abstract String errorMessage(AmbariSolrCloudClient client);
-
- @Override
- public RESPONSE createAndProcessRequest(AmbariSolrCloudClient client) throws Exception {
- REQUEST request = createRequest(client);
- CollectionAdminResponse response = (CollectionAdminResponse) request.process(client.getSolrCloudClient());
- handleErrorIfExists(response, errorMessage(client));
- return handleResponse(response, client);
- }
-
- private void handleErrorIfExists(SolrResponseBase response, String message) throws AmbariSolrCloudClientException {
- if (response.getStatus() != 0) {
- throw new AmbariSolrCloudClientException(message);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractStateFileZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractStateFileZkCommand.java
deleted file mode 100644
index b4872e21777..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractStateFileZkCommand.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.domain.AmbariSolrState;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
-
-public abstract class AbstractStateFileZkCommand extends AbstractZookeeperRetryCommand{
-
- public static final String STATE_FILE = "ambari-solr-state.json";
- public static final String STATE_FIELD = "ambari_solr_security_state";
-
- public AbstractStateFileZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- public AmbariSolrState getStateFromJson(AmbariSolrCloudClient client, String fileName) throws Exception {
- byte[] data = client.getSolrZkClient().getData(fileName, null, null, true);
- String input = new String(data);
- ObjectMapper mapper = new ObjectMapper();
- JsonNode rootNode = mapper.readValue(input.getBytes(), JsonNode.class);
- return AmbariSolrState.valueOf(rootNode.get(STATE_FIELD).asText());
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperConfigCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperConfigCommand.java
deleted file mode 100644
index dec34f1dc95..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperConfigCommand.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.solr.common.cloud.ZkConfigManager;
-
-public abstract class AbstractZookeeperConfigCommand extends AbstractZookeeperRetryCommand {
-
- public AbstractZookeeperConfigCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- protected abstract RESPONSE executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client)
- throws Exception;
-
- @Override
- protected RESPONSE executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- ZkConfigManager zkConfigManager = createZkConfigManager(zkClient);
- return executeZkConfigCommand(zkConfigManager, client);
- }
-
- protected ZkConfigManager createZkConfigManager(SolrZkClient zkClient) {
- return new ZkConfigManager(zkClient);
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperRetryCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperRetryCommand.java
deleted file mode 100644
index e37088db3f0..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperRetryCommand.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-
-public abstract class AbstractZookeeperRetryCommand extends AbstractRetryCommand {
-
- public AbstractZookeeperRetryCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- protected abstract RESPONSE executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper)
- throws Exception;
-
- @Override
- public RESPONSE createAndProcessRequest(AmbariSolrCloudClient client) throws Exception {
- SolrZkClient zkClient = client.getSolrZkClient();
- SolrZooKeeper solrZooKeeper = zkClient.getSolrZooKeeper();
- return executeZkCommand(client, zkClient, solrZooKeeper);
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckConfigZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckConfigZkCommand.java
deleted file mode 100644
index 0a03a65425a..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckConfigZkCommand.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.common.cloud.ZkConfigManager;
-
-public class CheckConfigZkCommand extends AbstractZookeeperConfigCommand {
-
- public CheckConfigZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected Boolean executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client) throws Exception {
- return zkConfigManager.configExists(client.getConfigSet());
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckZnodeZkCommand.java
deleted file mode 100644
index 93eb478d2e3..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckZnodeZkCommand.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.AmbariSolrCloudClientException;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.zookeeper.KeeperException;
-
-public class CheckZnodeZkCommand extends AbstractZookeeperRetryCommand {
-
- private String znode;
-
- public CheckZnodeZkCommand(int maxRetries, int interval, String znode) {
- super(maxRetries, interval);
- this.znode = znode;
- }
-
- @Override
- protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- try {
- return zkClient.exists(this.znode, false);
- } catch (KeeperException e) {
- throw new AmbariSolrCloudClientException("Exception during checking znode, " +
- "Check zookeeper servers are running (n+1/2) or zookeeper quorum has established or not.", e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateCollectionCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateCollectionCommand.java
deleted file mode 100644
index 5d296ae839f..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateCollectionCommand.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.util.ShardUtils;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-
-public class CreateCollectionCommand extends AbstractSolrRetryCommand {
-
- public CreateCollectionCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- public String handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception {
- return client.getCollection();
- }
-
- @Override
- public CollectionAdminRequest.Create createRequest(AmbariSolrCloudClient client) {
- CollectionAdminRequest.Create request =
- CollectionAdminRequest.createCollection(client.getCollection(), client.getConfigSet(), client.getShards(), client.getReplication());
- request.setMaxShardsPerNode(client.getMaxShardsPerNode());
- if (client.isImplicitRouting()) {
- request.setRouterName(client.getRouterName());
- request.setRouterField(client.getRouterField());
- request.setShards(ShardUtils.generateShardListStr(client.getMaxShardsPerNode()));
- }
- return request;
- }
-
- @Override
- public String errorMessage(AmbariSolrCloudClient client) {
- return String.format("Cannot create collection: '%s'", client.getCollection());
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateShardCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateShardCommand.java
deleted file mode 100644
index 549296678e1..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateShardCommand.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-
-public class CreateShardCommand extends AbstractSolrRetryCommand {
-
- private final String shardName;
-
- public CreateShardCommand(String shardName, int maxRetries, int interval) {
- super(maxRetries, interval);
- this.shardName = shardName;
- }
-
- @Override
- public String handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception {
- return shardName;
- }
-
- @Override
- public CollectionAdminRequest.CreateShard createRequest(AmbariSolrCloudClient client) {
- return CollectionAdminRequest.createShard(client.getCollection(), shardName);
- }
-
- @Override
- public String errorMessage(AmbariSolrCloudClient client) {
- return String.format("Cannot add shard to collection '%s'", client.getCollection());
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateSolrZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateSolrZnodeZkCommand.java
deleted file mode 100644
index 1460a8468b1..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateSolrZnodeZkCommand.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.AmbariSolrCloudClientException;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.zookeeper.KeeperException;
-
-public class CreateSolrZnodeZkCommand extends AbstractZookeeperRetryCommand {
-
- public CreateSolrZnodeZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- try {
- zkClient.makePath(client.getZnode(), true);
- return client.getZnode();
- } catch (KeeperException e) {
- throw new AmbariSolrCloudClientException("Cannot create ZNode, check zookeeper servers are running (n+1/2), or zookeeper quorum has established or not.",e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DeleteZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DeleteZnodeZkCommand.java
deleted file mode 100644
index 1c9d4fb6187..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DeleteZnodeZkCommand.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-
-public class DeleteZnodeZkCommand extends AbstractZookeeperRetryCommand {
-
- public DeleteZnodeZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- zkClient.clean(client.getZnode());
- return true;
- }
-
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DownloadConfigZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DownloadConfigZkCommand.java
deleted file mode 100644
index 990c3c31275..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DownloadConfigZkCommand.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.AmbariSolrCloudClientException;
-import org.apache.solr.common.cloud.ZkConfigManager;
-
-import java.io.IOException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-
-public class DownloadConfigZkCommand extends AbstractZookeeperConfigCommand {
-
- public DownloadConfigZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected String executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client) throws Exception {
- Path configDir = Paths.get(client.getConfigDir());
- String configSet = client.getConfigSet();
- try {
- zkConfigManager.downloadConfigDir(configSet, configDir);
- return configDir.toString();
- } catch (IOException e){
- throw new AmbariSolrCloudClientException("Error downloading configuration set, check Solr Znode has started or not " +
- "(starting Solr (for Log Search) is responsible to create the Znode)" ,e);
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DumpCollectionsCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DumpCollectionsCommand.java
deleted file mode 100644
index 708ecac3139..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DumpCollectionsCommand.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.domain.json.SolrCollection;
-import org.apache.ambari.infra.solr.domain.json.SolrCoreData;
-import org.apache.ambari.infra.solr.domain.json.SolrShard;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class DumpCollectionsCommand extends AbstractZookeeperRetryCommand {
-
- private static final Logger logger = LoggerFactory.getLogger(DumpCollectionsCommand.class);
-
- private final List collections;
-
- public DumpCollectionsCommand(int maxRetries, int interval, List collections) {
- super(maxRetries, interval);
- this.collections = collections;
- }
-
- @Override
- protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- Map collectionMap = new HashMap<>();
- if (!this.collections.isEmpty()) {
- for (String collection : this.collections) {
- SolrCollection solrCollection = new SolrCollection();
- CloudSolrClient solrClient = client.getSolrCloudClient();
- if (client.isIncludeDocNumber()) {
- long numberOfDocs = getNumberOfDocs(solrClient, collection);
- solrCollection.setNumberOfDocs(numberOfDocs);
- }
- Collection slices = getSlices(solrClient, collection);
- Integer numShards = slices.size();
- Map solrShardMap = new HashMap<>();
- Map> leaderHostCoreMap = new HashMap<>();
- Map leaderCoreDataMap = new HashMap<>();
- Map> leaderShardCoreMap = new HashMap<>();
- Map leaderCoreHostMap = new HashMap<>();
- for (Slice slice : slices) {
- SolrShard solrShard = new SolrShard();
- solrShard.setName(slice.getName());
- solrShard.setState(slice.getState());
- Collection replicas = slice.getReplicas();
- Map replicaMap = new HashMap<>();
- leaderShardCoreMap.put(slice.getName(), new ArrayList<>());
- for (Replica replica : replicas) {
- replicaMap.put(replica.getName(), replica);
- Replica.State state = replica.getState();
- if (Replica.State.ACTIVE.equals(state)
- && replica.getProperties().get("leader") != null && "true".equals(replica.getProperties().get("leader"))) {
- String coreName = replica.getCoreName();
- String hostName = getHostFromNodeName(replica.getNodeName());
- if (leaderHostCoreMap.containsKey(hostName)) {
- List coresList = leaderHostCoreMap.get(hostName);
- coresList.add(coreName);
- } else {
- List coreList = new ArrayList<>();
- coreList.add(coreName);
- leaderHostCoreMap.put(hostName, coreList);
- }
- Map properties = new HashMap<>();
- properties.put("name", coreName);
- properties.put("coreNodeName", replica.getName());
- properties.put("shard", slice.getName());
- properties.put("collection", collection);
- properties.put("numShards", numShards.toString());
- properties.put("replicaType", replica.getType().name());
- SolrCoreData solrCoreData = new SolrCoreData(replica.getName(), hostName, properties);
- leaderCoreDataMap.put(coreName, solrCoreData);
- leaderShardCoreMap.get(slice.getName()).add(coreName);
- leaderCoreHostMap.put(coreName, hostName);
- }
- }
- solrShard.setReplicas(replicaMap);
- solrShardMap.put(slice.getName(), solrShard);
- }
- solrCollection.setShards(solrShardMap);
- solrCollection.setLeaderHostCoreMap(leaderHostCoreMap);
- solrCollection.setLeaderSolrCoreDataMap(leaderCoreDataMap);
- solrCollection.setLeaderShardsMap(leaderShardCoreMap);
- solrCollection.setLeaderCoreHostMap(leaderCoreHostMap);
- solrCollection.setName(collection);
- collectionMap.put(collection, solrCollection);
- }
- }
- ObjectMapper objectMapper = new ObjectMapper();
- final ObjectWriter objectWriter = objectMapper
- .writerWithDefaultPrettyPrinter();
- File file = new File(client.getOutput());
- if (!file.exists()) {
- file.createNewFile();
- }
- objectWriter.writeValue(file, collectionMap);
- return objectWriter.writeValueAsString(collectionMap);
- }
-
- private String getHostFromNodeName(String nodeName) {
- String[] splitted = nodeName.split(":");
- if (splitted.length > 0) {
- return splitted[0];
- } else {
- if (nodeName.endsWith("_solr")) {
- String[] splitted_ = nodeName.split("_");
- return splitted_[0];
- }
- return nodeName;
- }
- }
-
- private Collection getSlices(CloudSolrClient solrClient, String collection) {
- ZkStateReader reader = solrClient.getZkStateReader();
- DocCollection docCollection = reader.getClusterState().getCollection(collection);
- return docCollection.getSlices();
- }
-
- private long getNumberOfDocs(CloudSolrClient solrClient, String collection) throws Exception {
- solrClient.setDefaultCollection(collection);
- SolrQuery q = new SolrQuery("*:*");
- q.setRows(0);
- return solrClient.query(q).getResults().getNumFound();
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/EnableKerberosPluginSolrZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/EnableKerberosPluginSolrZkCommand.java
deleted file mode 100644
index 793addd9e18..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/EnableKerberosPluginSolrZkCommand.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.zookeeper.CreateMode;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-
-public class EnableKerberosPluginSolrZkCommand extends AbstractZookeeperRetryCommand {
-
- private static final String SECURITY_JSON = "/security.json";
- private static final String UNSECURE_CONTENT = "{}";
-
- public EnableKerberosPluginSolrZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- String result = "";
- String filePath = client.getZnode() + SECURITY_JSON;
- String fileContent = getFileContentFromZnode(zkClient, filePath);
- String securityContent = getFileContent(client.getSecurityJsonLocation());
- if (client.isSecure()) {
- if (!fileContent.equals(securityContent)) {
- putFileContent(zkClient, filePath, securityContent);
- }
- result = securityContent;
- } else {
- if (!fileContent.equals(UNSECURE_CONTENT)) {
- putFileContent(zkClient, filePath, UNSECURE_CONTENT);
- }
- result = UNSECURE_CONTENT;
- }
- return result;
- }
-
- private void putFileContent(SolrZkClient zkClient, String fileName, String content) throws Exception {
- if (zkClient.exists(fileName, true)) {
- zkClient.setData(fileName, content.getBytes(StandardCharsets.UTF_8), true);
- } else {
- zkClient.create(fileName, content.getBytes(StandardCharsets.UTF_8), CreateMode.PERSISTENT, true);
- }
- }
-
- private String getFileContentFromZnode(SolrZkClient zkClient, String fileName) throws Exception {
- String result;
- if (zkClient.exists(fileName, true)) {
- byte[] data = zkClient.getData(fileName, null, null, true);
- result = new String(data, StandardCharsets.UTF_8);
- } else {
- result = UNSECURE_CONTENT;
- }
- return result;
- }
-
- private String getFileContent(String fileLocation) throws IOException {
- File securityJson = new File(fileLocation);
- if (StringUtils.isNotEmpty(fileLocation) && securityJson.exists()) {
- return FileUtils.readFileToString(securityJson);
- } else {
- return UNSECURE_CONTENT;
- }
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetShardsCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetShardsCommand.java
deleted file mode 100644
index 3683a1b9b45..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetShardsCommand.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkStateReader;
-
-import java.util.Collection;
-
-public class GetShardsCommand extends AbstractRetryCommand> {
-
- public GetShardsCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- public Collection createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception {
- ZkStateReader zkReader = new ZkStateReader(solrCloudClient.getSolrZkClient());
- zkReader.createClusterStateWatchersAndUpdate();
- return zkReader.getClusterState().getCollection(solrCloudClient.getCollection()).getSlices();
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetSolrHostsCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetSolrHostsCommand.java
deleted file mode 100644
index 5a14a448174..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetSolrHostsCommand.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.zookeeper.ZooKeeper;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-public class GetSolrHostsCommand extends AbstractRetryCommand<Collection<String>> {
-
- public GetSolrHostsCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- public Collection<String> createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception {
- List<String> solrHosts = new ArrayList<>();
-
- ZooKeeper zk = new ZooKeeper(solrCloudClient.getZkConnectString(), 10000, null);
- List<String> ids = zk.getChildren("/live_nodes", false);
- for (String id : ids) {
- if (id.endsWith("_solr")) {
- String hostAndPort = id.substring(0, id.length() - 5);
- String[] tokens = hostAndPort.split(":");
- String host = InetAddress.getByName(tokens[0]).getHostName();
-
- solrHosts.add(host);
- }
- }
-
- return solrHosts;
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetStateFileZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetStateFileZkCommand.java
deleted file mode 100644
index 10a8daae316..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetStateFileZkCommand.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.domain.AmbariSolrState;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-
-public class GetStateFileZkCommand extends AbstractStateFileZkCommand {
- private String unsecureZnode;
-
- public GetStateFileZkCommand(int maxRetries, int interval, String unsecureZnode) {
- super(maxRetries, interval);
- this.unsecureZnode = unsecureZnode;
- }
-
- @Override
- protected AmbariSolrState executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- AmbariSolrState result = AmbariSolrState.UNSECURE;
- String stateFile = String.format("%s/%s", unsecureZnode, AbstractStateFileZkCommand.STATE_FILE);
- if (zkClient.exists(stateFile, true)) {
- result = getStateFromJson(client, stateFile);
- }
- return result;
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/ListCollectionCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/ListCollectionCommand.java
deleted file mode 100644
index 41094c72047..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/ListCollectionCommand.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-
-import java.util.List;
-
-public class ListCollectionCommand extends AbstractSolrRetryCommand<CollectionAdminRequest.List, List<String>> {
-
- public ListCollectionCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- public List<String> handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception {
- List<String> allCollectionList = (List<String>) response
- .getResponse().get("collections");
- return allCollectionList;
- }
-
- @Override
- public CollectionAdminRequest.List createRequest(AmbariSolrCloudClient client) {
- return new CollectionAdminRequest.List();
- }
-
- @Override
- public String errorMessage(AmbariSolrCloudClient client) {
- return "Cannot get collections.";
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/RemoveAdminHandlersCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/RemoveAdminHandlersCommand.java
deleted file mode 100644
index 32fae7b141b..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/RemoveAdminHandlersCommand.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.zookeeper.data.Stat;
-
-public class RemoveAdminHandlersCommand extends AbstractZookeeperRetryCommand<Boolean> {
-
- public RemoveAdminHandlersCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- String solrConfigXmlPath = String.format("/configs/%s/solrconfig.xml", client.getCollection());
- if (zkClient.exists(solrConfigXmlPath, true)) {
- Stat stat = new Stat();
- byte[] solrConfigXmlBytes = zkClient.getData(solrConfigXmlPath, null, stat, true);
- String solrConfigStr = new String(solrConfigXmlBytes);
- if (solrConfigStr.contains("class=\"solr.admin.AdminHandlers\"")) {
- byte[] newSolrConfigXmlBytes = new String(solrConfigXmlBytes).replaceAll("(?s)<requestHandler name=\"/admin/\".*?class=\"solr.admin.AdminHandlers\" />", "").getBytes();
- zkClient.setData(solrConfigXmlPath, newSolrConfigXmlBytes, stat.getVersion() + 1, true);
- }
- }
- return true;
- }
-}
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
deleted file mode 100644
index 695862394d0..00000000000
--- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.solr.commands;
-
-import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
-import org.apache.ambari.infra.solr.util.AclUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.SolrZooKeeper;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Id;
-import org.apache.zookeeper.data.Stat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand<Boolean> {
-
- private static final Logger LOG = LoggerFactory.getLogger(SecureSolrZNodeZkCommand.class);
-
- public SecureSolrZNodeZkCommand(int maxRetries, int interval) {
- super(maxRetries, interval);
- }
-
- @Override
- protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
- String zNode = client.getZnode();
- List<ACL> newAclList = new ArrayList<>();
- List<ACL> saslUserList = AclUtils.createAclListFromSaslUsers(client.getSaslUsers().split(","));
- newAclList.addAll(saslUserList);
- newAclList.add(new ACL(ZooDefs.Perms.READ, new Id("world", "anyone")));
-
- String configsPath = String.format("%s/%s", zNode, "configs");
- String collectionsPath = String.format("%s/%s", zNode, "collections");
- String aliasesPath = String.format("%s/%s", zNode, "aliases.json"); // TODO: protect this later somehow
- List