diff --git a/ambari-infra/.gitignore b/ambari-infra/.gitignore deleted file mode 100644 index a7d91c4d714..00000000000 --- a/ambari-infra/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -target -.settings -.classpath -.project -/bin/ -job-repository.db \ No newline at end of file diff --git a/ambari-infra/ambari-infra-assembly/pom.xml b/ambari-infra/ambari-infra-assembly/pom.xml deleted file mode 100644 index 5badd26e465..00000000000 --- a/ambari-infra/ambari-infra-assembly/pom.xml +++ /dev/null @@ -1,429 +0,0 @@ - - - - - ambari-infra - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - ambari-infra-assembly - Ambari Infra Assembly - http://maven.apache.org - - - /usr/lib - - http://public-repo-1.hortonworks.com/ARTIFACTS/dist/lucene/solr/${solr.version}/solr-${solr.version}.tgz - ${mapping.base.path}/ambari-infra-solr - ambari-infra-solr - ambari-infra-solr-client - ${mapping.base.path}/${solr.client.package.name} - ${project.basedir}/../ambari-infra-solr-client - ${project.basedir}/../ambari-infra-solr-plugin - ambari-infra-manager - ${project.basedir}/../ambari-infra-manager - ${mapping.base.path}/${infra-manager.package.name} - ${infra-manager.mapping.path}/conf - http://central.maven.org/maven2/commons-fileupload/commons-fileupload/1.3.3/commons-fileupload-1.3.3.jar - commons-fileupload-1.3.3.jar - commons-fileupload-1.3.2.jar - - - - - - rpm - - true - - - - build-rpm - - - - - - org.codehaus.mojo - rpm-maven-plugin - 2.1.4 - - 2012, Apache Software Foundation - Development - Maven Recipe: RPM Package. - false - / - noarch - linux - - ${package-version} - ${package-release} - - root - root - - - - infra-solr - package - - rpm - - - Development - ${solr.package.name} - - - ${solr.mapping.path} - - - ${project.build.directory}/solr - - - - - - - - infra-solr-client - package - - rpm - - - Development - ${solr.client.package.name} - noarch - linux - - ${project.basedir}/src/main/package/rpm/solr-client/postinstall.sh - utf-8 - - - - ${solr.client.mapping.path} - - - ${solr.client.dir}/target/package - - libs/checkstyle*.jar - - - - - - - - - infra-manager - package - - rpm - - - Development - ${infra-manager.package.name} - - ${project.basedir}/src/main/package/rpm/manager/postinstall.sh - utf-8 - - - ${project.basedir}/src/main/package/rpm/manager/postremove.sh - utf-8 - - - - ${infra-manager.mapping.path} - - - ${infra-manager.dir}/target/package - - - - - - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - generate-resources - - run - - - - - - - - - - - - - - - - - - - - - - - - - - - - deb - - - - build-deb - - - - - - - maven-resources-plugin - 2.7 - - - - copy-resources - package - - copy-resources - - - ${project.build.directory}/resources/deb - - - ${project.basedir}/src/main/package/deb - - solr/postinst - solr-client/postinst - - false - - - ${project.basedir}/src/main/package/deb - - solr/postinst - solr-client/postinst - - true - - - - - - - - org.vafer - jdeb - 1.4 - - - package - jdeb-solr - - jdeb - - - ${basedir}/src/main/package/deb/solr - ${basedir}/target/${solr.package.name}_${package-version}-${package-release}.deb - false - false - - - ${project.build.directory}/solr - directory - - perm - root - root - ${solr.mapping.path} - - - bin/**,server/scripts/** - - - - ${project.build.directory}/solr - directory - - perm - root - root - 755 - ${solr.mapping.path} - - - bin/**,server/scripts/** - - - - - - - - package - jdeb-solr-client - - jdeb - - - ${basedir}/src/main/package/deb/solr-client - 
${basedir}/target/${solr.client.package.name}_${package-version}-${package-release}.deb - false - false - - - ${solr.client.dir}/target/ambari-infra-solr-client.tar.gz - archive - - perm - root - root - ${solr.client.mapping.path} - - libs/checkstyle*.jar - - - - - - - package - jdeb-infra-manager - - jdeb - - - ${basedir}/src/main/package/deb/manager - ${basedir}/target/${infra-manager.package.name}_${package-version}-${package-release}.deb - false - false - - - ${infra-manager.dir}/target/ambari-infra-manager.tar.gz - archive - - perm - root - root - ${infra-manager.mapping.path} - - - - - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - generate-resources - - run - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - org.apache.ambari - ambari-infra-solr-client - ${project.version} - - - org.apache.ambari - ambari-infra-solr-plugin - ${project.version} - - - org.apache.ambari - ambari-infra-manager - ${project.version} - - - - - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control deleted file mode 100644 index 03663a06c61..00000000000 --- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/control +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -Package: [[infra-manager.package.name]] -Version: [[package-version]]-[[package-release]] -Section: [[deb.section]] -Priority: [[deb.priority]] -Depends: [[deb.dependency.list]] -Architecture: [[deb.architecture]] -Description: [[description]] -Maintainer: [[deb.publisher]] \ No newline at end of file diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst deleted file mode 100644 index acce62dd2cb..00000000000 --- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-INFRA_MANAGER_SOURCE="/usr/lib/ambari-infra-manager/bin/infraManager.sh"
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_CONF_SOURCE="/usr/lib/ambari-infra-manager/conf"
-
-rm -f $INFRA_MANAGER_LINK_NAME ; ln -s $INFRA_MANAGER_SOURCE $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
-mkdir -p $INFRA_MANAGER_CONF_LINK_DIR
-ln -s $INFRA_MANAGER_CONF_SOURCE $INFRA_MANAGER_CONF_LINK_NAME
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm
deleted file mode 100644
index e62abc6f2bc..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postrm
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-
-rm -f $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/preinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/prerm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/control
deleted file mode 100644
index 88bafcb590b..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-Package: [[solr.client.package.name]]
-Version: [[package-version]]-[[package-release]]
-Section: [[deb.section]]
-Priority: [[deb.priority]]
-Depends: [[deb.dependency.list]]
-Architecture: [[deb.architecture]]
-Description: [[description]]
-Maintainer: [[deb.publisher]]
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
deleted file mode 100644
index ccc377b7e3f..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-SOLR_CLOUD_CLI_LINK_NAME="/usr/bin/infra-solr-cloud-cli"
-SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
-
-SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
-SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
-
-SOLR_DATA_MANAGER_LINK_NAME="/usr/bin/infra-solr-data-manager"
-SOLR_DATA_MANAGER_SOURCE="/usr/lib/ambari-infra-solr-client/solrDataManager.py"
-
-rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
-rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
-rm -f $SOLR_DATA_MANAGER_LINK_NAME ; ln -s $SOLR_DATA_MANAGER_SOURCE $SOLR_DATA_MANAGER_LINK_NAME
\ No newline at end of file
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postrm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postrm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/preinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/preinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/prerm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/prerm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/control b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/control
deleted file mode 100644
index 5087cd0e8c8..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-Package: [[solr.package.name]]
-Version: [[package-version]]-[[package-release]]
-Section: [[deb.section]]
-Priority: [[deb.priority]]
-Depends: [[deb.dependency.list]]
-Architecture: [[deb.architecture]]
-Description: [[description]]
-Maintainer: [[deb.publisher]]
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postrm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postrm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/postrm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/preinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/preinst
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/preinst
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/prerm b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/prerm
deleted file mode 100644
index 21a01faa534..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr/prerm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
deleted file mode 100644
index acce62dd2cb..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
-INFRA_MANAGER_SOURCE="/usr/lib/ambari-infra-manager/bin/infraManager.sh"
-INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager"
-INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf"
-INFRA_MANAGER_CONF_SOURCE="/usr/lib/ambari-infra-manager/conf"
-
-rm -f $INFRA_MANAGER_LINK_NAME ; ln -s $INFRA_MANAGER_SOURCE $INFRA_MANAGER_LINK_NAME
-rm -f $INFRA_MANAGER_CONF_LINK_NAME
-rm -rf $INFRA_MANAGER_CONF_LINK_DIR
-mkdir -p $INFRA_MANAGER_CONF_LINK_DIR
-ln -s $INFRA_MANAGER_CONF_SOURCE $INFRA_MANAGER_CONF_LINK_NAME
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postremove.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postremove.sh
deleted file mode 100644
index e62abc6f2bc..00000000000
--- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postremove.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License - -INFRA_MANAGER_CONF_LINK_DIR="/etc/ambari-infra-manager" -INFRA_MANAGER_CONF_LINK_NAME="$INFRA_MANAGER_CONF_LINK_DIR/conf" -INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager" - -rm -f $INFRA_MANAGER_LINK_NAME -rm -f $INFRA_MANAGER_CONF_LINK_NAME -rm -rf $INFRA_MANAGER_CONF_LINK_DIR \ No newline at end of file diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh deleted file mode 100644 index ccc377b7e3f..00000000000 --- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -SOLR_CLOUD_CLI_LINK_NAME="/usr/bin/infra-solr-cloud-cli" -SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh" - -SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool" -SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh" - -SOLR_DATA_MANAGER_LINK_NAME="/usr/bin/infra-solr-data-manager" -SOLR_DATA_MANAGER_SOURCE="/usr/lib/ambari-infra-solr-client/solrDataManager.py" - -rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME -rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME -rm -f $SOLR_DATA_MANAGER_LINK_NAME ; ln -s $SOLR_DATA_MANAGER_SOURCE $SOLR_DATA_MANAGER_LINK_NAME \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager-it/pom.xml b/ambari-infra/ambari-infra-manager-it/pom.xml deleted file mode 100644 index c9b8bad5019..00000000000 --- a/ambari-infra/ambari-infra-manager-it/pom.xml +++ /dev/null @@ -1,224 +0,0 @@ - - - - - - ambari-infra - org.apache.ambari - 2.0.0.0-SNAPSHOT - - - Ambari Infra Manager Integration Tests - http://maven.apache.org - 4.0.0 - - ambari-infra-manager-it - - - 4.0.5 - 2.20 - localhost - NONE - - - - - org.apache.solr - solr-solrj - ${solr.version} - - - com.amazonaws - aws-java-sdk-s3 - 1.11.5 - - - commons-io - commons-io - 2.5 - - - - org.slf4j - slf4j-api - 1.7.20 - - - org.slf4j - slf4j-log4j12 - 1.7.20 - - - - org.jbehave - jbehave-core - ${jbehave.version} - test - - - junit - junit - test - - - org.easymock - easymock - 3.4 - test - - - org.hamcrest - hamcrest-all - 1.3 - test - - - com.sparkjava - spark-core - 2.7.1 - test - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - test - - - javax.servlet - servlet-api - - - org.mortbay.jetty - jetty - - - org.mortbay.jetty - jetty-util - - - com.sun.jersey - jetty-util - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - org.slf4j - 
slf4j-log4j12 - - - org.eclipse.jetty - jetty-server - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-security - - - - - org.apache.hadoop - hadoop-hdfs-client - ${hadoop.version} - - - - - target/classes - - - src/test/java/ - - **/*.story - - - - src/test/resources - - - - - - - it - - - it - - - - - - org.apache.maven.plugins - maven-failsafe-plugin - ${failsafe-plugin.version} - - - run-integration-tests - integration-test - - integration-test - - - - **/*Stories.java - **/*IT.java - - - file:${project.build.testOutputDirectory}/log4j.properties - ${docker.host} - ${stories.location} - - - - - verify-integration-tests - verify - - verify - - - - - - - - - - diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/HttpResponse.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/HttpResponse.java deleted file mode 100644 index 3d8711b3eb5..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/HttpResponse.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra; - -public class HttpResponse { - private final int code; - private final String body; - - public HttpResponse(int code, String body) { - this.code = code; - this.body = body; - } - - public int getCode() { - return code; - } - - public String getBody() { - return body; - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java deleted file mode 100644 index 0118c769574..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraClient.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra; - -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.io.IOUtils; -import org.apache.http.client.ClientProtocolException; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpRequestBase; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.DefaultHttpRequestRetryHandler; -import org.apache.http.impl.client.HttpClientBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.Charset; -import java.util.HashMap; -import java.util.Map; - -import static org.apache.commons.lang.StringUtils.isBlank; - -// TODO: use swagger -public class InfraClient implements AutoCloseable { - private static final Logger LOG = LoggerFactory.getLogger(InfraClient.class); - - private final CloseableHttpClient httpClient; - private final URI baseUrl; - - public InfraClient(String baseUrl) { - try { - this.baseUrl = new URI(baseUrl); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - httpClient = HttpClientBuilder.create().setRetryHandler(new DefaultHttpRequestRetryHandler(0, false)).build(); - } - - @Override - public void close() throws Exception { - httpClient.close(); - } - - // TODO: return job data - public void getJobs() { - execute(new HttpGet(baseUrl)); - } - - private HttpResponse execute(HttpRequestBase post) { - try (CloseableHttpResponse response = httpClient.execute(post)) { - String responseBodyText = IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset()); - int statusCode = response.getStatusLine().getStatusCode(); - LOG.info("Response code {} body {} ", statusCode, responseBodyText); - if (!(200 <= statusCode && statusCode <= 299)) - throw new RuntimeException("Error while executing http request: " + responseBodyText); - return new HttpResponse(statusCode, responseBodyText); - } catch (ClientProtocolException e) { - throw new RuntimeException(e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public JobExecutionInfo startJob(String jobName, String parameters) { - URIBuilder uriBuilder = new URIBuilder(baseUrl); - uriBuilder.setScheme("http"); - uriBuilder.setPath(uriBuilder.getPath() + "/" + jobName); - if (!isBlank(parameters)) - uriBuilder.addParameter("params", parameters); - try { - String responseText = execute(new HttpPost(uriBuilder.build())).getBody(); - Map responseContent = new ObjectMapper().readValue(responseText, new TypeReference>() {}); - return new JobExecutionInfo(responseContent.get("jobId").toString(), ((Map)responseContent.get("jobExecutionData")).get("id").toString()); - } catch (URISyntaxException | JsonParseException | JsonMappingException e) { - throw new RuntimeException(e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public void restartJob(String jobName, String jobId) { - URIBuilder uriBuilder = new URIBuilder(baseUrl); - uriBuilder.setScheme("http"); - 
uriBuilder.setPath(String.format("%s/%s/%s/executions", uriBuilder.getPath(), jobName, jobId)); - uriBuilder.addParameter("operation", "RESTART"); - try { - HttpResponse httpResponse = execute(new HttpPost(uriBuilder.build())); - if (httpResponse.getCode() != 200) - throw new RuntimeException(httpResponse.getBody()); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public void stopJob(String jobExecutionId) { - URIBuilder uriBuilder = new URIBuilder(baseUrl); - uriBuilder.setScheme("http"); - uriBuilder.setPath(String.format("%s/executions/%s", uriBuilder.getPath(), jobExecutionId)); - uriBuilder.addParameter("operation", "STOP"); - try { - execute(new HttpDelete(uriBuilder.build())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraManagerStories.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraManagerStories.java deleted file mode 100644 index 564de9a3a5f..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/InfraManagerStories.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra; - -import org.apache.ambari.infra.steps.ExportJobsSteps; -import org.apache.commons.lang.StringUtils; -import org.jbehave.core.configuration.Configuration; -import org.jbehave.core.configuration.MostUsefulConfiguration; -import org.jbehave.core.io.LoadFromClasspath; -import org.jbehave.core.io.LoadFromRelativeFile; -import org.jbehave.core.io.StoryFinder; -import org.jbehave.core.io.StoryLoader; -import org.jbehave.core.junit.JUnitStories; -import org.jbehave.core.reporters.Format; -import org.jbehave.core.reporters.StoryReporterBuilder; -import org.jbehave.core.steps.InjectableStepsFactory; -import org.jbehave.core.steps.InstanceStepsFactory; -import org.jbehave.core.steps.ParameterConverters; - -import java.io.File; -import java.net.URL; -import java.util.ArrayList; -import java.util.List; - -import static java.util.Collections.singletonList; -import static org.jbehave.core.io.CodeLocations.codeLocationFromClass; - -public class InfraManagerStories extends JUnitStories { - private static final String BACKEND_STORIES_LOCATION_PROPERTY = "backend.stories.location"; - private static final String STORY_SUFFIX = ".story"; - - @Override - public Configuration configuration() { - return new MostUsefulConfiguration() - .useStoryLoader(getStoryLoader(BACKEND_STORIES_LOCATION_PROPERTY, this.getClass())) - .useParameterConverters(new ParameterConverters().addConverters(new OffsetDateTimeConverter())) - .useStoryReporterBuilder( - new StoryReporterBuilder().withFailureTrace(true).withDefaultFormats().withFormats(Format.CONSOLE, Format.TXT)); - } - - private static StoryLoader getStoryLoader(String property, Class clazz) { - boolean useExternalStoryLocation = useExternalStoryLocation(property); - if (useExternalStoryLocation) { - try { - return new LoadFromRelativeFile(new URL("file://" + System.getProperty(property))); - } catch (Exception e) { - throw new RuntimeException("Cannot load story files from url: file://" + System.getProperty(property)); - } - } else { - return new LoadFromClasspath(clazz); - } - } - - @Override - public InjectableStepsFactory stepsFactory() { - return new InstanceStepsFactory(configuration(), new ExportJobsSteps()); - } - - @Override - protected List storyPaths() { - return findStories(BACKEND_STORIES_LOCATION_PROPERTY, STORY_SUFFIX, this.getClass()); - } - - private static List findStories(String property, String suffix, Class clazz) { - if (useExternalStoryLocation(property)) { - return findStoriesInFolder(System.getProperty(property), suffix); - } else { - return new StoryFinder() - .findPaths(codeLocationFromClass(clazz).getFile(), singletonList(String.format("**/*%s", suffix)), null); - } - } - - private static List findStoriesInFolder(String folderAbsolutePath, String suffix) { - List results = new ArrayList<>(); - File folder = new File(folderAbsolutePath); - File[] listOfFiles = folder.listFiles(); - if (listOfFiles != null) { - for (File file : listOfFiles) { - if (file.getName().endsWith(suffix)) { - results.add(file.getName()); - } - } - } - return results; - } - - private static boolean useExternalStoryLocation(String property) { - String storyLocationProp = System.getProperty(property); - return StringUtils.isNotEmpty(storyLocationProp) && !"NONE".equals(storyLocationProp); - } - -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/JobExecutionInfo.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/JobExecutionInfo.java deleted file mode 100644 index 
92b783476f7..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/JobExecutionInfo.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra; - -public class JobExecutionInfo { - private final String jobId; - private final String executionId; - - public JobExecutionInfo(String jobId, String executionId) { - this.jobId = jobId; - this.executionId = executionId; - } - - public String getJobId() { - return jobId; - } - - public String getExecutionId() { - return executionId; - } - - @Override - public String toString() { - return "JobExecutionInfo{" + - "jobId='" + jobId + '\'' + - ", executionId='" + executionId + '\'' + - '}'; - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/OffsetDateTimeConverter.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/OffsetDateTimeConverter.java deleted file mode 100644 index ef469a48bba..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/OffsetDateTimeConverter.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra; - -import org.jbehave.core.steps.ParameterConverters; - -import java.lang.reflect.Type; -import java.time.OffsetDateTime; -import java.time.format.DateTimeFormatter; - -public class OffsetDateTimeConverter implements ParameterConverters.ParameterConverter { - public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX"); - - @Override - public boolean accept(Type type) { - return type instanceof Class && OffsetDateTime.class.isAssignableFrom((Class) type); - } - - @Override - public Object convertValue(String value, Type type) { - return OffsetDateTime.parse(value, SOLR_DATETIME_FORMATTER); - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java deleted file mode 100644 index 1ffdb2a2da4..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/Solr.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra; - -import static org.apache.ambari.infra.TestUtil.doWithin; -import static org.apache.ambari.infra.TestUtil.getDockerHost; -import static org.apache.ambari.infra.TestUtil.runCommand; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.nio.file.Paths; - -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.DefaultHttpRequestRetryHandler; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.LBHttpSolrClient; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrInputDocument; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class Solr { - private static final Logger LOG = LoggerFactory.getLogger(Solr.class); - public static final String AUDIT_LOGS_COLLECTION = "audit_logs"; - public static final String HADOOP_LOGS_COLLECTION = "hadoop_logs"; - private static final int SOLR_PORT = 8983; - - private final SolrClient solrClient; - private final String configSetPath; - - public Solr() { - this(""); - } - - public Solr(String configSetPath) { - this.configSetPath = configSetPath; - this.solrClient = new LBHttpSolrClient.Builder().withBaseSolrUrls(String.format("http://%s:%d/solr/%s_shard1_replica1", - getDockerHost(), - SOLR_PORT, - AUDIT_LOGS_COLLECTION)).build(); - } - - public void waitUntilSolrIsUp() throws Exception { - try (CloseableHttpClient httpClient = HttpClientBuilder.create().setRetryHandler(new DefaultHttpRequestRetryHandler(0, false)).build()) { - doWithin(60, "Check Solr running", () -> pingSolr(httpClient)); - } - } - - private boolean pingSolr(CloseableHttpClient httpClient) { - try (CloseableHttpResponse response = httpClient.execute(new HttpGet(String.format("http://%s:%d/solr/admin/collections?action=LIST", getDockerHost(), SOLR_PORT)))) { - return response.getStatusLine().getStatusCode() == 200; - } - catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public void add(SolrInputDocument solrInputDocument) { - try { - solrClient.add(solrInputDocument); - } catch (SolrServerException | IOException e) { - throw new RuntimeException(e); - } - } - - public void createSolrCollection(String collectionName) { - LOG.info("Creating collection"); - runCommand(new String[]{"docker", "exec", "docker_solr_1", "solr", "create_collection", "-force", "-c", collectionName, "-d", Paths.get(configSetPath, "configsets", collectionName, "conf").toString(), "-n", collectionName + "_conf"}); - } - - public QueryResponse query(SolrQuery query) { - try { - return solrClient.query(query); - } catch (SolrServerException | IOException e) { - throw new RuntimeException(e); - } - } - - public void commit() { - try { - solrClient.commit(); - } catch (SolrServerException | IOException e) { - throw new RuntimeException(e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java deleted file mode 100644 index f48e10734a7..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/TestUtil.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed 
to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra; - -import static java.lang.System.currentTimeMillis; - -import java.nio.charset.StandardCharsets; -import java.util.function.BooleanSupplier; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TestUtil { - private static final Logger LOG = LoggerFactory.getLogger(TestUtil.class); - - public static void doWithin(int sec, String actionName, BooleanSupplier predicate) { - doWithin(sec, actionName, () -> { - if (!predicate.getAsBoolean()) - throw new RuntimeException("Predicate was false!"); - }); - } - - public static void doWithin(int sec, String actionName, Runnable runnable) { - long start = currentTimeMillis(); - Exception exception; - while (true) { - try { - runnable.run(); - return; - } - catch (Exception e) { - exception = e; - } - - if (currentTimeMillis() - start > sec * 1000) { - throw new AssertionError(String.format("Unable to perform action '%s' within %d seconds", actionName, sec), exception); - } - else { - LOG.info("Performing action '{}' failed. retrying...", actionName); - } - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RuntimeException(e); - } - } - } - - public static String getDockerHost() { - return System.getProperty("docker.host") != null ? System.getProperty("docker.host") : "localhost"; - } - - public static void runCommand(String[] command) { - try { - LOG.info("Exec command: {}", StringUtils.join(command, " ")); - Process process = Runtime.getRuntime().exec(command); - String stdout = IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8); - LOG.info("Exec command result {}", stdout); - } catch (Exception e) { - throw new RuntimeException("Error during execute shell command: ", e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java deleted file mode 100644 index 3016d67c0e8..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MetricsIT.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics; - -import static java.lang.System.currentTimeMillis; -import static org.apache.ambari.infra.Solr.HADOOP_LOGS_COLLECTION; -import static org.apache.ambari.infra.TestUtil.runCommand; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.net.URL; -import java.util.HashSet; -import java.util.Set; - -import org.apache.ambari.infra.Solr; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MetricsIT { - private static final Logger LOG = LoggerFactory.getLogger(MetricsIT.class); - - private static MockMetricsServer metricsServer; - private static String shellScriptLocation; - - @BeforeClass - public static void setupMetricsServer() throws Exception { - URL location = MetricsIT.class.getProtectionDomain().getCodeSource().getLocation(); - String ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent(); - - // TODO: use the same containers as ambari-infra-manager-it - shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh"; - LOG.info("Creating new docker containers for testing Ambari Infra Solr Metrics plugin ..."); - runCommand(new String[]{shellScriptLocation, "start"}); - - Solr solr = new Solr("/usr/lib/ambari-infra-solr/server/solr"); - solr.waitUntilSolrIsUp(); - solr.createSolrCollection(HADOOP_LOGS_COLLECTION); - - metricsServer = new MockMetricsServer(); - metricsServer.init(); - } - - @AfterClass - public static void tearDown() throws Exception { - LOG.info("shutdown containers"); - runCommand(new String[]{shellScriptLocation, "stop"}); - } - - @Test - public void testAllMetricsArrived() throws Exception { - metricsServer.addExpectedMetrics(EXPECTED_METRICS); - long start = currentTimeMillis(); - while (!metricsServer.getNotReceivedMetrics().isEmpty()) { - Thread.sleep(1000); - if (currentTimeMillis() - start > 30 * 1000) - break; - LOG.info("Checking any metrics arrived..."); - } - - metricsServer.getNotReceivedMetrics().forEach(metric -> LOG.info("Metric not received: {}", metric)); - assertThat(metricsServer.getNotReceivedMetrics().isEmpty(), is(true)); - } - - private static final Set EXPECTED_METRICS = new HashSet() {{ - add("infra.solr.jvm.threads.count"); - add("infra.solr.jvm.threads.deadlock.count"); - add("infra.solr.jvm.memory.heap.used"); - add("infra.solr.jvm.memory.heap.max"); - add("infra.solr.jvm.memory.non-heap.used"); - add("infra.solr.jvm.memory.non-heap.max"); - add("infra.solr.jvm.memory.pools.CMS-Old-Gen.used"); - add("infra.solr.jvm.memory.pools.CMS-Old-Gen.max"); - add("infra.solr.jvm.gc.ConcurrentMarkSweep.count"); - add("infra.solr.jvm.gc.ConcurrentMarkSweep.time"); - add("infra.solr.jvm.gc.ParNew.count"); - add("infra.solr.jvm.gc.ParNew.time"); - add("infra.solr.jvm.memory.pools.Metaspace.used"); - add("infra.solr.jvm.memory.pools.Metaspace.max"); - add("infra.solr.jvm.memory.pools.Par-Eden-Space.used"); - 
add("infra.solr.jvm.memory.pools.Par-Eden-Space.max"); - add("infra.solr.jvm.memory.pools.Par-Survivor-Space.used"); - add("infra.solr.jvm.memory.pools.Par-Survivor-Space.max"); - add("infra.solr.jvm.os.processCpuLoad"); - add("infra.solr.jvm.os.systemCpuLoad"); - add("infra.solr.jvm.os.openFileDescriptorCount"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.adds"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.deletesById"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.errors"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE.updateHandler.docsPending"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requests"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requestTimes.avgRequestsPerSecond"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requestTimes.avgTimePerRequest"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./select.requestTimes.medianRequestTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requests"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requestTimes.avgRequestsPerSecond"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requestTimes.avgTimePerRequest"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.UPDATE./update.requestTimes.medianRequestTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requests"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requestTimes.avgRequestsPerSecond"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requestTimes.avgTimePerRequest"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./get.requestTimes.medianRequestTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requests"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requestTimes.avgRequestsPerSecond"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requestTimes.avgTimePerRequest"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.ADMIN./admin/luke.requestTimes.medianRequestTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requests"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requestTimes.avgRequestsPerSecond"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requestTimes.avgTimePerRequest"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.QUERY./query.requestTimes.medianRequestTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.INDEX.sizeInBytes"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.filterCache.hitratio"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.filterCache.size"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.filterCache.warmupTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.queryResultCache.hitratio"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.queryResultCache.size"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.queryResultCache.warmupTime"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.documentCache.hitratio"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.documentCache.size"); - add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.searcher.documentCache.warmupTime"); - 
add("infra.solr.core.hadoop_logs.shard1.replica_n1.CACHE.core.fieldCache.entries_count"); - }}; -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java deleted file mode 100644 index 9d2734fcb60..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/solr/metrics/MockMetricsServer.java +++ /dev/null @@ -1,75 +0,0 @@ -package org.apache.ambari.infra.solr.metrics; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import static java.util.Collections.singletonList; -import static spark.Spark.get; -import static spark.Spark.port; -import static spark.Spark.post; - -import java.util.Set; -import java.util.concurrent.ConcurrentSkipListSet; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.gson.Gson; - -import spark.Request; -import spark.Response; -import spark.servlet.SparkApplication; - -public class MockMetricsServer implements SparkApplication { - private static final Logger LOG = LoggerFactory.getLogger(MockMetricsServer.class); - private static final String HOST_NAME = "metrics_collector"; - - private Set expectedMetrics; - - @Override - public void init() { - port(6188); - get("/ping", (req, resp) -> "pong"); - get("/ws/v1/timeline/metrics/livenodes", this::queryState); - post("/ws/v1/timeline/metrics", this::logBody); - } - - private Object queryState(Request request, Response response) { - LOG.info("Sending hostname {}", HOST_NAME); - response.type("application/json"); - return new Gson().toJson(singletonList(HOST_NAME)); - } - - private Object logBody(Request req, Response resp) { - String body = req.body(); - LOG.info("Incoming metrics {}", body); - - expectedMetrics.removeIf(body::contains); - - return "OK"; - } - - public void addExpectedMetrics(Set expectedMetrics) { - this.expectedMetrics = new ConcurrentSkipListSet<>(expectedMetrics); - } - - public Set getNotReceivedMetrics() { - return expectedMetrics; - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java deleted file mode 100644 index f219ce5cfab..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/AbstractInfraSteps.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.steps; - -import static org.apache.ambari.infra.Solr.AUDIT_LOGS_COLLECTION; -import static org.apache.ambari.infra.Solr.HADOOP_LOGS_COLLECTION; -import static org.apache.ambari.infra.TestUtil.doWithin; -import static org.apache.ambari.infra.TestUtil.getDockerHost; -import static org.apache.ambari.infra.TestUtil.runCommand; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.time.OffsetDateTime; -import java.util.Date; - -import org.apache.ambari.infra.InfraClient; -import org.apache.ambari.infra.Solr; -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.LocatedFileStatus; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RemoteIterator; -import org.apache.solr.common.SolrInputDocument; -import org.jbehave.core.annotations.AfterStories; -import org.jbehave.core.annotations.BeforeStories; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.services.s3.AmazonS3Client; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import com.amazonaws.services.s3.model.ObjectListing; - -public abstract class AbstractInfraSteps { - private static final Logger LOG = LoggerFactory.getLogger(AbstractInfraSteps.class); - - private static final int INFRA_MANAGER_PORT = 61890; - private static final int FAKE_S3_PORT = 4569; - private static final int HDFS_PORT = 9000; - protected static final String S3_BUCKET_NAME = "testbucket"; - private String ambariFolder; - private String shellScriptLocation; - private String dockerHost; - private AmazonS3Client s3client; - private int documentId = 0; - private Solr solr; - - public InfraClient getInfraClient() { - return new InfraClient(String.format("http://%s:%d/api/v1/jobs", dockerHost, INFRA_MANAGER_PORT)); - } - - public Solr getSolr() { - return solr; - } - - public AmazonS3Client getS3client() { - return s3client; - } - - public String getLocalDataFolder() { - return ambariFolder + "/ambari-infra/ambari-infra-manager/docker/test-out"; - } - - @BeforeStories - public void initDockerContainer() throws Exception { - System.setProperty("HADOOP_USER_NAME", "root"); - - URL location = AbstractInfraSteps.class.getProtectionDomain().getCodeSource().getLocation(); - ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent(); - - LOG.info("Clean local data folder {}", getLocalDataFolder()); - FileUtils.cleanDirectory(new File(getLocalDataFolder())); - - shellScriptLocation = ambariFolder + "/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh"; - LOG.info("Create new docker container for testing Ambari Infra Manager ..."); - runCommand(new 
String[]{shellScriptLocation, "start"}); - - dockerHost = getDockerHost(); - - solr = new Solr(); - solr.waitUntilSolrIsUp(); - - solr.createSolrCollection(AUDIT_LOGS_COLLECTION); - solr.createSolrCollection(HADOOP_LOGS_COLLECTION); - - LOG.info("Initializing s3 client"); - s3client = new AmazonS3Client(new BasicAWSCredentials("remote-identity", "remote-credential")); - s3client.setEndpoint(String.format("http://%s:%d", dockerHost, FAKE_S3_PORT)); - s3client.createBucket(S3_BUCKET_NAME); - - checkInfraManagerReachable(); - } - - private void checkInfraManagerReachable() throws Exception { - try (InfraClient httpClient = getInfraClient()) { - doWithin(30, "Start Ambari Infra Manager", httpClient::getJobs); - LOG.info("Ambari Infra Manager is up and running"); - } - } - - protected void addDocument(OffsetDateTime logtime) { - SolrInputDocument solrInputDocument = new SolrInputDocument(); - solrInputDocument.addField("logType", "HDFSAudit"); - solrInputDocument.addField("cluster", "cl1"); - solrInputDocument.addField("event_count", 1); - solrInputDocument.addField("repo", "hdfs"); - solrInputDocument.addField("reqUser", "ambari-qa"); - solrInputDocument.addField("type", "hdfs_audit"); - solrInputDocument.addField("seq_num", 9); - solrInputDocument.addField("result", 1); - solrInputDocument.addField("path", "/root/test-logs/hdfs-audit/hdfs-audit.log"); - solrInputDocument.addField("ugi", "ambari-qa (auth:SIMPLE)"); - solrInputDocument.addField("host", "logfeeder.apache.org"); - solrInputDocument.addField("action", "getfileinfo"); - solrInputDocument.addField("log_message", "allowed=true\tugi=ambari-qa (auth:SIMPLE)\tip=/192.168.64.102\tcmd=getfileinfo\tsrc=/ats/active\tdst=null\tperm=null\tproto=rpc\tcallerContext=HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f"); - solrInputDocument.addField("logger_name", "FSNamesystem.audit"); - solrInputDocument.addField("id", Integer.toString(documentId++)); - solrInputDocument.addField("authType", "SIMPLE"); - solrInputDocument.addField("logfile_line_number", 1); - solrInputDocument.addField("cliIP", "/192.168.64.102"); - solrInputDocument.addField("level", "INFO"); - solrInputDocument.addField("resource", "/ats/active"); - solrInputDocument.addField("ip", "172.18.0.2"); - solrInputDocument.addField("evtTime", "2017-12-08T10:23:16.452Z"); - solrInputDocument.addField("req_caller_id", "HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f"); - solrInputDocument.addField("repoType", 1); - solrInputDocument.addField("enforcer", "hadoop-acl"); - solrInputDocument.addField("cliType", "rpc"); - solrInputDocument.addField("message_md5", "-6778765776916226588"); - solrInputDocument.addField("event_md5", "5627261521757462732"); - solrInputDocument.addField("logtime", new Date(logtime.toInstant().toEpochMilli())); - solrInputDocument.addField("_ttl_", "+7DAYS"); - solrInputDocument.addField("_expire_at_", "2017-12-15T10:23:19.106Z"); - solr.add(solrInputDocument); - } - - @AfterStories - public void shutdownContainers() throws Exception { - Thread.sleep(2000); // sync with s3 server - ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME); - ObjectListing objectListing = getS3client().listObjects(listObjectsRequest); - LOG.info("Found {} files on s3.", objectListing.getObjectSummaries().size()); - objectListing.getObjectSummaries().forEach(s3ObjectSummary -> LOG.info("Found file on s3 with key {}", s3ObjectSummary.getKey())); - - LOG.info("Listing files on hdfs."); - try 
(FileSystem fileSystem = getHdfs()) { - int count = 0; - RemoteIterator it = fileSystem.listFiles(new Path("/test_audit_logs"), true); - while (it.hasNext()) { - LOG.info("Found file on hdfs with name {}", it.next().getPath().getName()); - ++count; - } - LOG.info("{} files found on hfds", count); - } - - LOG.info("shutdown containers"); - runCommand(new String[]{shellScriptLocation, "stop"}); - } - - protected FileSystem getHdfs() throws IOException { - Configuration conf = new Configuration(); - conf.set("fs.defaultFS", String.format("hdfs://%s:%d/", dockerHost, HDFS_PORT)); - return FileSystem.get(conf); - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java b/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java deleted file mode 100644 index d84c23fb69f..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/java/org/apache/ambari/infra/steps/ExportJobsSteps.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.steps; - -import static java.util.Objects.requireNonNull; -import static org.apache.ambari.infra.OffsetDateTimeConverter.SOLR_DATETIME_FORMATTER; -import static org.apache.ambari.infra.TestUtil.doWithin; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasProperty; -import static org.hamcrest.core.IsCollectionContaining.hasItem; -import static org.junit.Assert.assertThat; - -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.time.Duration; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.infra.InfraClient; -import org.apache.ambari.infra.JobExecutionInfo; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.LocatedFileStatus; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RemoteIterator; -import org.apache.solr.client.solrj.SolrQuery; -import org.jbehave.core.annotations.Given; -import org.jbehave.core.annotations.Then; -import org.jbehave.core.annotations.When; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.services.s3.AmazonS3Client; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import com.amazonaws.services.s3.model.ObjectListing; -import com.amazonaws.services.s3.model.ObjectMetadata; - -public class ExportJobsSteps extends AbstractInfraSteps { - private static final Logger LOG = LoggerFactory.getLogger(ExportJobsSteps.class); - - private Map launchedJobs = new HashMap<>(); - - @Given("$count documents in solr") - public void addDocuments(int count) { - OffsetDateTime intervalEnd = OffsetDateTime.now(); - for (int i = 0; i < count; ++i) { - addDocument(intervalEnd.minusMinutes(i % (count / 10))); - } - getSolr().commit(); - } - - @Given("$count documents in solr with logtime from $startLogtime to $endLogtime") - public void addDocuments(long count, OffsetDateTime startLogtime, OffsetDateTime endLogtime) { - Duration duration = Duration.between(startLogtime, endLogtime); - long increment = duration.toNanos() / count; - for (int i = 0; i < count; ++i) - addDocument(startLogtime.plusNanos(increment * i)); - getSolr().commit(); - } - - @Given("a file on s3 with key $key") - public void addFileToS3(String key) throws Exception { - try (ByteArrayInputStream inputStream = new ByteArrayInputStream("anything".getBytes())) { - getS3client().putObject(S3_BUCKET_NAME, key, inputStream, new ObjectMetadata()); - } - } - - @When("start $jobName job") - public void startJob(String jobName) throws Exception { - startJob(jobName, null, 0); - } - - @When("start $jobName job with parameters $parameters after $waitSec seconds") - public void startJob(String jobName, String parameters, int waitSec) throws Exception { - Thread.sleep(waitSec * 1000); - try (InfraClient httpClient = getInfraClient()) { - JobExecutionInfo jobExecutionInfo = httpClient.startJob(jobName, parameters); - LOG.info("Job {} started: {}", jobName, jobExecutionInfo); - launchedJobs.put(jobName, jobExecutionInfo); - } - } - - @When("restart $jobName job within $waitSec seconds") - public void restartJob(String jobName, int waitSec) { - doWithin(waitSec, "Restarting job " + jobName, () -> { - try (InfraClient httpClient = getInfraClient()) { - httpClient.restartJob(jobName, launchedJobs.get(jobName).getJobId()); - } catch (Exception e) { - throw new 
RuntimeException(e); - } - }); - } - - @When("stop job $jobName after at least $count file exists in s3 with filename containing text $text within $waitSec seconds") - public void stopJob(String jobName, int count, String text, int waitSec) throws Exception { - AmazonS3Client s3Client = getS3client(); - ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME); - doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME) - && fileCountOnS3(text, s3Client, listObjectsRequest) > count); - - try (InfraClient httpClient = getInfraClient()) { - httpClient.stopJob(launchedJobs.get(jobName).getExecutionId()); - } - } - - @When("delete file with key $key from s3") - public void deleteFileFromS3(String key) { - getS3client().deleteObject(S3_BUCKET_NAME, key); - } - - @Then("Check filenames contains the text $text on s3 server after $waitSec seconds") - public void checkS3After(String text, int waitSec) { - AmazonS3Client s3Client = getS3client(); - ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME); - doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME) - && !s3Client.listObjects(listObjectsRequest).getObjectSummaries().isEmpty()); - - ObjectListing objectListing = s3Client.listObjects(listObjectsRequest); - assertThat(objectListing.getObjectSummaries(), hasItem(hasProperty("key", containsString(text)))); - } - - @Then("Check $count files exists on s3 server with filenames containing the text $text after $waitSec seconds") - public void checkNumberOfFilesOnS3(long count, String text, int waitSec) { - AmazonS3Client s3Client = getS3client(); - ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME); - doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME) - && fileCountOnS3(text, s3Client, listObjectsRequest) == count); - } - - private long fileCountOnS3(String text, AmazonS3Client s3Client, ListObjectsRequest listObjectsRequest) { - return s3Client.listObjects(listObjectsRequest).getObjectSummaries().stream() - .filter(s3ObjectSummary -> s3ObjectSummary.getKey().contains(text)) - .count(); - } - - @Then("Less than $count files exists on s3 server with filenames containing the text $text after $waitSec seconds") - public void checkLessThanFileExistsOnS3(long count, String text, int waitSec) { - AmazonS3Client s3Client = getS3client(); - ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME); - doWithin(waitSec, "check uploaded files to s3", () -> s3Client.doesBucketExist(S3_BUCKET_NAME) && between( - fileCountOnS3(text, s3Client, listObjectsRequest), 1L, count - 1L)); - } - - private boolean between(long count, long from, long to) { - return from <= count && count <= to; - } - - @Then("No file exists on s3 server with filenames containing the text $text") - public void fileNotExistOnS3(String text) { - AmazonS3Client s3Client = getS3client(); - ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(S3_BUCKET_NAME); - assertThat(s3Client.listObjects(listObjectsRequest).getObjectSummaries().stream() - .anyMatch(s3ObjectSummary -> s3ObjectSummary.getKey().contains(text)), is(false)); - } - - @Then("solr contains $count documents between $startLogtime and $endLogtime") - public void documentCount(int count, OffsetDateTime startLogTime, OffsetDateTime endLogTime) { - SolrQuery query = new SolrQuery(); - 
query.setRows(count * 2); - query.setQuery(String.format("logtime:[\"%s\" TO \"%s\"]", SOLR_DATETIME_FORMATTER.format(startLogTime), SOLR_DATETIME_FORMATTER.format(endLogTime))); - assertThat(getSolr().query(query).getResults().size(), is(count)); - } - - @Then("solr does not contain documents between $startLogtime and $endLogtime after $waitSec seconds") - public void isSolrEmpty(OffsetDateTime startLogTime, OffsetDateTime endLogTime, int waitSec) { - SolrQuery query = new SolrQuery(); - query.setRows(1); - query.setQuery(String.format("logtime:[\"%s\" TO \"%s\"]", SOLR_DATETIME_FORMATTER.format(startLogTime), SOLR_DATETIME_FORMATTER.format(endLogTime))); - doWithin(waitSec, "check solr is empty", () -> isSolrEmpty(query)); - } - - private boolean isSolrEmpty(SolrQuery query) { - return getSolr().query(query).getResults().isEmpty(); - } - - @Then("Check $count files exists on hdfs with filenames containing the text $text in the folder $path after $waitSec seconds") - public void checkNumberOfFilesOnHdfs(int count, String text, String path, int waitSec) throws Exception { - try (FileSystem fileSystem = getHdfs()) { - doWithin(waitSec, "check uploaded files to hdfs", () -> { - try { - int fileCount = 0; - RemoteIterator it = fileSystem.listFiles(new Path(path), true); - while (it.hasNext()) { - if (it.next().getPath().getName().contains(text)) - ++fileCount; - } - return fileCount == count; - } - catch (IOException e) { - throw new UncheckedIOException(e); - } - }); - } - } - - @Then("Check $count files exists on local filesystem with filenames containing the text $text in the folder $path for job $jobName") - public void checkNumberOfFilesOnLocalFilesystem(long count, String text, String path, String jobName) { - File destinationDirectory = new File(getLocalDataFolder(), path.replace("${jobId}", launchedJobs.get(jobName).getJobId())); - LOG.info("Destination directory path: {}", destinationDirectory.getAbsolutePath()); - doWithin(5, "Destination directory exists", destinationDirectory::exists); - - File[] files = requireNonNull(destinationDirectory.listFiles(), - String.format("Path %s is not a directory or an I/O error occurred!", destinationDirectory.getAbsolutePath())); - assertThat(Arrays.stream(files) - .filter(file -> file.getName().contains(text)) - .count(), is(count)); - } -} diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties b/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties deleted file mode 100644 index 956bc6364e9..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/resources/log4j.properties +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-log4j.rootLogger=INFO, stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story b/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story deleted file mode 100644 index 122a634dcb5..00000000000 --- a/ambari-infra/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story +++ /dev/null @@ -1,67 +0,0 @@ -Scenario: Exporting documents from solr and uploading them to s3 using the default configuration - -Given 1000 documents in solr -When start archive_audit_logs job -Then Check filenames contains the text audit_logs on s3 server after 20 seconds - - -Scenario: Exporting 10 documents using writeBlockSize=3 produces 4 files - -Given 10 documents in solr with logtime from 2010-10-09T05:00:00.000Z to 2010-10-09T20:00:00.000Z -When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-10-09T00:00:00.000Z,end=2010-10-11T00:00:00.000Z after 2 seconds -Then Check 4 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-10-09 after 20 seconds -And solr does not contain documents between 2010-10-09T05:00:00.000Z and 2010-10-09T20:00:00.000Z after 5 seconds - - -Scenario: Running the archiving job with a start value greater than the end value exports and deletes 0 documents - -Given 10 documents in solr with logtime from 2010-01-01T05:00:00.000Z to 2010-01-04T05:00:00.000Z -When start archive_audit_logs job with parameters writeBlockSize=3,start=2010-01-03T05:00:00.000Z,end=2010-01-02T05:00:00.000Z after 2 seconds -Then No file exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2010-01-0 -And solr contains 10 documents between 2010-01-01T05:00:00.000Z and 2010-01-04T05:00:00.000Z - - -Scenario: Archiving job fails when only part of the data is exported. After resolving the issue and restarting, the job exports the rest of the data.
- -Given 200 documents in solr with logtime from 2011-10-09T05:00:00.000Z to 2011-10-09T20:00:00.000Z -And a file on s3 with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz -When start archive_audit_logs job with parameters writeBlockSize=20,start=2010-11-09T00:00:00.000Z,end=2011-10-11T00:00:00.000Z after 2 seconds -Then Check 3 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds -And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T07:59:59.999Z after 5 seconds -When delete file with key solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz from s3 -And restart archive_audit_logs job within 2 seconds -Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2011-10-09 after 20 seconds -And solr does not contain documents between 2011-10-09T05:00:00.000Z and 2011-10-09T20:00:00.000Z after 5 seconds - - -Scenario: After the delete job removes documents from solr, no documents are found in the specified interval - -Given 10 documents in solr with logtime from 2012-10-09T05:00:00.000Z to 2012-10-09T20:00:00.000Z -When start delete_audit_logs job with parameters start=2012-10-09T05:00:00.000Z,end=2012-10-09T20:00:00.000Z after 2 seconds -Then solr does not contain documents between 2012-10-09T05:00:00.000Z and 2012-10-09T20:00:00.000Z after 5 seconds - - -Scenario: Archiving documents to hdfs - -Given 1000 documents in solr with logtime from 2014-01-04T05:00:00.000Z to 2014-01-06T20:00:00.000Z -When start archive_audit_logs job with parameters start=2014-01-04T05:00:00.000Z,end=2014-01-06T20:00:00.000Z,destination=HDFS after 2 seconds -Then Check 7 files exists on hdfs with filenames containing the text audit_logs_-_2014-01-0 in the folder /test_audit_logs after 10 seconds -And solr does not contain documents between 2014-01-04T05:00:00.000Z and 2014-01-06T20:00:00.000Z after 10 seconds - - -Scenario: Archiving documents to local filesystem - -Given 200 documents in solr with logtime from 2014-02-04T05:00:00.000Z to 2014-02-06T20:00:00.000Z -When start archive_audit_logs job with parameters start=2014-02-04T05:00:00.000Z,end=2014-02-06T20:00:00.000Z,destination=LOCAL,localDestinationDirectory=/root/archive after 2 seconds -Then Check 2 files exists on local filesystem with filenames containing the text audit_logs_-_2014-02-0 in the folder audit_logs_${jobId}_2014-02-06T20-00-00.000Z for job archive_audit_logs -And solr does not contain documents between 2014-02-04T05:00:00.000Z and 2014-02-06T20:00:00.000Z after 10 seconds - - -Scenario: Launch the archiving job. Initiate a stop and check that only part of the data is archived. After a restart, all data must be archived.
- -Given 200 documents in solr with logtime from 2014-03-09T05:00:00.000Z to 2014-03-09T20:00:00.000Z -When start archive_audit_logs job with parameters writeBlockSize=20,start=2014-03-09T05:00:00.000Z,end=2014-03-09T20:00:00.000Z after 2 seconds -And stop job archive_audit_logs after at least 1 file exists in s3 with filename containing text solr_archive_audit_logs_-_2014-03-09 within 10 seconds -Then Less than 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds -When restart archive_audit_logs job within 10 seconds -Then Check 10 files exists on s3 server with filenames containing the text solr_archive_audit_logs_-_2014-03-09 after 20 seconds diff --git a/ambari-infra/ambari-infra-manager/.gitignore b/ambari-infra/ambari-infra-manager/.gitignore deleted file mode 100644 index 94b38299dda..00000000000 --- a/ambari-infra/ambari-infra-manager/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -out/* -*.pid -Profile -.env -test-out \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/README.md b/ambari-infra/ambari-infra-manager/README.md deleted file mode 100644 index 4e38a69c61e..00000000000 --- a/ambari-infra/ambari-infra-manager/README.md +++ /dev/null @@ -1,117 +0,0 @@ - - -# Ambari Infra Manager - -## Overview - -Ambari Infra Manager is a REST-based management application for Ambari Infra services (like Infra Solr). The API is built on top of [Spring Batch](http://docs.spring.io/spring-batch/reference/html/). - -### Architecture -![batch-1](docs/images/batch-1.png) - -### Job execution overview -![batch-2](docs/images/batch-2.png) - -### Job workflow -![batch-3](docs/images/batch-3.png) - -### Step workflow -![batch-4](docs/images/batch-4.png) - -(images originally from [here](http://docs.spring.io/spring-batch/reference/html/)) - -## API documentation - -Infra Manager uses [Swagger](http://swagger.io/); the generated yaml file can be downloaded from [here](docs/api/swagger.yaml). - - -## Development guide - -### Adding a new custom job - -As Infra Manager is a Spring-based application that uses Java configuration, adding a new custom Job requires the Jobs/Steps/Configurations to be on the classpath. Spring beans are registered only in a specific package, so when writing a plugin, all added Java classes need to be placed inside the "org.apache.ambari.infra" package. - -The plugin will also need all Spring & Spring Batch dependencies. To add a new Job, define a new Configuration object in which you declare your own jobs/steps/writers/readers/processors, as you can see in this example: -```java -@Configuration -@EnableBatchProcessing -public class MyJobConfig { - - @Inject - private StepBuilderFactory steps; - - @Inject - private JobBuilderFactory jobs; - - - @Bean(name = "dummyStep") - protected Step dummyStep(ItemReader reader, - ItemProcessor processor, - ItemWriter writer) { - return steps.get("dummyStep").listener(new DummyStepListener()).
 chunk(2) - .reader(reader).processor(processor).writer(writer).build(); - } - - @Bean(name = "dummyJob") - public Job job(@Qualifier("dummyStep") Step dummyStep) { - return jobs.get("dummyJob").listener(new DummyJobListener()).start(dummyStep).build(); - } - -} -``` -As you can see, it requires implementing [ItemWriter](https://docs.spring.io/spring-batch/apidocs/org/springframework/batch/item/ItemWriter.html), [ItemReader](http://docs.spring.io/spring-batch/trunk/apidocs/org/springframework/batch/item/ItemReader.html) and [ItemProcessor](https://docs.spring.io/spring-batch/apidocs/org/springframework/batch/item/ItemProcessor.html). - -### Schedule custom jobs - -Business requirements may call for scheduling jobs (e.g. daily) instead of running them manually through the REST API. This can be done by adding a custom bean to the "org.apache.ambari.infra" package and using [@Scheduled](http://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/scheduling/annotation/Scheduled.html): -```java -@Named -public class MySchedulerObject { - - @Inject - private JobService jobService; // or JobOperator jobOperator if spring-batch-admin manager dependency is not included - - @Value("${infra-manager.batch.my.param:defaultString}") - private String myParamFromLogSearchProperties; - - @Scheduled(cron = "*/5 * * * * MON-FRI") - public void doSomething() { - // setup job params - jobService.launch(jobName, jobParameters, TimeZone.getDefault()); - } - - @Scheduled(cron = "${infra.manager.my.prop}") - public void doSomethingBasedOnInfraProperty() { - // do something ... - } -} -``` - -You can put your cron expression inside the infra-manager.properties file to make it configurable. -### Build & Run Application -```bash -mvn clean package exec:java -``` - -### Build & Run Application in docker container -```bash -cd docker -./infra-manager-docker.sh -``` \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/build.xml b/ambari-infra/ambari-infra-manager/build.xml deleted file mode 100644 index 6df3767c821..00000000000 --- a/ambari-infra/ambari-infra-manager/build.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/docker/Dockerfile b/ambari-infra/ambari-infra-manager/docker/Dockerfile deleted file mode 100644 index eaefe956953..00000000000 --- a/ambari-infra/ambari-infra-manager/docker/Dockerfile +++ /dev/null @@ -1,52 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -FROM centos:centos6 - -RUN echo root:changeme | chpasswd - -RUN yum clean all -y && yum update -y -RUN yum -y install vim wget rpm-build sudo which telnet tar openssh-server openssh-clients ntp git httpd lsof -RUN rpm -e --nodeps --justdb glibc-common -RUN yum -y install glibc-common - -ENV HOME /root - -#Install JAVA -ENV JAVA_VERSION 8u131 -ENV BUILD_VERSION b11 -RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/d54c1d3a095b4ff2b6607d096fa80163/jdk-$JAVA_VERSION-linux-x64.rpm -O jdk-8-linux-x64.rpm -RUN rpm -ivh jdk-8-linux-x64.rpm -ENV JAVA_HOME /usr/java/default/ - -#Install Maven -RUN mkdir -p /opt/maven -WORKDIR /opt/maven -RUN wget http://archive.apache.org/dist/maven/maven-3/3.3.1/binaries/apache-maven-3.3.1-bin.tar.gz -RUN tar -xvzf /opt/maven/apache-maven-3.3.1-bin.tar.gz -RUN rm -rf /opt/maven/apache-maven-3.3.1-bin.tar.gz - -ENV M2_HOME /opt/maven/apache-maven-3.3.1 -ENV MAVEN_OPTS -Xmx2048m -ENV PATH $PATH:$JAVA_HOME/bin:$M2_HOME/bin - -# SSH key -RUN ssh-keygen -f /root/.ssh/id_rsa -t rsa -N '' -RUN cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys -RUN chmod 600 /root/.ssh/authorized_keys -RUN sed -ri 's/UsePAM yes/UsePAM no/g' /etc/ssh/sshd_config - -ADD bin/start.sh /root/start.sh -RUN chmod +x /root/start.sh - -WORKDIR /root -CMD /root/start.sh \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/docker/bin/start.sh b/ambari-infra/ambari-infra-manager/docker/bin/start.sh deleted file mode 100755 index 8c33d32b7e2..00000000000 --- a/ambari-infra/ambari-infra-manager/docker/bin/start.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -export INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n" -touch /root/infra-manager.log -/root/ambari-infra-manager/bin/infraManager.sh start > /root/infra-manager.log -tail -f /root/infra-manager.log - diff --git a/ambari-infra/ambari-infra-manager/docker/docker-compose.yml b/ambari-infra/ambari-infra-manager/docker/docker-compose.yml deleted file mode 100644 index 2369d852b13..00000000000 --- a/ambari-infra/ambari-infra-manager/docker/docker-compose.yml +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - zookeeper: - image: zookeeper:${ZOOKEEPER_VERSION:-3.4.10} - restart: always - hostname: zookeeper - networks: - - infra-network - ports: - - 2181:2181 - environment: - ZOO_MY_ID: 1 - ZOO_SERVERS: server.1=zookeeper:2888:3888 - solr: -# TODO: use infra-solr - image: solr:${SOLR_VERSION:-7.4.0} - restart: always - hostname: solr - ports: - - "8983:8983" - networks: - - infra-network - env_file: - - Profile - entrypoint: - - docker-entrypoint.sh - - solr - - start - - "-f" - - "-c" - - "-z" - - ${ZOOKEEPER_CONNECTION_STRING} - volumes: - - $AMBARI_LOCATION/ambari-logsearch/ambari-logsearch-server/src/main/configsets:/opt/solr/configsets - fakes3: - image: localstack/localstack - hostname: fakes3 - ports: - - "4569:4569" - environment: - - SERVICES=s3:4569 - networks: - infra-network: - aliases: - - testbucket.fakes3 - env_file: - - Profile - namenode: - image: flokkr/hadoop-hdfs-namenode:${HADOOP_VERSION:-3.0.0} - hostname: namenode - ports: - - 9870:9870 - - 9000:9000 - env_file: - - Profile - environment: - ENSURE_NAMENODE_DIR: "/tmp/hadoop-hdfs/dfs/name" - networks: - - infra-network - datanode: - image: flokkr/hadoop-hdfs-datanode:${HADOOP_VERSION:-3.0.0} - links: - - namenode - env_file: - - Profile - networks: - - infra-network - inframanager: - image: ambari-infra-manager:v1.0 - restart: always - hostname: infra-manager.apache.org - networks: - - infra-network - env_file: - - Profile - ports: - - 61890:61890 - - 5007:5007 - environment: - COMPONENT: infra-manager - COMPONENT_LOG: infra-manager - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - DISPLAY: $DOCKERIP:0 - volumes: - - $AMBARI_LOCATION/ambari-infra/ambari-infra-manager/target/package:/root/ambari-infra-manager - - $AMBARI_LOCATION/ambari-infra/ambari-infra-manager/docker/test-out:/root/archive -networks: - infra-network: - driver: bridge diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh deleted file mode 100755 index 52719095c04..00000000000 --- a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker-compose.sh +++ /dev/null @@ -1,124 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License - -sdir="`dirname \"$0\"`" -: ${1:?"argument is missing: (start|stop)"} -command="$1" - -function start_containers() { - check_env_files - kill_containers - pushd $sdir/../ - local AMBARI_INFRA_MANAGER_LOCATION=$(pwd) - echo $AMBARI_INFRA_MANAGER_LOCATION - cd $AMBARI_INFRA_MANAGER_LOCATION/docker - echo "Start containers ..." - docker-compose up -d - popd - echo "Containers started" -} - -function check_env_files() { - local count=0; - - check_env_file .env setup_env - count=$((count + $?)); - check_env_file Profile setup_profile - count=$((count + $?)); - - if [[ "$count" -gt 0 ]] - then - echo "Exit" - exit; - fi -} - -function check_env_file() { - if [ -f "$sdir/$1" ]; - then - echo "$1 file exists" - return 0; - else - echo "$1 file does not exist, Creating a new one..." - $2 - echo "$1 file has been created. Check it out before starting Ambari Infra Manager. ($sdir/$1)" - return 1; - fi -} - -function setup_env() { - pushd $sdir/../../ - local AMBARI_LOCATION=$(pwd) - popd - local docker_ip=$(get_docker_ip) - cat << EOF > $sdir/.env -DOCKERIP=$docker_ip -MAVEN_REPOSITORY_LOCATION=$HOME/.m2 -AMBARI_LOCATION=$AMBARI_LOCATION - -ZOOKEEPER_VERSION=3.4.10 -ZOOKEEPER_CONNECTION_STRING=zookeeper:2181 - -SOLR_VERSION=7.4.0 - -HADOOP_VERSION=3.0.0 -EOF -} - -function get_docker_ip() { - local ip=$(ifconfig en0 | grep inet | awk '$1=="inet" {print $2}') - echo $ip -} - -function setup_profile() { - cat << EOF > $sdir/Profile -AWS_ACCESS_KEY_ID=test -AWS_SECRET_ACCESS_KEY=test -HADOOP_USER_NAME=root - -CORE-SITE.XML_fs.default.name=hdfs://namenode:9000 -CORE-SITE.XML_fs.defaultFS=hdfs://namenode:9000 -HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000 -HDFS-SITE.XML_dfs.replication=1 -EOF -} - -function kill_containers() { - pushd $sdir/../ - local AMBARI_INFRA_MANAGER_LOCATION=$(pwd) - echo "Try to remove containers if exists ..." - echo $AMBARI_INFRA_MANAGER_LOCATION - cd $AMBARI_INFRA_MANAGER_LOCATION/docker - docker-compose rm -f -s inframanager - docker-compose rm -f -s solr - docker-compose rm -f -s zookeeper - docker-compose rm -f -s fakes3 - docker-compose rm -f -s namenode - docker-compose rm -f -s datanode - popd -} - -case $command in - "start") - start_containers - ;; - "stop") - kill_containers - ;; - *) - echo "Available commands: (start|stop)" - ;; -esac diff --git a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh b/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh deleted file mode 100755 index 87d6b8aa795..00000000000 --- a/ambari-infra/ambari-infra-manager/docker/infra-manager-docker.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License - -sdir="`dirname \"$0\"`" -: ${1:?"argument is missing: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)"} -command="$1" - -function build_infra_manager_container() { - pushd $sdir - docker build -t ambari-infra-manager:v1.0 . - popd -} - -function build_infra_manager_project() { - pushd $sdir/../ - mvn clean package -DskipTests - popd -} - -function kill_infra_manager_container() { - echo "Try to remove infra manager container if exists ..." - docker rm -f infra-manager -} - -function start_infra_manager_container() { - echo "Start infra manager container ..." - pushd $sdir/../ - local AMBARI_INFRA_MANAGER_LOCATION=$(pwd) - popd - kill_infra_manager_container - docker run -d --name infra-manager --hostname infra-manager.apache.org \ - -v $AMBARI_INFRA_MANAGER_LOCATION/target/package:/root/ambari-infra-manager -p 61890:61890 -p 5007:5007 \ - ambari-infra-manager:v1.0 - ip_address=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' logsearch) - echo "Ambari Infra Manager container started on $ip_address (for Mac OSX route to boot2docker/docker-machine VM address, e.g.: 'sudo route add -net 172.17.0.0/16 192.168.59.103')" - echo "You can follow Log Search logs with 'docker logs -f infra-manager' command" -} - -case $command in - "build-and-run") - build_infra_manager_project - build_infra_manager_container - start_infra_manager_container - ;; - "build") - build_infra_manager_project - start_infra_manager_container - ;; - "build-docker-and-run") - build_infra_manager_container - start_infra_manager_container - ;; - "build-mvn-and-run") - build_infra_manager_project - build_infra_manager_container - ;; - "build-docker-only") - build_infra_manager_container - ;; - "build-mvn-only") - build_infra_manager_project - ;; - "start") - start_infra_manager_container - ;; - "stop") - kill_infra_manager_container - ;; - *) - echo "Available commands: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)" - ;; -esac \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml b/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml deleted file mode 100644 index 6fad22df98e..00000000000 --- a/ambari-infra/ambari-infra-manager/docs/api/swagger.yaml +++ /dev/null @@ -1,784 +0,0 @@ ---- -swagger: "2.0" -info: - description: "Manager component for Ambari Infra" - version: "1.0.0" - title: "Infra Manager REST API" - license: - name: "Apache 2.0" - url: "http://www.apache.org/licenses/LICENSE-2.0.html" -basePath: "/api/v1" -tags: -- name: "jobs" -schemes: -- "http" -- "https" -paths: - /jobs: - get: - tags: - - "jobs" - summary: "Get all jobs" - description: "" - operationId: "getAllJobs" - produces: - - "application/json" - parameters: - - name: "page" - in: "query" - required: false - type: "integer" - default: 0 - format: "int32" - - name: "size" - in: "query" - required: false - type: "integer" - default: 20 - format: "int32" - responses: - 200: - description: "successful operation" - schema: - type: "array" - items: - $ref: "#/definitions/JobInfo" - /jobs/executions: - delete: - tags: - - "jobs" - summary: "Stop all job executions." 
- description: "" - operationId: "stopAll" - produces: - - "application/json" - parameters: [] - responses: - 200: - description: "successful operation" - schema: - type: "integer" - format: "int32" - /jobs/executions/{jobExecutionId}: - get: - tags: - - "jobs" - summary: "Get job and step details for job execution instance." - description: "" - operationId: "getExecutionInfo" - produces: - - "application/json" - parameters: - - name: "jobExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/JobExecutionDetailsResponse" - delete: - tags: - - "jobs" - summary: "Stop or abandon a running job execution." - description: "" - operationId: "stopOrAbandonJobExecution" - produces: - - "application/json" - parameters: - - name: "jobExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - - name: "operation" - in: "query" - required: true - type: "string" - enum: - - "STOP" - - "ABANDON" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/JobExecutionInfoResponse" - /jobs/executions/{jobExecutionId}/context: - get: - tags: - - "jobs" - summary: "Get execution context for specific job." - description: "" - operationId: "getExecutionContextByJobExecId" - produces: - - "application/json" - parameters: - - name: "jobExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/ExecutionContextResponse" - /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}: - get: - tags: - - "jobs" - summary: "Get step execution details." - description: "" - operationId: "getStepExecution" - produces: - - "application/json" - parameters: - - name: "jobExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - - name: "stepExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/StepExecutionInfoResponse" - /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}/execution-context: - get: - tags: - - "jobs" - summary: "Get the execution context of step execution." - description: "" - operationId: "getStepExecutionContext" - produces: - - "application/json" - parameters: - - name: "jobExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - - name: "stepExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/StepExecutionContextResponse" - /jobs/executions/{jobExecutionId}/steps/{stepExecutionId}/progress: - get: - tags: - - "jobs" - summary: "Get progress of step execution." 
- description: "" - operationId: "getStepExecutionProgress" - produces: - - "application/json" - parameters: - - name: "jobExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - - name: "stepExecutionId" - in: "path" - required: true - type: "integer" - format: "int64" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/StepExecutionProgressResponse" - /jobs/info/names: - get: - tags: - - "jobs" - summary: "Get all job names" - description: "" - operationId: "getAllJobNames" - produces: - - "application/json" - parameters: [] - responses: - 200: - description: "successful operation" - schema: - type: "array" - uniqueItems: true - items: - type: "string" - /jobs/{jobName}: - post: - tags: - - "jobs" - summary: "Start a new job instance by job name." - description: "" - operationId: "startJob" - produces: - - "application/json" - parameters: - - name: "jobName" - in: "path" - required: true - type: "string" - - name: "params" - in: "query" - required: false - type: "string" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/JobExecutionInfoResponse" - /jobs/{jobName}/executions: - get: - tags: - - "jobs" - summary: "Get the id values of all the running job instances." - description: "" - operationId: "getExecutionIdsByJobName" - produces: - - "application/json" - parameters: - - name: "jobName" - in: "path" - required: true - type: "string" - responses: - 200: - description: "successful operation" - schema: - type: "array" - uniqueItems: true - items: - type: "integer" - format: "int64" - /jobs/{jobName}/info: - get: - tags: - - "jobs" - summary: "Get job details by job name." - description: "" - operationId: "getJobDetails" - produces: - - "application/json" - parameters: - - name: "page" - in: "query" - required: false - type: "integer" - default: 0 - format: "int32" - - name: "size" - in: "query" - required: false - type: "integer" - default: 20 - format: "int32" - - name: "jobName" - in: "path" - required: true - type: "string" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/JobDetailsResponse" - /jobs/{jobName}/{jobInstanceId}/executions: - get: - tags: - - "jobs" - summary: "Get execution for job instance." - description: "" - operationId: "getExecutionsForInstance" - produces: - - "application/json" - parameters: - - name: "jobName" - in: "path" - required: true - type: "string" - - name: "jobInstanceId" - in: "path" - required: true - type: "integer" - format: "int64" - responses: - 200: - description: "successful operation" - schema: - type: "array" - items: - $ref: "#/definitions/JobExecutionInfoResponse" - post: - tags: - - "jobs" - summary: "Restart job instance." 
- description: "" - operationId: "restartJobInstance" - produces: - - "application/json" - parameters: - - in: "body" - name: "body" - required: false - schema: - $ref: "#/definitions/JobExecutionRestartRequest" - responses: - 200: - description: "successful operation" - schema: - $ref: "#/definitions/JobExecutionInfoResponse" -definitions: - JobExecutionData: - type: "object" - properties: - id: - type: "integer" - format: "int64" - executionContext: - $ref: "#/definitions/ExecutionContext" - jobInstance: - $ref: "#/definitions/JobInstance" - jobId: - type: "integer" - format: "int64" - jobParameters: - $ref: "#/definitions/JobParameters" - failureExceptions: - type: "array" - items: - $ref: "#/definitions/Throwable" - endTime: - type: "string" - format: "date-time" - exitStatus: - $ref: "#/definitions/ExitStatus" - createTime: - type: "string" - format: "date-time" - lastUpdated: - type: "string" - format: "date-time" - jobConfigurationName: - type: "string" - startTime: - type: "string" - format: "date-time" - status: - type: "string" - enum: - - "COMPLETED" - - "STARTING" - - "STARTED" - - "STOPPING" - - "STOPPED" - - "FAILED" - - "ABANDONED" - - "UNKNOWN" - stepExecutionDataList: - type: "array" - items: - $ref: "#/definitions/StepExecutionData" - JobInstance: - type: "object" - properties: - id: - type: "integer" - format: "int64" - version: - type: "integer" - format: "int32" - jobName: - type: "string" - instanceId: - type: "integer" - format: "int64" - StepExecutionData: - type: "object" - properties: - id: - type: "integer" - format: "int64" - jobExecutionId: - type: "integer" - format: "int64" - executionContext: - $ref: "#/definitions/ExecutionContext" - stepName: - type: "string" - terminateOnly: - type: "boolean" - default: false - failureExceptions: - type: "array" - items: - $ref: "#/definitions/Throwable" - endTime: - type: "string" - format: "date-time" - exitStatus: - $ref: "#/definitions/ExitStatus" - lastUpdated: - type: "string" - format: "date-time" - commitCount: - type: "integer" - format: "int32" - readCount: - type: "integer" - format: "int32" - filterCount: - type: "integer" - format: "int32" - writeCount: - type: "integer" - format: "int32" - readSkipCount: - type: "integer" - format: "int32" - writeSkipCount: - type: "integer" - format: "int32" - processSkipCount: - type: "integer" - format: "int32" - rollbackCount: - type: "integer" - format: "int32" - startTime: - type: "string" - format: "date-time" - status: - type: "string" - enum: - - "COMPLETED" - - "STARTING" - - "STARTED" - - "STOPPING" - - "STOPPED" - - "FAILED" - - "ABANDONED" - - "UNKNOWN" - StackTraceElement: - type: "object" - properties: - methodName: - type: "string" - fileName: - type: "string" - lineNumber: - type: "integer" - format: "int32" - className: - type: "string" - nativeMethod: - type: "boolean" - default: false - JobExecutionDetailsResponse: - type: "object" - properties: - jobExecutionInfoResponse: - $ref: "#/definitions/JobExecutionInfoResponse" - stepExecutionInfoList: - type: "array" - items: - $ref: "#/definitions/StepExecutionInfoResponse" - StepExecutionContextResponse: - type: "object" - properties: - executionContextMap: - type: "object" - additionalProperties: - type: "object" - jobExecutionId: - type: "integer" - format: "int64" - stepExecutionId: - type: "integer" - format: "int64" - stepName: - type: "string" - StepExecutionProgress: - type: "object" - properties: - estimatedPercentCompleteMessage: - $ref: "#/definitions/MessageSourceResolvable" - 
estimatedPercentComplete: - type: "number" - format: "double" - ExitStatus: - type: "object" - properties: - exitCode: - type: "string" - exitDescription: - type: "string" - running: - type: "boolean" - default: false - ExecutionContextResponse: - type: "object" - properties: - jobExecutionId: - type: "integer" - format: "int64" - executionContextMap: - type: "object" - additionalProperties: - type: "object" - StepExecutionHistory: - type: "object" - properties: - stepName: - type: "string" - count: - type: "integer" - format: "int32" - commitCount: - $ref: "#/definitions/CumulativeHistory" - rollbackCount: - $ref: "#/definitions/CumulativeHistory" - readCount: - $ref: "#/definitions/CumulativeHistory" - writeCount: - $ref: "#/definitions/CumulativeHistory" - filterCount: - $ref: "#/definitions/CumulativeHistory" - readSkipCount: - $ref: "#/definitions/CumulativeHistory" - writeSkipCount: - $ref: "#/definitions/CumulativeHistory" - processSkipCount: - $ref: "#/definitions/CumulativeHistory" - duration: - $ref: "#/definitions/CumulativeHistory" - durationPerRead: - $ref: "#/definitions/CumulativeHistory" - TimeZone: - type: "object" - properties: - displayName: - type: "string" - id: - type: "string" - dstsavings: - type: "integer" - format: "int32" - rawOffset: - type: "integer" - format: "int32" - MessageSourceResolvable: - type: "object" - properties: - arguments: - type: "array" - items: - type: "object" - codes: - type: "array" - items: - type: "string" - defaultMessage: - type: "string" - ExecutionContext: - type: "object" - properties: - dirty: - type: "boolean" - default: false - empty: - type: "boolean" - default: false - StepExecutionInfoResponse: - type: "object" - properties: - id: - type: "integer" - format: "int64" - jobExecutionId: - type: "integer" - format: "int64" - jobName: - type: "string" - name: - type: "string" - startDate: - type: "string" - startTime: - type: "string" - duration: - type: "string" - durationMillis: - type: "integer" - format: "int64" - exitCode: - type: "string" - status: - type: "string" - JobExecutionInfoResponse: - type: "object" - properties: - id: - type: "integer" - format: "int64" - stepExecutionCount: - type: "integer" - format: "int32" - jobId: - type: "integer" - format: "int64" - jobName: - type: "string" - startDate: - type: "string" - startTime: - type: "string" - duration: - type: "string" - jobExecutionData: - $ref: "#/definitions/JobExecutionData" - jobParameters: - type: "object" - additionalProperties: - type: "object" - jobParametersString: - type: "string" - restartable: - type: "boolean" - default: false - abandonable: - type: "boolean" - default: false - stoppable: - type: "boolean" - default: false - timeZone: - $ref: "#/definitions/TimeZone" - JobInfo: - type: "object" - properties: - name: - type: "string" - executionCount: - type: "integer" - format: "int32" - launchable: - type: "boolean" - default: false - incrementable: - type: "boolean" - default: false - jobInstanceId: - type: "integer" - format: "int64" - JobExecutionRestartRequest: - type: "object" - properties: - jobName: - type: "string" - jobInstanceId: - type: "integer" - format: "int64" - operation: - type: "string" - enum: - - "RESTART" - Throwable: - type: "object" - properties: - cause: - $ref: "#/definitions/Throwable" - stackTrace: - type: "array" - items: - $ref: "#/definitions/StackTraceElement" - message: - type: "string" - localizedMessage: - type: "string" - suppressed: - type: "array" - items: - $ref: "#/definitions/Throwable" - JobParameters: - type: 
"object" - properties: - parameters: - type: "object" - additionalProperties: - $ref: "#/definitions/JobParameter" - empty: - type: "boolean" - default: false - CumulativeHistory: - type: "object" - properties: - count: - type: "integer" - format: "int32" - min: - type: "number" - format: "double" - max: - type: "number" - format: "double" - standardDeviation: - type: "number" - format: "double" - mean: - type: "number" - format: "double" - JobInstanceDetailsResponse: - type: "object" - properties: - jobInstance: - $ref: "#/definitions/JobInstance" - jobExecutionInfoResponseList: - type: "array" - items: - $ref: "#/definitions/JobExecutionInfoResponse" - JobParameter: - type: "object" - properties: - identifying: - type: "boolean" - default: false - value: - type: "object" - type: - type: "string" - enum: - - "STRING" - - "DATE" - - "LONG" - - "DOUBLE" - StepExecutionProgressResponse: - type: "object" - properties: - stepExecutionProgress: - $ref: "#/definitions/StepExecutionProgress" - stepExecutionHistory: - $ref: "#/definitions/StepExecutionHistory" - stepExecutionInfoResponse: - $ref: "#/definitions/StepExecutionInfoResponse" - JobDetailsResponse: - type: "object" - properties: - jobInfo: - $ref: "#/definitions/JobInfo" - jobInstanceDetailsResponseList: - type: "array" - items: - $ref: "#/definitions/JobInstanceDetailsResponse" diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-1.png b/ambari-infra/ambari-infra-manager/docs/images/batch-1.png deleted file mode 100644 index d763852cffe..00000000000 Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-1.png and /dev/null differ diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-2.png b/ambari-infra/ambari-infra-manager/docs/images/batch-2.png deleted file mode 100644 index 1de34795e86..00000000000 Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-2.png and /dev/null differ diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-3.png b/ambari-infra/ambari-infra-manager/docs/images/batch-3.png deleted file mode 100644 index 7f1123c7094..00000000000 Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-3.png and /dev/null differ diff --git a/ambari-infra/ambari-infra-manager/docs/images/batch-4.png b/ambari-infra/ambari-infra-manager/docs/images/batch-4.png deleted file mode 100644 index beb610ad94a..00000000000 Binary files a/ambari-infra/ambari-infra-manager/docs/images/batch-4.png and /dev/null differ diff --git a/ambari-infra/ambari-infra-manager/pom.xml b/ambari-infra/ambari-infra-manager/pom.xml deleted file mode 100644 index d25440f9967..00000000000 --- a/ambari-infra/ambari-infra-manager/pom.xml +++ /dev/null @@ -1,465 +0,0 @@ - - - - - ambari-infra - org.apache.ambari - 2.0.0.0-SNAPSHOT - - Ambari Infra Manager - http://maven.apache.org - 4.0.0 - - ambari-infra-manager - - - 4.3.17.RELEASE - 4.2.4.RELEASE - 2.2.0.RELEASE - 2.25.1 - 9.4.11.v20180605 - 3.0.7.RELEASE - 3.8.11.2 - 2.0.2.RELEASE - 1.5.13.RELEASE - 1.5.16 - 0.6.0 - - - - ambari-infra-manager_${project.version} - - - org.apache.maven.plugins - maven-compiler-plugin - 3.0 - - ${jdk.version} - ${jdk.version} - - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - - - - java - - - - - org.apache.ambari.infra.InfraManager - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - - org.apache.maven.plugins - maven-dependency-plugin - 2.8 - - - copy-dependencies - package - - copy-dependencies - - - true - ${basedir}/target/libs - false - false - true - - - - - - 
org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - package - - - - - - - - - run - - - - - - - - - - junit - junit - test - - - org.easymock - easymock - 3.4 - test - - - org.hamcrest - hamcrest-all - 1.3 - test - - - - org.springframework - spring-beans - ${spring.version} - - - org.springframework - spring-context - ${spring.version} - - - org.springframework - spring-test - ${spring.version} - - - - org.springframework.security - spring-security-web - ${spring.security.version} - - - org.springframework.security - spring-security-core - ${spring.security.version} - - - org.springframework.security - spring-security-config - ${spring.security.version} - - - org.springframework.security - spring-security-ldap - ${spring.security.version} - - - - org.springframework.boot - spring-boot-starter - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-logging - - - - - org.springframework.boot - spring-boot-starter-log4j2 - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-web - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-security - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-actuator - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-jetty - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-jersey - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-freemarker - ${spring-boot.version} - - - org.springframework.boot - spring-boot-autoconfigure - ${spring-boot.version} - - - org.springframework.boot - spring-boot-configuration-processor - ${spring-boot.version} - - - org.glassfish.jersey.media - jersey-media-json-jettison - ${jersey.version} - - - - org.apache.solr - solr-solrj - ${solr.version} - - - org.apache.solr - solr-core - ${solr.version} - - - * - * - - - - - org.apache.lucene - lucene-core - ${solr.version} - - - org.apache.lucene - lucene-analyzers-common - ${solr.version} - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - javax.servlet - servlet-api - - - org.mortbay.jetty - jetty - - - org.mortbay.jetty - jetty-util - - - com.sun.jersey - jetty-util - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - org.slf4j - slf4j-log4j12 - - - org.eclipse.jetty - jetty-server - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-security - - - - - org.apache.hadoop - hadoop-hdfs-client - ${hadoop.version} - - - com.fasterxml.jackson.core - jackson-annotations - - - - - commons-io - commons-io - 2.4 - - - commons-cli - commons-cli - - - commons-codec - commons-codec - - - commons-lang - commons-lang - - - org.springframework.security.kerberos - spring-security-kerberos-core - 1.0.1.RELEASE - - - org.springframework.security.kerberos - spring-security-kerberos-web - 1.0.1.RELEASE - - - org.springframework.security.kerberos - spring-security-kerberos-client - 1.0.1.RELEASE - - - com.thoughtworks.xstream - xstream - 1.4.10 - - - cglib - cglib - 3.2.4 - - - io.swagger - swagger-annotations - ${swagger.version} - - - io.swagger - swagger-core - ${swagger.version} - - - io.swagger - swagger-jersey2-jaxrs - ${swagger.version} - - - javax.ws.rs - jsr311-api - - - - - io.swagger - swagger-models - ${swagger.version} - - - org.webjars - swagger-ui - 2.2.2 - - - org.springframework - spring-context-support - ${spring.version} - - - org.springframework.batch - spring-batch-core - 
${spring-batch.version} - - - org.springframework - spring-jdbc - ${spring.version} - - - io.jsonwebtoken - jjwt - ${jjwt.version} - - - org.xerial - sqlite-jdbc - ${sqlite.version} - - - org.springframework.batch - spring-batch-admin-manager - 1.3.1.RELEASE - - - org.slf4j - slf4j-log4j12 - - - - - guava - com.google.guava - 20.0 - - - com.amazonaws - aws-java-sdk-s3 - 1.11.5 - - - org.apache.commons - commons-csv - 1.5 - - - org.springframework.boot - spring-boot-starter-tomcat - ${spring-boot.version} - provided - - - - diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java deleted file mode 100644 index 938cfd0152b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra; - -import org.springframework.boot.Banner; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration; -import org.springframework.boot.autoconfigure.data.rest.RepositoryRestMvcAutoConfiguration; -import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; -import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration; -import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration; -import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.boot.system.ApplicationPidFileWriter; - -@SpringBootApplication( - scanBasePackages = {"org.apache.ambari.infra"}, - exclude = { - RepositoryRestMvcAutoConfiguration.class, - WebMvcAutoConfiguration.class, - BatchAutoConfiguration.class, - SecurityAutoConfiguration.class, - DataSourceAutoConfiguration.class, - SolrAutoConfiguration.class - } -) -public class InfraManager { - - public static void main(String[] args) { - String pidFile = System.getenv("INFRA_MANAGER_PID_FILE") == null ? 
"infra-manager.pid" : System.getenv("INFRA_MANAGER_PID_FILE"); - new SpringApplicationBuilder(InfraManager.class) - .bannerMode(Banner.Mode.OFF) - .listeners(new ApplicationPidFileWriter(pidFile)) - .web(true) - .run(args); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java deleted file mode 100644 index 4c76742eff7..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerApiDocConfig.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf; - -import io.swagger.jaxrs.config.BeanConfig; -import io.swagger.jaxrs.listing.ApiListingResource; -import io.swagger.jaxrs.listing.SwaggerSerializers; -import io.swagger.models.Info; -import io.swagger.models.License; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class InfraManagerApiDocConfig { - - private static final String DESCRIPTION = "Manager component for Ambari Infra"; - private static final String VERSION = "1.0.0"; - private static final String TITLE = "Infra Manager REST API"; - private static final String LICENSE = "Apache 2.0"; - private static final String LICENSE_URL = "http://www.apache.org/licenses/LICENSE-2.0.html"; - private static final String RESOURCE_PACKAGE = "org.apache.ambari.infra.rest"; - private static final String BASE_PATH = "/api/v1"; - - @Bean - public ApiListingResource apiListingResource() { - return new ApiListingResource(); - } - - @Bean - public SwaggerSerializers swaggerSerializers() { - return new SwaggerSerializers(); - } - - @Bean - public BeanConfig swaggerConfig() { - BeanConfig beanConfig = new BeanConfig(); - beanConfig.setSchemes(new String[]{"http", "https"}); - beanConfig.setBasePath(BASE_PATH); - beanConfig.setTitle(TITLE); - beanConfig.setDescription(DESCRIPTION); - beanConfig.setLicense(LICENSE); - beanConfig.setLicenseUrl(LICENSE_URL); - beanConfig.setScan(true); - beanConfig.setVersion(VERSION); - beanConfig.setResourcePackage(RESOURCE_PACKAGE); - - License license = new License(); - license.setName(LICENSE); - license.setUrl(LICENSE_URL); - - Info info = new Info(); - info.setDescription(DESCRIPTION); - info.setTitle(TITLE); - info.setVersion(VERSION); - info.setLicense(license); - beanConfig.setInfo(info); - return beanConfig; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java 
deleted file mode 100644 index 86059a24a59..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerConfig.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.PropertySource; -import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; - -@Configuration -@ComponentScan("org.apache.ambari.infra") -@PropertySource(value = {"classpath:infra-manager.properties"}) -public class InfraManagerConfig { - - @Bean - public static PropertySourcesPlaceholderConfigurer propertyConfigurer() { - return new PropertySourcesPlaceholderConfigurer(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerDataConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerDataConfig.java deleted file mode 100644 index b5b215e31ee..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerDataConfig.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.conf; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class InfraManagerDataConfig { - - @Value("${infra-manager.server.data.folder:/opt/ambari-infra-manager/data}") - private String dataFolder; - - public String getDataFolder() { - return dataFolder; - } - - public void setDataFolder(String dataFolder) { - this.dataFolder = dataFolder; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java deleted file mode 100644 index 3a4c00f8656..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerJerseyResourceConfig.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf; - -import org.apache.ambari.infra.rest.JobResource; -import org.glassfish.jersey.jackson.JacksonFeature; -import org.glassfish.jersey.server.ResourceConfig; -import org.glassfish.jersey.servlet.ServletProperties; - -import javax.ws.rs.ApplicationPath; - -@ApplicationPath("/api/v1") -public class InfraManagerJerseyResourceConfig extends ResourceConfig { - - public InfraManagerJerseyResourceConfig() { - packages(JobResource.class.getPackage().getName()); - register(JacksonFeature.class); - property(ServletProperties.FILTER_FORWARD_ON_404, true); - } - -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerSchedulingConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerSchedulingConfig.java deleted file mode 100644 index bb495a20691..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerSchedulingConfig.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.scheduling.TaskScheduler; -import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; - -@Configuration -public class InfraManagerSchedulingConfig { - @Bean - public TaskScheduler taskScheduler() { - return new ThreadPoolTaskScheduler(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java deleted file mode 100644 index 06aea79b698..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/InfraManagerServletConfig.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.conf; - -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.DefaultServlet; -import org.glassfish.jersey.servlet.ServletContainer; -import org.glassfish.jersey.servlet.ServletProperties; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.autoconfigure.web.ServerProperties; -import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory; -import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainer; -import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory; -import org.springframework.boot.web.servlet.ServletRegistrationBean; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import javax.inject.Inject; - -@Configuration -public class InfraManagerServletConfig { - - private static final Integer SESSION_TIMEOUT = 60 * 30; - private static final String INFRA_MANAGER_SESSIONID = "INFRAMANAGER_SESSIONID"; - private static final String INFRA_MANAGER_APPLICATION_NAME = "infra-manager"; - - @Value("${infra-manager.server.port:61890}") - private int port; - - @Inject - private ServerProperties serverProperties; - - @Inject - private InfraManagerDataConfig infraManagerDataConfig; - - - @Bean - public ServletRegistrationBean jerseyServlet() { - ServletRegistrationBean jerseyServletBean = new ServletRegistrationBean(new ServletContainer(), "/api/v1/*"); - jerseyServletBean.addInitParameter(ServletProperties.JAXRS_APPLICATION_CLASS, InfraManagerJerseyResourceConfig.class.getName()); - return jerseyServletBean; - } - - @Bean - public ServletRegistrationBean dataServlet() { - ServletRegistrationBean dataServletBean = new ServletRegistrationBean(new DefaultServlet(), "/files/*"); - dataServletBean.addInitParameter("dirAllowed","true"); - dataServletBean.addInitParameter("pathInfoOnly","true"); - dataServletBean.addInitParameter("resourceBase", infraManagerDataConfig.getDataFolder()); - return dataServletBean; - } - - @Bean - public EmbeddedServletContainerFactory containerFactory() { - final JettyEmbeddedServletContainerFactory jettyEmbeddedServletContainerFactory = new JettyEmbeddedServletContainerFactory() { - @Override - protected JettyEmbeddedServletContainer getJettyEmbeddedServletContainer(Server server) { - return new JettyEmbeddedServletContainer(server); - } - }; - jettyEmbeddedServletContainerFactory.setSessionTimeout(SESSION_TIMEOUT); - serverProperties.getSession().getCookie().setName(INFRA_MANAGER_SESSIONID); - serverProperties.setDisplayName(INFRA_MANAGER_APPLICATION_NAME); - jettyEmbeddedServletContainerFactory.setPort(port); - return jettyEmbeddedServletContainerFactory; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java deleted file mode 100644 index f0cd3cf3ece..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/StaticResourceConfiguration.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf; - -import org.springframework.context.annotation.Configuration; -import org.springframework.web.servlet.config.annotation.EnableWebMvc; -import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; -import org.springframework.web.servlet.config.annotation.ViewControllerRegistry; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; - -@EnableWebMvc -@Configuration -public class StaticResourceConfiguration extends WebMvcConfigurerAdapter { - - private static final String[] CLASSPATH_RESOURCE_LOCATIONS = { - "classpath:/static/", "classpath:/swagger/","classpath:META-INF/resources/webjars/" - }; - - @Override - public void addResourceHandlers(ResourceHandlerRegistry registry) { - registry.addResourceHandler("/**") - .addResourceLocations(CLASSPATH_RESOURCE_LOCATIONS); - } - - @Override - public void addViewControllers(ViewControllerRegistry registry) { - registry.addViewController("/").setViewName( - "forward:/index.html"); - registry.addViewController("/docs").setViewName( - "forward:/swagger.html"); - } - -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java deleted file mode 100644 index 706ed8b4f11..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/batch/InfraManagerBatchConfig.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.conf.batch; - -import org.springframework.batch.admin.service.JdbcSearchableJobExecutionDao; -import org.springframework.batch.admin.service.JdbcSearchableJobInstanceDao; -import org.springframework.batch.admin.service.JdbcSearchableStepExecutionDao; -import org.springframework.batch.admin.service.JobService; -import org.springframework.batch.admin.service.SearchableJobExecutionDao; -import org.springframework.batch.admin.service.SearchableJobInstanceDao; -import org.springframework.batch.admin.service.SearchableStepExecutionDao; -import org.springframework.batch.admin.service.SimpleJobService; -import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.launch.JobOperator; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.launch.support.SimpleJobOperator; -import org.springframework.batch.core.repository.ExecutionContextSerializer; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer; -import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.Resource; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DriverManagerDataSource; -import org.springframework.jdbc.datasource.init.DataSourceInitializer; -import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; -import org.springframework.scheduling.annotation.EnableAsync; -import org.springframework.scheduling.annotation.EnableScheduling; -import org.springframework.transaction.PlatformTransactionManager; - -import javax.inject.Inject; -import javax.sql.DataSource; - -@Configuration -@EnableBatchProcessing -@EnableScheduling -@EnableAsync -public class InfraManagerBatchConfig { - - @Value("classpath:org/springframework/batch/core/schema-drop-sqlite.sql") - private Resource dropRepositoryTables; - - @Value("classpath:org/springframework/batch/core/schema-sqlite.sql") - private Resource dataRepositorySchema; - - @Value("${infra-manager.batch.db.init:false}") - private boolean dropDatabaseOnStartup; - - @Value("${infra-manager.batch.db.file:/etc/ambari-inra-manager/conf/repository.db}") - private String sqliteDbFileLocation; - - @Value("${infra-manager.batch.db.username}") - private String databaseUsername; - - @Value("${infra-manager.batch.db.password}") - private String databasePassword; - - @Inject - private JobRegistry jobRegistry; - - @Bean - public DataSource dataSource() { - DriverManagerDataSource dataSource = new 
DriverManagerDataSource(); - dataSource.setDriverClassName("org.sqlite.JDBC"); - dataSource.setUrl("jdbc:sqlite:" + sqliteDbFileLocation); - dataSource.setUsername(databaseUsername); - dataSource.setPassword(databasePassword); - return dataSource; - } - - @Bean - public DataSourceInitializer dataSourceInitializer() { - ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); - if (dropDatabaseOnStartup) { - databasePopulator.addScript(dropRepositoryTables); - databasePopulator.setIgnoreFailedDrops(true); - } - databasePopulator.addScript(dataRepositorySchema); - databasePopulator.setContinueOnError(true); - - DataSourceInitializer initializer = new DataSourceInitializer(); - initializer.setDataSource(dataSource()); - initializer.setDatabasePopulator(databasePopulator); - - return initializer; - } - - @Bean - public ExecutionContextSerializer executionContextSerializer() { - return new Jackson2ExecutionContextStringSerializer(); - } - - @Bean - public JobRepository jobRepository() throws Exception { - JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); - factory.setDataSource(dataSource()); - factory.setTransactionManager(transactionManager()); - factory.setSerializer(executionContextSerializer()); - factory.afterPropertiesSet(); - return factory.getObject(); - } - - @Bean - public PlatformTransactionManager transactionManager() { - return new ResourcelessTransactionManager(); - } - - @Bean(name = "jobLauncher") - public JobLauncher jobLauncher() throws Exception { - SimpleJobLauncher jobLauncher = new SimpleJobLauncher(); - jobLauncher.setJobRepository(jobRepository()); - jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); - jobLauncher.afterPropertiesSet(); - return jobLauncher; - } - - @Bean - public JobOperator jobOperator() throws Exception { - SimpleJobOperator jobOperator = new SimpleJobOperator(); - jobOperator.setJobExplorer(jobExplorer()); - jobOperator.setJobLauncher(jobLauncher()); - jobOperator.setJobRegistry(jobRegistry); - jobOperator.setJobRepository(jobRepository()); - return jobOperator; - } - - @Bean - public JobExplorer jobExplorer() throws Exception { - JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean(); - factoryBean.setSerializer(executionContextSerializer()); - factoryBean.setDataSource(dataSource()); - factoryBean.afterPropertiesSet(); - return factoryBean.getObject(); - } - - @Bean - public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor() { - JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor(); - jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry); - return jobRegistryBeanPostProcessor; - } - - @Bean - public JdbcTemplate jdbcTemplate() { - return new JdbcTemplate(dataSource()); - } - - @Bean - public SearchableJobInstanceDao searchableJobInstanceDao() { - JdbcSearchableJobInstanceDao dao = new JdbcSearchableJobInstanceDao(); - dao.setJdbcTemplate(jdbcTemplate()); - return dao; - } - - @Bean - public SearchableJobExecutionDao searchableJobExecutionDao() { - JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao(); - dao.setJdbcTemplate(jdbcTemplate()); - dao.setDataSource(dataSource()); - return dao; - } - - @Bean - public SearchableStepExecutionDao searchableStepExecutionDao() { - JdbcSearchableStepExecutionDao dao = new JdbcSearchableStepExecutionDao(); - dao.setDataSource(dataSource()); - dao.setJdbcTemplate(jdbcTemplate()); - return dao; - } - - @Bean - public ExecutionContextDao executionContextDao() { - JdbcExecutionContextDao 
dao = new JdbcExecutionContextDao(); - dao.setSerializer(executionContextSerializer()); - dao.setJdbcTemplate(jdbcTemplate()); - return dao; - } - - @Bean - public JobService jobService() throws Exception { - return new - SimpleJobService(searchableJobInstanceDao(), searchableJobExecutionDao(), searchableStepExecutionDao(), - jobRepository(), jobLauncher(), jobRegistry, executionContextDao()); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java deleted file mode 100644 index 6d32963ecc3..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/CompositePasswordStore.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf.security; - -import java.util.Optional; - -public class CompositePasswordStore implements PasswordStore { - private PasswordStore[] passwordStores; - - public CompositePasswordStore(PasswordStore... passwordStores) { - this.passwordStores = passwordStores; - } - - @Override - public Optional getPassword(String propertyName) { - for (PasswordStore passwordStore : passwordStores) { - Optional optionalPassword = passwordStore.getPassword(propertyName); - if (optionalPassword.isPresent()) - return optionalPassword; - } - return Optional.empty(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java deleted file mode 100644 index 9e1a17f8a06..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/HadoopCredentialStore.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.conf.security; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Optional; - -import static org.apache.commons.lang.StringUtils.isBlank; -import static org.apache.commons.lang3.ArrayUtils.isNotEmpty; - -public class HadoopCredentialStore implements PasswordStore { - private static final Logger LOG = LoggerFactory.getLogger(InfraManagerSecurityConfig.class); - public static final String CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY = "hadoop.security.credential.provider.path"; - - private final String credentialStoreProviderPath; - - public HadoopCredentialStore(String credentialStoreProviderPath) { - this.credentialStoreProviderPath = credentialStoreProviderPath; - } - - @Override - public Optional getPassword(String propertyName) { - try { - if (isBlank(credentialStoreProviderPath)) { - return Optional.empty(); - } - - org.apache.hadoop.conf.Configuration config = new org.apache.hadoop.conf.Configuration(); - config.set(CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY, credentialStoreProviderPath); - char[] passwordChars = config.getPassword(propertyName); - return (isNotEmpty(passwordChars)) ? Optional.of(new String(passwordChars)) : Optional.empty(); - } catch (Exception e) { - LOG.warn("Could not load password {} from credential store.", propertyName); - return Optional.empty(); - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java deleted file mode 100644 index 45b79b36695..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/InfraManagerSecurityConfig.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.conf.security; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.infra.conf.security.HadoopCredentialStore.CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY; - -@Configuration -public class InfraManagerSecurityConfig { - - @Value("${"+ CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY + ":}") - private String credentialStoreProviderPath; - - - @Bean - public PasswordStore passwords() { - return new CompositePasswordStore(new HadoopCredentialStore(credentialStoreProviderPath), new SecurityEnvironment()); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java deleted file mode 100644 index 19848feac86..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/PasswordStore.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.conf.security; - -import java.util.Optional; - -public interface PasswordStore { - Optional getPassword(String propertyName); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java deleted file mode 100644 index 8e3387b4f51..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/conf/security/SecurityEnvironment.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.conf.security; - -import java.util.Optional; - -public class SecurityEnvironment implements PasswordStore { - @Override - public Optional getPassword(String propertyName) { - return Optional.ofNullable(System.getenv(propertyName)); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java deleted file mode 100644 index e536d9a39e8..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/doc/InfraManagerApiDocStorage.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.doc; - -import io.swagger.jaxrs.config.BeanConfig; -import io.swagger.models.Swagger; -import io.swagger.util.Yaml; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -@Named -public class InfraManagerApiDocStorage { - - private static final Logger LOG = LoggerFactory.getLogger(InfraManagerApiDocStorage.class); - - private final Map swaggerMap = new ConcurrentHashMap<>(); - - @Inject - private BeanConfig beanConfig; - - @PostConstruct - private void postConstruct() { - Thread loadApiDocThread = new Thread("load_swagger_api_doc") { - @Override - public void run() { - LOG.info("Start thread to scan REST API doc from endpoints."); - Swagger swagger = beanConfig.getSwagger(); - beanConfig.configure(swagger); - beanConfig.scanAndRead(); - setSwagger(swagger); - try { - if (swagger != null) { - String yaml = Yaml.mapper().writeValueAsString(swagger); - StringBuilder b = new StringBuilder(); - String[] parts = yaml.split("\n"); - for (String part : parts) { - b.append(part); - b.append("\n"); - } - setSwaggerYaml(b.toString()); - } - } catch (Exception e) { - e.printStackTrace(); - } - LOG.info("Scanning REST API endpoints and generating docs has been successful."); - } - }; - loadApiDocThread.setDaemon(true); - loadApiDocThread.start(); - } - - public Swagger getSwagger() { - return (Swagger) swaggerMap.get("swaggerObject"); - } - - public void setSwagger(final Swagger swagger) { - swaggerMap.put("swaggerObject", swagger); - } - - public void setSwaggerYaml(final String swaggerYaml) { - swaggerMap.put("swaggerYaml", swaggerYaml); - } - - public String getSwaggerYaml() { - return (String) swaggerMap.get("swaggerYaml"); - } - -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java 
b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java deleted file mode 100644 index 02a688560db..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/AbstractJobsConfiguration.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor; -import org.springframework.batch.core.job.builder.JobBuilder; -import org.springframework.boot.context.event.ApplicationReadyEvent; -import org.springframework.context.event.EventListener; - -import javax.annotation.PostConstruct; -import java.util.Map; - -public abstract class AbstractJobsConfiguration> { - private static final Logger LOG = LoggerFactory.getLogger(AbstractJobsConfiguration.class); - - private final Map propertyMap; - private final JobScheduler scheduler; - private final JobBuilderFactory jobs; - private final JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor; - - protected AbstractJobsConfiguration(Map propertyMap, JobScheduler scheduler, JobBuilderFactory jobs, JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor) { - this.propertyMap = propertyMap; - this.scheduler = scheduler; - this.jobs = jobs; - this.jobRegistryBeanPostProcessor = jobRegistryBeanPostProcessor; - } - - @PostConstruct - public void registerJobs() { - if (propertyMap == null) - return; - - propertyMap.keySet().stream() - .filter(key -> propertyMap.get(key).isEnabled()) - .forEach(jobName -> { - try { - propertyMap.get(jobName).validate(jobName); - LOG.info("Registering job {}", jobName); - JobBuilder jobBuilder = jobs.get(jobName).listener(new JobsPropertyMap<>(propertyMap)); - Job job = buildJob(jobBuilder); - jobRegistryBeanPostProcessor.postProcessAfterInitialization(job, jobName); - } - catch (Exception e) { - LOG.warn("Unable to register job " + jobName, e); - propertyMap.get(jobName).setEnabled(false); - } - }); - } - - @EventListener(ApplicationReadyEvent.class) - public void scheduleJobs() { - if (propertyMap == null) - return; - - propertyMap.keySet().stream() - .filter(key -> propertyMap.get(key).isEnabled()) - .forEach(jobName -> propertyMap.get(jobName).scheduling().ifPresent( - schedulingProperties -> scheduler.schedule(jobName, schedulingProperties))); - } - - protected abstract Job buildJob(JobBuilder jobBuilder); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/CloseableIterator.java 
b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/CloseableIterator.java deleted file mode 100644 index 5fa29b00992..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/CloseableIterator.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -import java.util.Iterator; - -public interface CloseableIterator extends Iterator, AutoCloseable { -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobConfigurationException.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobConfigurationException.java deleted file mode 100644 index 8c16daac0ef..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobConfigurationException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -public class JobConfigurationException extends RuntimeException { - public JobConfigurationException(String message, Exception ex) { - super(message, ex); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepository.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepository.java deleted file mode 100644 index eb7f7172897..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepository.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -import org.springframework.batch.core.StepExecution; - -public interface JobContextRepository { - StepExecution getStepExecution(Long jobExecutionId, Long id); - void updateExecutionContext(StepExecution stepExecution); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepositoryImpl.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepositoryImpl.java deleted file mode 100644 index fbb256f8f86..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobContextRepositoryImpl.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -import org.springframework.batch.admin.service.JobService; -import org.springframework.batch.admin.service.NoSuchStepExecutionException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.repository.JobRepository; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class JobContextRepositoryImpl implements JobContextRepository { - - @Inject - private JobRepository jobRepository; - @Inject - private JobService jobService; - - - @Override - public StepExecution getStepExecution(Long jobExecutionId, Long id) { - try { - return jobService.getStepExecution(jobExecutionId, id); - } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { - throw new RuntimeException(e); - } - } - - @Override - public void updateExecutionContext(StepExecution stepExecution) { - jobRepository.updateExecutionContext(stepExecution); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java deleted file mode 100644 index 79406d017e5..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobProperties.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.batch.core.JobParameters; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.Optional; - -public abstract class JobProperties<T extends JobProperties<T>> { - - private SchedulingProperties scheduling; - private final Class<T> clazz; - private boolean enabled; - - protected JobProperties(Class<T> clazz) { - this.clazz = clazz; - } - - public SchedulingProperties getScheduling() { - return scheduling; - } - - public Optional<SchedulingProperties> scheduling() { - if (scheduling != null && scheduling.isEnabled()) - return Optional.of(scheduling); - return Optional.empty(); - } - - public void setScheduling(SchedulingProperties scheduling) { - this.scheduling = scheduling; - } - - public T deepCopy() { - try { - ObjectMapper objectMapper = new ObjectMapper(); - String json = objectMapper.writeValueAsString(this); - return objectMapper.readValue(json, clazz); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public abstract void apply(JobParameters jobParameters); - - public abstract void validate(); - - public void validate(String jobName) { - try { - validate(); - } - catch (Exception ex) { - throw new JobConfigurationException(String.format("Configuration of job %s is invalid: %s!", jobName, ex.getMessage()), ex); - } - } - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java deleted file mode 100644 index 324c0b36a89..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobScheduler.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ -package org.apache.ambari.infra.job; - -import org.apache.ambari.infra.manager.Jobs; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.scheduling.TaskScheduler; -import org.springframework.scheduling.support.CronTrigger; - -import javax.inject.Inject; -import javax.inject.Named; -import java.time.Duration; -import java.time.OffsetDateTime; - -import static org.apache.ambari.infra.job.archive.FileNameSuffixFormatter.SOLR_DATETIME_FORMATTER; -import static org.apache.commons.lang.StringUtils.isBlank; - -@Named -public class JobScheduler { - private static final Logger LOG = LoggerFactory.getLogger(JobScheduler.class); - - private final TaskScheduler scheduler; - private final Jobs jobs; - - @Inject - public JobScheduler(TaskScheduler scheduler, Jobs jobs) { - this.scheduler = scheduler; - this.jobs = jobs; - } - - public void schedule(String jobName, SchedulingProperties schedulingProperties) { - try { - jobs.lastRun(jobName).ifPresent(this::restartIfFailed); - } catch (NoSuchJobException | NoSuchJobExecutionException e) { - throw new RuntimeException(e); - } - - scheduler.schedule(() -> launchJob(jobName, schedulingProperties.getIntervalEndDelta()), new CronTrigger(schedulingProperties.getCron())); - LOG.info("Job {} scheduled for running. Cron: {}", jobName, schedulingProperties.getCron()); - } - - private void restartIfFailed(JobExecution jobExecution) { - if (jobExecution.getExitStatus() == ExitStatus.FAILED) { - try { - jobs.restart(jobExecution.getId()); - } catch (JobInstanceAlreadyCompleteException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | NoSuchJobExecutionException e) { - throw new RuntimeException(e); - } - } - } - - private void launchJob(String jobName, String endDelta) { - try { - JobParametersBuilder jobParametersBuilder = new JobParametersBuilder(); - if (!isBlank(endDelta)) - jobParametersBuilder.addString("end", SOLR_DATETIME_FORMATTER.format(OffsetDateTime.now().minus(Duration.parse(endDelta)))); - - jobs.launchJob(jobName, jobParametersBuilder.toJobParameters()); - } catch (JobParametersInvalidException | NoSuchJobException | JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException e) { - throw new RuntimeException(e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java deleted file mode 100644 index 094e7978568..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/JobsPropertyMap.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; - -import java.util.Map; - -public class JobsPropertyMap<T extends JobProperties<T>> implements JobExecutionListener { - - private final Map<String, T> propertyMap; - - public JobsPropertyMap(Map<String, T> propertyMap) { - this.propertyMap = propertyMap; - } - - @Override - public void beforeJob(JobExecution jobExecution) { - try { - String jobName = jobExecution.getJobInstance().getJobName(); - T defaultProperties = propertyMap.get(jobName); - if (defaultProperties == null) - throw new UnsupportedOperationException("Properties not found for job " + jobName); - - T properties = defaultProperties.deepCopy(); - properties.apply(jobExecution.getJobParameters()); - properties.validate(jobName); - jobExecution.getExecutionContext().put("jobProperties", properties); - } - catch (UnsupportedOperationException | IllegalArgumentException ex) { - jobExecution.stop(); - jobExecution.setExitStatus(new ExitStatus(ExitStatus.FAILED.getExitCode(), ex.getMessage())); - throw ex; - } - } - - @Override - public void afterJob(JobExecution jobExecution) { - - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/ObjectSource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/ObjectSource.java deleted file mode 100644 index 98a1e0d55c1..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/ObjectSource.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ -package org.apache.ambari.infra.job; - -public interface ObjectSource { - CloseableIterator open(T current, int rows); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java deleted file mode 100644 index af81b4faedd..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SchedulingProperties.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job; - -public class SchedulingProperties { - private boolean enabled = false; - private String cron; - private String intervalEndDelta; - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - public String getCron() { - return cron; - } - - public void setCron(String cron) { - this.cron = cron; - } - - public String getIntervalEndDelta() { - return intervalEndDelta; - } - - public void setIntervalEndDelta(String intervalEndDelta) { - this.intervalEndDelta = intervalEndDelta; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java deleted file mode 100644 index 3ac5b058806..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/SolrDAOBase.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job; - -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.UncheckedIOException; - -public abstract class SolrDAOBase { - private static final Logger LOG = LoggerFactory.getLogger(SolrDAOBase.class); - - private final String zooKeeperConnectionString; - private final String defaultCollection; - - protected SolrDAOBase(String zooKeeperConnectionString, String defaultCollection) { - this.zooKeeperConnectionString = zooKeeperConnectionString; - this.defaultCollection = defaultCollection; - } - - protected void delete(String deleteQueryText) { - try (CloudSolrClient client = createClient()) { - try { - LOG.info("Executing solr delete by query {}", deleteQueryText); - client.deleteByQuery(deleteQueryText); - client.commit(); - } catch (Exception e) { - try { - client.rollback(); - } catch (SolrServerException e1) { - LOG.warn("Unable to rollback after solr delete operation failure.", e1); - } - throw new RuntimeException(e); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - protected CloudSolrClient createClient() { - CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zooKeeperConnectionString).build(); - client.setDefaultCollection(defaultCollection); - return client; - } -} - diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java deleted file mode 100644 index 3df18b69b0f..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/AbstractFileAction.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; - -public abstract class AbstractFileAction implements FileAction { - private static final Logger LOG = LoggerFactory.getLogger(AbstractFileAction.class); - - @Override - public File perform(File inputFile) { - File outputFile = onPerform(inputFile); - if (!inputFile.delete()) - LOG.warn("File {} was not deleted. 
Exists: {}", inputFile.getAbsolutePath(), inputFile.exists()); - return outputFile; - } - - protected abstract File onPerform(File inputFile); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java deleted file mode 100644 index 99bc6d97d04..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; - -import static java.util.Arrays.asList; - -public class CompositeFileAction implements FileAction { - - private final List actions; - - public CompositeFileAction(FileAction... actions) { - this.actions = new ArrayList<>(asList(actions)); - } - - public void add(FileAction action) { - actions.add(action); - } - - @Override - public File perform(File inputFile) { - File file = inputFile; - for (FileAction action : actions) { - file = action.perform(file); - } - return file; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java deleted file mode 100644 index 5ff9587a89e..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; - -import java.util.HashMap; -import java.util.Map; - -import static java.util.Collections.unmodifiableMap; - -public class Document { - private final Map fieldMap; - - private Document() { - fieldMap = new HashMap<>(); - } - - public Document(Map fieldMap) { - this.fieldMap = unmodifiableMap(fieldMap); - } - - public String get(String key) { - return fieldMap.get(key); - } - - @JsonAnyGetter - public Map getFieldMap() { - return fieldMap; - } - - @JsonAnySetter - private void put(String key, String value) { - fieldMap.put(key, value); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java deleted file mode 100644 index 8358dd08a54..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.conf.InfraManagerDataConfig; -import org.apache.ambari.infra.conf.security.PasswordStore; -import org.apache.ambari.infra.job.AbstractJobsConfiguration; -import org.apache.ambari.infra.job.JobContextRepository; -import org.apache.ambari.infra.job.JobScheduler; -import org.apache.ambari.infra.job.ObjectSource; -import org.apache.hadoop.fs.Path; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.annotation.JobScope; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; -import org.springframework.batch.core.configuration.annotation.StepScope; -import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor; -import org.springframework.batch.core.job.builder.JobBuilder; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import javax.inject.Inject; -import java.io.File; - -import static org.apache.commons.lang.StringUtils.isBlank; - -@Configuration -public class DocumentArchivingConfiguration extends AbstractJobsConfiguration { - private static final Logger LOG = LoggerFactory.getLogger(DocumentArchivingConfiguration.class); - private static final DocumentWiper NOT_DELETE = (firstDocument, lastDocument) -> { }; - - private final StepBuilderFactory steps; - private final Step exportStep; - - @Inject - public DocumentArchivingConfiguration( - DocumentArchivingPropertyMap jobsPropertyMap, - JobScheduler scheduler, - StepBuilderFactory steps, - JobBuilderFactory jobs, - @Qualifier("exportStep") Step exportStep, - JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor) { - super(jobsPropertyMap.getSolrDataArchiving(), scheduler, jobs, jobRegistryBeanPostProcessor); - this.exportStep = exportStep; - this.steps = steps; - } - - @Override - protected Job buildJob(JobBuilder jobBuilder) { - return jobBuilder.start(exportStep).build(); - } - - @Bean - @JobScope - public Step exportStep(DocumentExporter documentExporter) { - return steps.get("export") - .tasklet(documentExporter) - .build(); - } - - @Bean - @StepScope - public DocumentExporter documentExporter(DocumentItemReader documentItemReader, - @Value("#{stepExecution.jobExecution.jobId}") String jobId, - @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties, - InfraManagerDataConfig infraManagerDataConfig, - @Value("#{jobParameters[end]}") String intervalEnd, - DocumentWiper documentWiper, - JobContextRepository jobContextRepository, - PasswordStore passwordStore) { - - File baseDir = new File(infraManagerDataConfig.getDataFolder(), "exporting"); - CompositeFileAction fileAction = new CompositeFileAction(new TarGzCompressor()); - switch (properties.getDestination()) { - case S3: - fileAction.add(new S3Uploader( - properties.s3Properties().orElseThrow(() -> new IllegalStateException("S3 properties are not provided!")), - passwordStore)); - break; - case HDFS: - org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(); - conf.set("fs.defaultFS", properties.getHdfsEndpoint()); - fileAction.add(new 
HdfsUploader(conf, new Path(properties.getHdfsDestinationDirectory()))); - break; - case LOCAL: - baseDir = new File(properties.getLocalDestinationDirectory()); - break; - } - - FileNameSuffixFormatter fileNameSuffixFormatter = FileNameSuffixFormatter.from(properties); - LocalItemWriterListener itemWriterListener = new LocalItemWriterListener(fileAction, documentWiper); - File destinationDirectory = new File( - baseDir, - String.format("%s_%s_%s", - properties.getSolr().getCollection(), - jobId, - isBlank(intervalEnd) ? "" : fileNameSuffixFormatter.format(intervalEnd))); - LOG.info("Destination directory path={}", destinationDirectory); - if (!destinationDirectory.exists()) { - if (!destinationDirectory.mkdirs()) { - LOG.warn("Unable to create directory {}", destinationDirectory); - } - } - - return new DocumentExporter( - documentItemReader, - firstDocument -> new LocalDocumentItemWriter( - outFile(properties.getSolr().getCollection(), destinationDirectory, fileNameSuffixFormatter.format(firstDocument)), itemWriterListener), - properties.getWriteBlockSize(), jobContextRepository); - } - - @Bean - @StepScope - public DocumentWiper documentWiper(@Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties, - SolrDAO solrDAO) { - if (isBlank(properties.getSolr().getDeleteQueryText())) - return NOT_DELETE; - return solrDAO; - } - - @Bean - @StepScope - public SolrDAO solrDAO(@Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties) { - return new SolrDAO(properties.getSolr()); - } - - private File outFile(String collection, File directoryPath, String suffix) { - File file = new File(directoryPath, String.format("%s_-_%s.json", collection, suffix)); - LOG.info("Exporting to temp file {}", file.getAbsolutePath()); - return file; - } - - @Bean - @StepScope - public DocumentItemReader reader(ObjectSource documentSource, - @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentArchivingProperties properties) { - return new DocumentItemReader(documentSource, properties.getReadBlockSize()); - } - - @Bean - @StepScope - public ObjectSource logSource(@Value("#{jobParameters[start]}") String start, - @Value("#{jobParameters[end]}") String end, - SolrDAO solrDAO) { - - return new SolrDocumentSource(solrDAO, start, end); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java deleted file mode 100644 index b26da3656c8..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingProperties.java +++ /dev/null @@ -1,227 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.JobProperties; -import org.springframework.batch.core.JobParameters; - -import java.util.Optional; - -import static java.util.Objects.requireNonNull; -import static org.apache.ambari.infra.job.archive.ExportDestination.HDFS; -import static org.apache.ambari.infra.job.archive.ExportDestination.LOCAL; -import static org.apache.ambari.infra.job.archive.ExportDestination.S3; -import static org.apache.commons.lang.StringUtils.isBlank; - -public class DocumentArchivingProperties extends JobProperties { - private int readBlockSize; - private int writeBlockSize; - private ExportDestination destination; - private String localDestinationDirectory; - private String fileNameSuffixColumn; - private String fileNameSuffixDateFormat; - private SolrProperties solr; - private String s3AccessFile; - private String s3KeyPrefix; - private String s3BucketName; - private String s3Endpoint; - - private String hdfsEndpoint; - private String hdfsDestinationDirectory; - - public DocumentArchivingProperties() { - super(DocumentArchivingProperties.class); - } - - public int getReadBlockSize() { - return readBlockSize; - } - - public void setReadBlockSize(int readBlockSize) { - this.readBlockSize = readBlockSize; - } - - public int getWriteBlockSize() { - return writeBlockSize; - } - - public void setWriteBlockSize(int writeBlockSize) { - this.writeBlockSize = writeBlockSize; - } - - public ExportDestination getDestination() { - return destination; - } - - public void setDestination(ExportDestination destination) { - this.destination = destination; - } - - public String getLocalDestinationDirectory() { - return localDestinationDirectory; - } - - public void setLocalDestinationDirectory(String localDestinationDirectory) { - this.localDestinationDirectory = localDestinationDirectory; - } - - public String getFileNameSuffixColumn() { - return fileNameSuffixColumn; - } - - public void setFileNameSuffixColumn(String fileNameSuffixColumn) { - this.fileNameSuffixColumn = fileNameSuffixColumn; - } - - public String getFileNameSuffixDateFormat() { - return fileNameSuffixDateFormat; - } - - public void setFileNameSuffixDateFormat(String fileNameSuffixDateFormat) { - this.fileNameSuffixDateFormat = fileNameSuffixDateFormat; - } - - public SolrProperties getSolr() { - return solr; - } - - public void setSolr(SolrProperties query) { - this.solr = query; - } - - public String getS3AccessFile() { - return s3AccessFile; - } - - public void setS3AccessFile(String s3AccessFile) { - this.s3AccessFile = s3AccessFile; - } - - public String getS3KeyPrefix() { - return s3KeyPrefix; - } - - public void setS3KeyPrefix(String s3KeyPrefix) { - this.s3KeyPrefix = s3KeyPrefix; - } - - public String getS3BucketName() { - return s3BucketName; - } - - public void setS3BucketName(String s3BucketName) { - this.s3BucketName = s3BucketName; - } - - public String getS3Endpoint() { - return s3Endpoint; - } - - public void setS3Endpoint(String s3Endpoint) { - this.s3Endpoint = s3Endpoint; - } - - public Optional s3Properties() { - if 
(isBlank(s3BucketName)) - return Optional.empty(); - - return Optional.of(new S3Properties( - s3AccessFile, - s3KeyPrefix, - s3BucketName, - s3Endpoint)); - } - - public String getHdfsEndpoint() { - return hdfsEndpoint; - } - - public void setHdfsEndpoint(String hdfsEndpoint) { - this.hdfsEndpoint = hdfsEndpoint; - } - - public String getHdfsDestinationDirectory() { - return hdfsDestinationDirectory; - } - - public void setHdfsDestinationDirectory(String hdfsDestinationDirectory) { - this.hdfsDestinationDirectory = hdfsDestinationDirectory; - } - - @Override - public void apply(JobParameters jobParameters) { - readBlockSize = getIntJobParameter(jobParameters, "readBlockSize", readBlockSize); - writeBlockSize = getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize); - destination = ExportDestination.valueOf(jobParameters.getString("destination", destination.name())); - localDestinationDirectory = jobParameters.getString("localDestinationDirectory", localDestinationDirectory); - s3AccessFile = jobParameters.getString("s3AccessFile", s3AccessFile); - s3BucketName = jobParameters.getString("s3BucketName", s3BucketName); - s3KeyPrefix = jobParameters.getString("s3KeyPrefix", s3KeyPrefix); - s3Endpoint = jobParameters.getString("s3Endpoint", s3Endpoint); - hdfsEndpoint = jobParameters.getString("hdfsEndpoint", hdfsEndpoint); - hdfsDestinationDirectory = jobParameters.getString("hdfsDestinationDirectory", hdfsDestinationDirectory); - solr.apply(jobParameters); - } - - private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) { - String valueText = jobParameters.getString(parameterName); - if (isBlank(valueText)) - return defaultValue; - return Integer.parseInt(valueText); - } - - @Override - public void validate() { - if (readBlockSize == 0) - throw new IllegalArgumentException("The property readBlockSize must be greater than 0!"); - - if (writeBlockSize == 0) - throw new IllegalArgumentException("The property writeBlockSize must be greater than 0!"); - - if (isBlank(fileNameSuffixColumn)) { - throw new IllegalArgumentException("The property fileNameSuffixColumn can not be null or empty string!"); - } - - requireNonNull(destination, "The property destination can not be null!"); - switch (destination) { - case LOCAL: - if (isBlank(localDestinationDirectory)) - throw new IllegalArgumentException(String.format( - "The property localDestinationDirectory can not be null or empty string when destination is set to %s!", LOCAL.name())); - break; - - case S3: - s3Properties() - .orElseThrow(() -> new IllegalArgumentException("S3 related properties must be set if the destination is " + S3.name())) - .validate(); - break; - - case HDFS: - if (isBlank(hdfsEndpoint)) - throw new IllegalArgumentException(String.format( - "The property hdfsEndpoint can not be null or empty string when destination is set to %s!", HDFS.name())); - if (isBlank(hdfsDestinationDirectory)) - throw new IllegalArgumentException(String.format( - "The property hdfsDestinationDirectory can not be null or empty string when destination is set to %s!", HDFS.name())); - } - - requireNonNull(solr, "No solr query was specified for archiving job!"); - solr.validate(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java deleted file mode 100644 index a009031e413..00000000000 --- 
a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingPropertyMap.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.context.annotation.Configuration; - -import java.util.Map; - -@Configuration -@ConfigurationProperties(prefix = "infra-manager.jobs") -public class DocumentArchivingPropertyMap { - private Map solrDataArchiving; - - public Map getSolrDataArchiving() { - return solrDataArchiving; - } - - public void setSolrDataArchiving(Map solrDataArchiving) { - this.solrDataArchiving = solrDataArchiving; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java deleted file mode 100644 index f647a36ec05..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public interface DocumentDestination { - DocumentItemWriter open(Document firstDocument); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java deleted file mode 100644 index d87fdea72a9..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.JobContextRepository; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.repeat.RepeatStatus; - -public class DocumentExporter implements Tasklet, StepExecutionListener { - - private static final Logger LOG = LoggerFactory.getLogger(DocumentExporter.class); - - private boolean complete = false; - private final ItemStreamReader documentReader; - private final DocumentDestination documentDestination; - private final int writeBlockSize; - private final JobContextRepository jobContextRepository; - - public DocumentExporter(ItemStreamReader documentReader, DocumentDestination documentDestination, int writeBlockSize, JobContextRepository jobContextRepository) { - this.documentReader = documentReader; - this.documentDestination = documentDestination; - this.writeBlockSize = writeBlockSize; - this.jobContextRepository = jobContextRepository; - } - - @Override - public void beforeStep(StepExecution stepExecution) { - - } - - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - if (complete) { - return ExitStatus.COMPLETED; - } - else { - return ExitStatus.FAILED; - } - } - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - StepExecution stepExecution = chunkContext.getStepContext().getStepExecution(); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - documentReader.open(executionContext); - - DocumentItemWriter writer = null; - int writtenCount = 0; - try { - Document document; - while ((document = documentReader.read()) != null) { - if (writer != null && writtenCount >= writeBlockSize) { - stepExecution = jobContextRepository.getStepExecution(stepExecution.getJobExecutionId(), stepExecution.getId()); - if (stepExecution.getJobExecution().getStatus() == BatchStatus.STOPPING) { - LOG.info("Received stop signal."); - writer.revert(); - writer = null; - return RepeatStatus.CONTINUABLE; - } - - writer.close(); - writer = null; - writtenCount = 0; - documentReader.update(executionContext); - jobContextRepository.updateExecutionContext(stepExecution); - } - - if (writer == null) - writer = documentDestination.open(document); - - writer.write(document); - 
++writtenCount; - } - } - catch (Exception e) { - if (writer != null) { - writer.revert(); - writer = null; - } - throw e; - } - finally { - if (writer != null) - writer.close(); - documentReader.close(); - } - - complete = true; - return RepeatStatus.FINISHED; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java deleted file mode 100644 index 3a6b869b23a..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.CloseableIterator; -import org.apache.ambari.infra.job.ObjectSource; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.support.AbstractItemStreamItemReader; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.context.RepeatContextSupport; -import org.springframework.util.ClassUtils; - -public class DocumentItemReader extends AbstractItemStreamItemReader implements CompletionPolicy { - - public final static String POSITION = "last-read"; - - private final ObjectSource documentSource; - private final int readBlockSize; - - private CloseableIterator documentIterator = null; - private int count = 0; - private boolean eof = false; - private Document current = null; - private Document previous = null; - - public DocumentItemReader(ObjectSource documentSource, int readBlockSize) { - this.documentSource = documentSource; - this.readBlockSize = readBlockSize; - setName(ClassUtils.getShortName(DocumentItemReader.class)); - } - - @Override - public Document read() throws Exception { - if (documentIterator == null) - openStream(); - Document next = getNext(); - if (next == null && count > readBlockSize) { - openStream(); - next = getNext(); - } - eof = next == null; - if (eof && documentIterator != null) - documentIterator.close(); - - previous = current; - current = next; - return current; - } - - private Document getNext() { - ++count; - return documentIterator.next(); - } - - private void openStream() { - closeStream(); - documentIterator = documentSource.open(current, readBlockSize); - count = 0; - } - - private void closeStream() { - if (documentIterator == null) - return; - try { - documentIterator.close(); - } - catch (Exception e) { - throw new 
RuntimeException(e); - } - documentIterator = null; - } - - @Override - public void open(ExecutionContext executionContext) { - super.open(executionContext); - current = null; - previous = null; - eof = false; - documentIterator = null; - if (!executionContext.containsKey(POSITION)) - return; - - current = (Document) executionContext.get(POSITION); - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - if (previous != null) - executionContext.put(POSITION, previous); - } - - @Override - public void close() { - closeStream(); - } - - @Override - public boolean isComplete(RepeatContext context, RepeatStatus result) { - return eof; - } - - @Override - public boolean isComplete(RepeatContext context) { - return eof; - } - - @Override - public RepeatContext start(RepeatContext parent) { - return new RepeatContextSupport(parent); - } - - @Override - public void update(RepeatContext context) { - if (eof) - context.setCompleteOnly(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java deleted file mode 100644 index e96f6f10e54..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public interface DocumentItemWriter { - void write(Document document); - void revert(); - void close(); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentWiper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentWiper.java deleted file mode 100644 index 2b2a355ca5b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentWiper.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public interface DocumentWiper { - void delete(Document firstDocument, Document lastDocument); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java deleted file mode 100644 index a143e4c546a..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ExportDestination.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public enum ExportDestination { - LOCAL, - HDFS, - S3 -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java deleted file mode 100644 index 26a8c6310fe..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import java.io.File; - -public interface FileAction { - File perform(File inputFile); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java deleted file mode 100644 index f9016e61bdc..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import java.time.OffsetDateTime; -import java.time.format.DateTimeFormatter; - -import static java.util.Objects.requireNonNull; -import static org.apache.ambari.infra.job.archive.SolrDocumentIterator.SOLR_DATE_FORMAT_TEXT; -import static org.apache.commons.lang.StringUtils.isBlank; - -public class FileNameSuffixFormatter { - public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern(SOLR_DATE_FORMAT_TEXT); - - public static FileNameSuffixFormatter from(DocumentArchivingProperties properties) { - return new FileNameSuffixFormatter(properties.getFileNameSuffixColumn(), properties.getFileNameSuffixDateFormat()); - } - - - private final String columnName; - - private final DateTimeFormatter dateFormat; - - public FileNameSuffixFormatter(String columnName, String dateTimeFormat) { - this.columnName = columnName; - dateFormat = isBlank(dateTimeFormat) ? 
null : DateTimeFormatter.ofPattern(dateTimeFormat); - } - - public String format(Document document) { - requireNonNull(document, "Can not format file name suffix: input document is null!"); - - if (isBlank(document.get(columnName))) - throw new IllegalArgumentException("The specified document does not have a column " + columnName + " or it's value is blank!"); - - return format(document.get(columnName)); - } - - public String format(String value) { - if (isBlank(value)) - throw new IllegalArgumentException("The specified value is blank!"); - - if (dateFormat == null) - return value; - OffsetDateTime date = OffsetDateTime.parse(value, SOLR_DATETIME_FORMATTER); - return date.format(dateFormat); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java deleted file mode 100644 index 0f7b99fcc24..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/HdfsUploader.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
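For reference, a minimal usage sketch of the FileNameSuffixFormatter deleted above; the "logtime" column name, the output pattern and the sample timestamp are placeholders, not values taken from this change:

    package org.apache.ambari.infra.job.archive;

    public class FileNameSuffixFormatterSketch {
      public static void main(String[] args) {
        // Output pattern is free-form; input must match SOLR_DATE_FORMAT_TEXT (yyyy-MM-dd'T'HH:mm:ss.SSSX).
        FileNameSuffixFormatter formatter =
            new FileNameSuffixFormatter("logtime", "yyyy-MM-dd'T'HH-mm-ss");

        // Parses the Solr timestamp and re-renders it with the configured pattern.
        System.out.println(formatter.format("2017-10-01T12:30:45.123Z")); // 2017-10-01T12-30-45

        // With a blank date pattern the raw column value would be used as the suffix unchanged.
      }
    }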
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; - -import java.io.File; -import java.io.IOException; -import java.io.UncheckedIOException; - -public class HdfsUploader extends AbstractFileAction { - - private final Configuration configuration; - private final Path destinationDirectory; - - public HdfsUploader(Configuration configuration, Path destinationDirectory) { - this.destinationDirectory = destinationDirectory; - this.configuration = configuration; - } - - @Override - protected File onPerform(File inputFile) { - try (FileSystem fileSystem = FileSystem.get(configuration)) { - Path destination = new Path(destinationDirectory, inputFile.getName()); - if (fileSystem.exists(destination)) { - throw new UnsupportedOperationException(String.format("File '%s' already exists!", destination)); - } - - fileSystem.copyFromLocalFile(new Path(inputFile.getAbsolutePath()), destination); - - return inputFile; - } - catch (IOException e) { - throw new UncheckedIOException(e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ItemWriterListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ItemWriterListener.java deleted file mode 100644 index 33a67cbb0bf..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/ItemWriterListener.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public interface ItemWriterListener { - void onCompleted(WriteCompletedEvent event); -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java deleted file mode 100644 index 531d2d57b44..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
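A hedged usage sketch for the HdfsUploader deleted above; the namenode URI, target directory and input file are placeholder values:

    package org.apache.ambari.infra.job.archive;

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class HdfsUploaderSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode.example.com:8020"); // placeholder namenode

        // Copies the local export into /archives on HDFS; the upload fails if the target already exists.
        FileAction uploader = new HdfsUploader(conf, new Path("/archives"));
        uploader.perform(new File("/tmp/solr_archive.json.tar.gz"));  // placeholder local file
      }
    }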
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.io.IOUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; - -public class LocalDocumentItemWriter implements DocumentItemWriter { - private static final Logger LOG = LoggerFactory.getLogger(LocalDocumentItemWriter.class); - - private static final ObjectMapper json = new ObjectMapper(); - private static final String ENCODING = "UTF-8"; - - private final File outFile; - private final BufferedWriter bufferedWriter; - private final ItemWriterListener itemWriterListener; - private Document firstDocument = null; - private Document lastDocument = null; - - public LocalDocumentItemWriter(File outFile, ItemWriterListener itemWriterListener) { - this.itemWriterListener = itemWriterListener; - this.outFile = outFile; - try { - this.bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outFile), ENCODING)); - } catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); - } catch (FileNotFoundException e) { - throw new UncheckedIOException(e); - } - } - - @Override - public void write(Document document) { - try { - bufferedWriter.write(json.writeValueAsString(document)); - bufferedWriter.newLine(); - - if (firstDocument == null) - firstDocument = document; - - lastDocument = document; - } - catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - @Override - public void revert() { - IOUtils.closeQuietly(bufferedWriter); - if (!outFile.delete()) - LOG.warn("File {} was not deleted. Exists: {}", outFile.getAbsolutePath(), outFile.exists()); - } - - @Override - public void close() { - try { - bufferedWriter.close(); - if (itemWriterListener != null) - itemWriterListener.onCompleted(new WriteCompletedEvent(outFile, firstDocument, lastDocument)); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalItemWriterListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalItemWriterListener.java deleted file mode 100644 index a24d524cd93..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalItemWriterListener.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
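A minimal sketch of the LocalDocumentItemWriter deleted above, assuming Document is constructed from a String-to-String field map as in SolrDocumentIterator; the file name and field values are placeholders:

    package org.apache.ambari.infra.job.archive;

    import java.io.File;
    import java.util.HashMap;
    import java.util.Map;

    public class LocalDocumentItemWriterSketch {
      public static void main(String[] args) throws Exception {
        File out = File.createTempFile("solr_archive_", ".json");

        // No listener in this sketch; the real job passes a LocalItemWriterListener here.
        LocalDocumentItemWriter writer = new LocalDocumentItemWriter(out, null);

        Map<String, String> fields = new HashMap<>();
        fields.put("id", "1");
        fields.put("logtime", "2017-10-01T12:30:45.123Z");
        writer.write(new Document(fields)); // appended to the file as one JSON line

        writer.close(); // flushes the file; with a listener, onCompleted(...) would fire here
      }
    }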
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public class LocalItemWriterListener implements ItemWriterListener { - private final FileAction fileAction; - private final DocumentWiper documentWiper; - - public LocalItemWriterListener(FileAction fileAction, DocumentWiper documentWiper) { - this.fileAction = fileAction; - this.documentWiper = documentWiper; - } - - - @Override - public void onCompleted(WriteCompletedEvent event) { - fileAction.perform(event.getOutFile()); - documentWiper.delete(event.getFirstDocument(), event.getLastDocument()); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java deleted file mode 100644 index 879b58bc8f2..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsv.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
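A sketch of how the LocalItemWriterListener deleted above composes a FileAction with a DocumentWiper; the no-op lambda bodies stand in for the real compressor/uploader chain and for SolrDAO:

    package org.apache.ambari.infra.job.archive;

    public class LocalItemWriterListenerSketch {
      public static void main(String[] args) {
        FileAction noOpAction = file -> file;           // real job: TarGzCompressor / HdfsUploader / S3Uploader
        DocumentWiper noOpWiper = (first, last) -> { }; // real job: SolrDAO deletes the exported range

        ItemWriterListener listener = new LocalItemWriterListener(noOpAction, noOpWiper);
        // LocalDocumentItemWriter.close() calls listener.onCompleted(event), which first performs the
        // file action on the written file and then wipes the covered documents from Solr.
      }
    }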
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.conf.security.PasswordStore; -import org.apache.commons.csv.CSVParser; -import org.apache.commons.csv.CSVRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.io.UncheckedIOException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Optional; - -import static org.apache.commons.csv.CSVFormat.DEFAULT; - -public class S3AccessCsv implements PasswordStore { - private static final Logger LOG = LoggerFactory.getLogger(S3AccessCsv.class); - - public static S3AccessCsv file(String path) { - try { - return new S3AccessCsv(new FileReader(path)); - } catch (FileNotFoundException e) { - throw new UncheckedIOException(e); - } - } - - private Map passwordMap = new HashMap<>(); - - public S3AccessCsv(Reader reader) { - try (CSVParser csvParser = CSVParser.parse(reader, DEFAULT.withHeader( - S3AccessKeyNames.AccessKeyId.getCsvName(), S3AccessKeyNames.SecretAccessKey.getCsvName()))) { - Iterator iterator = csvParser.iterator(); - if (!iterator.hasNext()) { - throw new S3AccessCsvFormatException("Csv file is empty!"); - } - - CSVRecord record = iterator.next(); - if (record.size() < 2) { - throw new S3AccessCsvFormatException("Csv file contains less than 2 columns!"); - } - - checkColumnExists(record, S3AccessKeyNames.AccessKeyId); - checkColumnExists(record, S3AccessKeyNames.SecretAccessKey); - - if (!iterator.hasNext()) { - throw new S3AccessCsvFormatException("Csv file contains header only!"); - } - - record = iterator.next(); - - Map header = csvParser.getHeaderMap(); - for (S3AccessKeyNames keyNames : S3AccessKeyNames.values()) - passwordMap.put(keyNames.getEnvVariableName(), record.get(header.get(keyNames.getCsvName()))); - } catch (IOException e) { - throw new UncheckedIOException(e); - } catch (S3AccessCsvFormatException e) { - LOG.warn("Unable to parse csv file: {}", e.getMessage()); - } - } - - private void checkColumnExists(CSVRecord record, S3AccessKeyNames s3AccessKeyName) { - if (!s3AccessKeyName.getCsvName().equals(record.get(s3AccessKeyName.getCsvName()))) { - throw new S3AccessCsvFormatException(String.format("Csv file does not contain the required column: '%s'", s3AccessKeyName.getCsvName())); - } - } - - @Override - public Optional getPassword(String propertyName) { - return Optional.ofNullable(passwordMap.get(propertyName)); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java deleted file mode 100644 index ef9d53918fb..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessCsvFormatException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
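A sketch of the CSV shape the S3AccessCsv reader deleted above expects (the same header names AWS uses for downloaded access keys); the credential values are placeholders, and the Optional return of getPassword is used as in the class above:

    package org.apache.ambari.infra.job.archive;

    import java.io.StringReader;

    public class S3AccessCsvSketch {
      public static void main(String[] args) {
        // First row must repeat the expected header names, second row carries the values.
        String csv =
            "Access key ID,Secret access key\n" +
            "AKIAEXAMPLEKEY,exampleSecret\n"; // placeholder credentials

        S3AccessCsv store = new S3AccessCsv(new StringReader(csv));

        // Keys are exposed under the matching AWS environment variable names.
        System.out.println(store.getPassword("AWS_ACCESS_KEY_ID").orElse("<missing>"));
        System.out.println(store.getPassword("AWS_SECRET_ACCESS_KEY").orElse("<missing>"));
      }
    }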
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public class S3AccessCsvFormatException extends RuntimeException { - public S3AccessCsvFormatException(String message) { - super(message); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java deleted file mode 100644 index e840d3b329b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3AccessKeyNames.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -public enum S3AccessKeyNames { - AccessKeyId("AWS_ACCESS_KEY_ID", "Access key ID"), - SecretAccessKey("AWS_SECRET_ACCESS_KEY", "Secret access key"); - - private final String envVariableName; - private final String csvName; - - S3AccessKeyNames(String envVariableName, String csvName) { - this.envVariableName = envVariableName; - this.csvName = csvName; - } - - public String getEnvVariableName() { - return envVariableName; - } - - public String getCsvName() { - return csvName; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java deleted file mode 100644 index 59a4469e945..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import static org.apache.commons.lang.StringUtils.isBlank; - -public class S3Properties { - private final String s3AccessFile; - private final String s3KeyPrefix; - private final String s3BucketName; - private final String s3EndPoint; - - public S3Properties(String s3AccessFile, String s3KeyPrefix, String s3BucketName, String s3EndPoint) { - this.s3AccessFile = s3AccessFile; - this.s3KeyPrefix = s3KeyPrefix; - this.s3BucketName = s3BucketName; - this.s3EndPoint = s3EndPoint; - } - - public String getS3KeyPrefix() { - return s3KeyPrefix; - } - - public String getS3BucketName() { - return s3BucketName; - } - - public String getS3EndPoint() { - return s3EndPoint; - } - - public String getS3AccessFile() { - return s3AccessFile; - } - - @Override - public String toString() { - return "S3Properties{" + - "s3AccessFile='" + s3AccessFile + '\'' + - ", s3KeyPrefix='" + s3KeyPrefix + '\'' + - ", s3BucketName='" + s3BucketName + '\'' + - ", s3EndPoint='" + s3EndPoint + '\'' + - '}'; - } - - public void validate() { - if (isBlank(s3BucketName)) - throw new IllegalArgumentException("The property s3BucketName can not be null or empty string!"); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java deleted file mode 100644 index 2536cb5dfac..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java +++ /dev/null @@ -1,74 +0,0 @@ -package org.apache.ambari.infra.job.archive; - -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.services.s3.AmazonS3Client; -import org.apache.ambari.infra.conf.security.CompositePasswordStore; -import org.apache.ambari.infra.conf.security.PasswordStore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; - -import static org.apache.commons.lang.StringUtils.isBlank; -import static org.apache.commons.lang.StringUtils.isNotBlank; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -public class S3Uploader extends AbstractFileAction { - - private static final Logger LOG = LoggerFactory.getLogger(S3Uploader.class); - - private final AmazonS3Client client; - private final String keyPrefix; - private final String bucketName; - - public S3Uploader(S3Properties s3Properties, PasswordStore passwordStore) { - LOG.info("Initializing S3 client with " + s3Properties); - - this.keyPrefix = s3Properties.getS3KeyPrefix(); - this.bucketName = s3Properties.getS3BucketName(); - - PasswordStore compositePasswordStore = passwordStore; - if (isNotBlank((s3Properties.getS3AccessFile()))) - compositePasswordStore = new CompositePasswordStore(passwordStore, S3AccessCsv.file(s3Properties.getS3AccessFile())); - - BasicAWSCredentials credentials = new BasicAWSCredentials( - compositePasswordStore.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()) - .orElseThrow(() -> new IllegalArgumentException("Access key Id is not present!")), - compositePasswordStore.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()) - .orElseThrow(() -> new IllegalArgumentException("Secret Access Key is not present!"))); - client = new AmazonS3Client(credentials); - if (!isBlank(s3Properties.getS3EndPoint())) - client.setEndpoint(s3Properties.getS3EndPoint()); -// Note: without pathStyleAccess=true endpoint going to be .: -// client.setS3ClientOptions(S3ClientOptions.builder().setPathStyleAccess(true).build()); - } - - @Override - public File onPerform(File inputFile) { - String key = keyPrefix + inputFile.getName(); - - if (client.doesObjectExist(bucketName, key)) { - throw new UnsupportedOperationException(String.format("Object '%s' already exists in bucket '%s'", key, bucketName)); - } - - client.putObject(bucketName, key, inputFile); - return inputFile; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java deleted file mode 100644 index fba08e74957..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDAO.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
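A hedged construction sketch for the S3Uploader deleted above; the bucket, key prefix and credentials are placeholders, the PasswordStore lambda assumes the interface has a single getPassword method, and no request is actually sent:

    package org.apache.ambari.infra.job.archive;

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Optional;

    import org.apache.ambari.infra.conf.security.PasswordStore;

    public class S3UploaderSketch {
      public static void main(String[] args) {
        Map<String, String> credentials = new HashMap<>();
        credentials.put("AWS_ACCESS_KEY_ID", "AKIAEXAMPLEKEY");    // placeholder
        credentials.put("AWS_SECRET_ACCESS_KEY", "exampleSecret"); // placeholder
        PasswordStore store = name -> Optional.ofNullable(credentials.get(name));

        // Blank s3AccessFile -> credentials come from the PasswordStore only; null endpoint -> AWS default.
        S3Properties properties = new S3Properties(null, "solr_archive_", "infra-archive", null);
        properties.validate();

        FileAction uploader = new S3Uploader(properties, store);
        // uploader.perform(new File("/tmp/export.json.tar.gz"));
        //   would upload to s3://infra-archive/solr_archive_export.json.tar.gz,
        //   refusing to overwrite an existing object.
      }
    }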
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.SolrDAOBase; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.UncheckedIOException; - -public class SolrDAO extends SolrDAOBase implements DocumentWiper { - private static final Logger LOG = LoggerFactory.getLogger(SolrDAO.class); - - private final SolrProperties queryProperties; - - public SolrDAO(SolrProperties queryProperties) { - super(queryProperties.getZooKeeperConnectionString(), queryProperties.getCollection()); - this.queryProperties = queryProperties; - } - - @Override - public void delete(Document firstDocument, Document lastDocument) { - delete(new SolrParametrizedString(queryProperties.getDeleteQueryText()) - .set("start", firstDocument.getFieldMap()) - .set("end", lastDocument.getFieldMap()).toString()); - } - - public SolrDocumentIterator query(String start, String end, Document subIntervalFrom, int rows) { - SolrQuery query = queryProperties.toQueryBuilder() - .setInterval(start, end) - .setDocument(subIntervalFrom) - .build(); - query.setRows(rows); - - LOG.info("Executing solr query {}", query.toLocalParamsString()); - - try { - CloudSolrClient client = createClient(); - QueryResponse response = client.query(query); - return new SolrDocumentIterator(response, client); - } catch (SolrServerException e) { - throw new RuntimeException(e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java deleted file mode 100644 index f8d8382a3f4..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.CloseableIterator; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrDocument; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.HashMap; -import java.util.Iterator; -import java.util.TimeZone; - -public class SolrDocumentIterator implements CloseableIterator { - - public static final String SOLR_DATE_FORMAT_TEXT = "yyyy-MM-dd'T'HH:mm:ss.SSSX"; - private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat(SOLR_DATE_FORMAT_TEXT); - - static { - SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC")); - } - - private final Iterator documentIterator; - private final CloudSolrClient client; - - - public SolrDocumentIterator(QueryResponse response, CloudSolrClient client) { - documentIterator = response.getResults().iterator(); - this.client = client; - } - - @Override - public Document next() { - if (!documentIterator.hasNext()) - return null; - - SolrDocument document = documentIterator.next(); - HashMap fieldMap = new HashMap<>(); - for (String key : document.getFieldNames()) { - fieldMap.put(key, toString(document.get(key))); - } - - return new Document(fieldMap); - } - - private String toString(Object value) { - if (value == null) { - return null; - } - else if (value instanceof Date) { - return SOLR_DATE_FORMAT.format(value); - } - else { - return value.toString(); - } - } - - @Override - public void close() { - try { - client.close(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - @Override - public boolean hasNext() { - return documentIterator.hasNext(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java deleted file mode 100644 index 39ddd1e21ad..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
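A consumption sketch for the SolrDocumentIterator deleted above; the QueryResponse and CloudSolrClient would come from SolrDAO.query(...) in the real job, and the "logtime" field name is a placeholder:

    package org.apache.ambari.infra.job.archive;

    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class SolrDocumentIteratorSketch {
      static void drain(QueryResponse response, CloudSolrClient client) {
        SolrDocumentIterator iterator = new SolrDocumentIterator(response, client);
        try {
          while (iterator.hasNext()) {
            Document document = iterator.next();
            // Every field value arrives as a String; Date fields are rendered
            // in UTC using yyyy-MM-dd'T'HH:mm:ss.SSSX.
            System.out.println(document.get("logtime"));
          }
        } finally {
          iterator.close(); // also closes the underlying CloudSolrClient
        }
      }
    }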
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.CloseableIterator; -import org.apache.ambari.infra.job.ObjectSource; - -public class SolrDocumentSource implements ObjectSource { - private final SolrDAO solrDAO; - private final String start; - private final String end; - - public SolrDocumentSource(SolrDAO solrDAO, String start, String end) { - this.solrDAO = solrDAO; - this.start = start; - this.end = end; - } - - @Override - public CloseableIterator open(Document current, int rows) { - return solrDAO.query(start, end, current, rows); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java deleted file mode 100644 index 9770982f0ef..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.apache.solr.client.solrj.util.ClientUtils; - -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class SolrParametrizedString { - private static final String PARAMETER_PATTERN = "\\$\\{%s[a-z0-9A-Z]+}"; - private static final Pattern NO_PREFIX_PARAMETER_PATTERN = Pattern.compile(String.format(PARAMETER_PATTERN, "")); - - private final String string; - - public SolrParametrizedString(String string) { - this.string = string; - } - - private Set collectParamNames(Pattern regExPattern) { - Matcher matcher = regExPattern.matcher(string); - Set parameters = new HashSet<>(); - while (matcher.find()) - parameters.add(matcher.group().replace("${", "").replace("}", "")); - return parameters; - } - - @Override - public String toString() { - return string; - } - - public SolrParametrizedString set(Map parameterMap) { - return set(NO_PREFIX_PARAMETER_PATTERN, null, parameterMap); - } - - public SolrParametrizedString set(String prefix, Map parameterMap) { - String dottedPrefix = prefix + "."; - return set(Pattern.compile(String.format(PARAMETER_PATTERN, dottedPrefix)), dottedPrefix, parameterMap); - } - - private SolrParametrizedString set(Pattern regExPattern, String prefix, Map parameterMap) { - String newString = string; - for (String paramName : collectParamNames(regExPattern)) { - String paramSuffix = prefix == null ? 
paramName : paramName.replace(prefix, ""); - if (parameterMap.get(paramSuffix) != null) - newString = newString.replace(String.format("${%s}", paramName), getValue(parameterMap, paramSuffix)); - } - return new SolrParametrizedString(newString); - } - - private String getValue(Map parameterMap, String paramSuffix) { - String value = parameterMap.get(paramSuffix); - if ("*".equals(value)) - return value; - return ClientUtils.escapeQueryChars(value); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java deleted file mode 100644 index a2a78c24e20..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrProperties.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.archive; - -import org.springframework.batch.core.JobParameters; - -import java.util.ArrayList; -import java.util.List; - -import static org.apache.commons.lang.StringUtils.isBlank; - -public class SolrProperties { - private String zooKeeperConnectionString; - private String collection; - private String queryText; - private String filterQueryText; - private String[] sortColumn; - private String deleteQueryText; - - public String getZooKeeperConnectionString() { - return zooKeeperConnectionString; - } - - public void setZooKeeperConnectionString(String zooKeeperConnectionString) { - this.zooKeeperConnectionString = zooKeeperConnectionString; - } - - public String getCollection() { - return collection; - } - - public void setCollection(String collection) { - this.collection = collection; - } - - public String getQueryText() { - return queryText; - } - - public void setQueryText(String queryText) { - this.queryText = queryText; - } - - public String getFilterQueryText() { - return filterQueryText; - } - - public void setFilterQueryText(String filterQueryText) { - this.filterQueryText = filterQueryText; - } - - public String[] getSortColumn() { - return sortColumn; - } - - public void setSortColumn(String[] sortColumn) { - this.sortColumn = sortColumn; - } - - public String getDeleteQueryText() { - return deleteQueryText; - } - - public void setDeleteQueryText(String deleteQueryText) { - this.deleteQueryText = deleteQueryText; - } - - public SolrQueryBuilder toQueryBuilder() { - return new SolrQueryBuilder(). 
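A worked sketch of the SolrParametrizedString substitution deleted above; the "logtime" query and the timestamp are placeholders:

    package org.apache.ambari.infra.job.archive;

    import java.util.HashMap;
    import java.util.Map;

    public class SolrParametrizedStringSketch {
      public static void main(String[] args) {
        Map<String, String> interval = new HashMap<>();
        interval.put("start", "2017-10-01T00:00:00.000Z");
        interval.put("end", "*");

        String query = new SolrParametrizedString("logtime:[${start} TO ${end}]")
            .set(interval)
            .toString();

        // Values are escaped for Solr, while "*" passes through untouched:
        // logtime:[2017\-10\-01T00\:00\:00.000Z TO *]
        System.out.println(query);

        // The prefixed form, e.g. set("start", document.getFieldMap()), resolves
        // placeholders such as ${start.id} from a document's field map instead.
      }
    }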
- setQueryText(queryText) - .setFilterQueryText(filterQueryText) - .addSort(sortColumn); - } - - public void apply(JobParameters jobParameters) { - zooKeeperConnectionString = jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString); - collection = jobParameters.getString("collection", collection); - queryText = jobParameters.getString("queryText", queryText); - filterQueryText = jobParameters.getString("filterQueryText", filterQueryText); - deleteQueryText = jobParameters.getString("deleteQueryText", deleteQueryText); - - String sortValue; - List sortColumns = new ArrayList<>(); - int i = 0; - while ((sortValue = jobParameters.getString(String.format("sortColumn[%d]", i))) != null) { - sortColumns.add(sortValue); - ++i; - } - - if (sortColumns.size() > 0) - sortColumn = sortColumns.toArray(new String[sortColumns.size()]); - } - - public void validate() { - if (isBlank(zooKeeperConnectionString)) - throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!"); - - if (isBlank(collection)) - throw new IllegalArgumentException("The property collection can not be null or empty string!"); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java deleted file mode 100644 index 0e41169eaa4..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.solr.client.solrj.SolrQuery; - -import java.util.HashMap; -import java.util.Map; - -import static org.apache.commons.lang.StringUtils.isBlank; -import static org.apache.solr.client.solrj.SolrQuery.ORDER.asc; - -public class SolrQueryBuilder { - - private static final String INTERVAL_START = "start"; - private static final String INTERVAL_END = "end"; - private String queryText; - private final Map interval; - private String filterQueryText; - private Document document; - private String[] sortFields; - - public SolrQueryBuilder() { - this.queryText = "*:*"; - interval = new HashMap<>(); - interval.put(INTERVAL_START, "*"); - interval.put(INTERVAL_END, "*"); - } - - public SolrQueryBuilder setQueryText(String queryText) { - this.queryText = queryText; - return this; - } - - public SolrQueryBuilder setInterval(String startValue, String endValue) { - if (isBlank(startValue)) - startValue = "*"; - if (isBlank(endValue)) - endValue = "*"; - this.interval.put(INTERVAL_START, startValue); - this.interval.put(INTERVAL_END, endValue); - return this; - } - - public SolrQueryBuilder setFilterQueryText(String filterQueryText) { - this.filterQueryText = filterQueryText; - return this; - } - - - public SolrQueryBuilder setDocument(Document document) { - this.document = document; - return this; - } - - public SolrQueryBuilder addSort(String... sortBy) { - this.sortFields = sortBy; - return this; - } - - public SolrQuery build() { - SolrQuery solrQuery = new SolrQuery(); - - SolrParametrizedString queryText = new SolrParametrizedString(this.queryText).set(interval); - solrQuery.setQuery(queryText.toString()); - - if (filterQueryText != null) { - SolrParametrizedString filterQuery = new SolrParametrizedString(filterQueryText) - .set(interval); - - if (document != null) { - filterQuery = filterQuery.set(document.getFieldMap()); - solrQuery.setFilterQueries(filterQuery.toString()); - } - } - - if (sortFields != null) { - for (String field : sortFields) - solrQuery.addSort(field, asc); - } - - return solrQuery; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java deleted file mode 100644 index f062879f93e..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
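A sketch of the SolrQueryBuilder deleted above; the field names and interval values are placeholders:

    package org.apache.ambari.infra.job.archive;

    import org.apache.solr.client.solrj.SolrQuery;

    public class SolrQueryBuilderSketch {
      public static void main(String[] args) {
        SolrQuery query = new SolrQueryBuilder()
            .setQueryText("logtime:[${start} TO ${end}]")
            .setInterval("2017-10-01T00:00:00.000Z", null) // a blank end is widened to "*"
            .addSort("logtime", "id")                      // ascending sort, matching the export order
            .build();
        query.setRows(1000);

        // q = logtime:[2017\-10\-01T00\:00\:00.000Z TO *]
        // Note: the filter query is only applied when both a filterQueryText and a document are set.
        System.out.println(query);
      }
    }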
- */ -package org.apache.ambari.infra.job.archive; - -import org.hibernate.validator.constraints.NotBlank; -import org.springframework.batch.core.JobParameters; - -import java.util.ArrayList; -import java.util.List; - -import static org.apache.commons.lang.StringUtils.isBlank; - -public class SolrQueryProperties { - @NotBlank - private String collection; - @NotBlank - private String queryText; - private String filterQueryText; - private String[] sortColumn; - - public String getCollection() { - return collection; - } - - public void setCollection(String collection) { - this.collection = collection; - } - - public String getQueryText() { - return queryText; - } - - public void setQueryText(String queryText) { - this.queryText = queryText; - } - - public String getFilterQueryText() { - return filterQueryText; - } - - public void setFilterQueryText(String filterQueryText) { - this.filterQueryText = filterQueryText; - } - - public String[] getSortColumn() { - return sortColumn; - } - - public void setSortColumn(String[] sortColumn) { - this.sortColumn = sortColumn; - } - - public SolrQueryBuilder toQueryBuilder() { - return new SolrQueryBuilder(). - setQueryText(queryText) - .setFilterQueryText(filterQueryText) - .addSort(sortColumn); - } - - public void apply(JobParameters jobParameters) { - collection = jobParameters.getString("collection", collection); - queryText = jobParameters.getString("queryText", queryText); - filterQueryText = jobParameters.getString("filterQueryText", filterQueryText); - - String sortValue; - List sortColumns = new ArrayList<>(); - int i = 0; - while ((sortValue = jobParameters.getString(String.format("sortColumn[%d]", i))) != null) { - sortColumns.add(sortValue); - ++i; - } - - if (sortColumns.size() > 0) - sortColumn = sortColumns.toArray(new String[sortColumns.size()]); - } - - public void validate() { - if (isBlank(collection)) - throw new IllegalArgumentException("The property collection can not be null or empty string!"); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java deleted file mode 100644 index 8f9d6732de9..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.commons.compress.archivers.tar.TarArchiveEntry; -import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; -import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; -import org.apache.commons.io.IOUtils; - -import java.io.*; - -public class TarGzCompressor extends AbstractFileAction { - @Override - public File onPerform(File inputFile) { - File tarGzFile = new File(inputFile.getParent(), inputFile.getName() + ".tar.gz"); - try (TarArchiveOutputStream tarArchiveOutputStream = new TarArchiveOutputStream( - new GzipCompressorOutputStream(new FileOutputStream(tarGzFile)))) { - TarArchiveEntry archiveEntry = new TarArchiveEntry(inputFile.getName()); - archiveEntry.setSize(inputFile.length()); - tarArchiveOutputStream.putArchiveEntry(archiveEntry); - - try (FileInputStream fileInputStream = new FileInputStream(inputFile)) { - IOUtils.copy(fileInputStream, tarArchiveOutputStream); - } - - tarArchiveOutputStream.closeArchiveEntry(); - } - catch (IOException ex) { - throw new UncheckedIOException(ex); - } - - return tarGzFile; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/WriteCompletedEvent.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/WriteCompletedEvent.java deleted file mode 100644 index 49abe22873b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/WriteCompletedEvent.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
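A usage sketch for the TarGzCompressor deleted above; the input path is a placeholder:

    package org.apache.ambari.infra.job.archive;

    import java.io.File;

    public class TarGzCompressorSketch {
      public static void main(String[] args) {
        FileAction compress = new TarGzCompressor();

        // Wraps the single input file into <name>.tar.gz next to it and returns the new archive,
        // which the job can then hand to an uploader action.
        File archive = compress.perform(new File("/tmp/solr_archive_2017-10-01.json"));
        System.out.println(archive.getAbsolutePath()); // /tmp/solr_archive_2017-10-01.json.tar.gz
      }
    }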
- */ -package org.apache.ambari.infra.job.archive; - -import java.io.File; - -public class WriteCompletedEvent { - private final File outFile; - private final Document firstDocument; - private final Document lastDocument; - - public WriteCompletedEvent(File outFile, Document firstDocument, Document lastDocument) { - this.outFile = outFile; - this.firstDocument = firstDocument; - this.lastDocument = lastDocument; - } - - public File getOutFile() { - return outFile; - } - - public Document getFirstDocument() { - return firstDocument; - } - - public Document getLastDocument() { - return lastDocument; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java deleted file mode 100644 index 4a68c494238..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingConfiguration.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.deleting; - -import org.apache.ambari.infra.job.AbstractJobsConfiguration; -import org.apache.ambari.infra.job.JobScheduler; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.annotation.JobScope; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; -import org.springframework.batch.core.configuration.annotation.StepScope; -import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor; -import org.springframework.batch.core.job.builder.JobBuilder; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import javax.inject.Inject; - -@Configuration -public class DocumentDeletingConfiguration extends AbstractJobsConfiguration { - - private final StepBuilderFactory steps; - private final Step deleteStep; - - @Inject - public DocumentDeletingConfiguration( - DocumentDeletingPropertyMap documentDeletingPropertyMap, - JobScheduler scheduler, - StepBuilderFactory steps, - JobBuilderFactory jobs, - JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor, - @Qualifier("deleteStep") Step deleteStep) { - super(documentDeletingPropertyMap.getSolrDataDeleting(), scheduler, jobs, jobRegistryBeanPostProcessor); - this.steps = steps; - this.deleteStep = deleteStep; - } - - @Override - protected Job buildJob(JobBuilder jobBuilder) { - return jobBuilder.start(deleteStep).build(); - } - - @Bean - @JobScope - public Step deleteStep(DocumentWiperTasklet tasklet) { - return steps.get("delete") - .tasklet(tasklet) - .build(); - } - - @Bean - @StepScope - public DocumentWiperTasklet documentWiperTasklet( - @Value("#{stepExecution.jobExecution.executionContext.get('jobProperties')}") DocumentDeletingProperties properties, - @Value("#{jobParameters[start]}") String start, - @Value("#{jobParameters[end]}") String end) { - return new DocumentWiperTasklet(properties, start, end); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java deleted file mode 100644 index 63b7dd22975..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingProperties.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.deleting; - -import org.apache.ambari.infra.job.JobProperties; -import org.springframework.batch.core.JobParameters; - -import static org.apache.commons.lang.StringUtils.isBlank; - -public class DocumentDeletingProperties extends JobProperties { - private String zooKeeperConnectionString; - private String collection; - private String filterField; - - public DocumentDeletingProperties() { - super(DocumentDeletingProperties.class); - } - - public String getZooKeeperConnectionString() { - return zooKeeperConnectionString; - } - - public void setZooKeeperConnectionString(String zooKeeperConnectionString) { - this.zooKeeperConnectionString = zooKeeperConnectionString; - } - - public String getCollection() { - return collection; - } - - public void setCollection(String collection) { - this.collection = collection; - } - - public String getFilterField() { - return filterField; - } - - public void setFilterField(String filterField) { - this.filterField = filterField; - } - - @Override - public void apply(JobParameters jobParameters) { - zooKeeperConnectionString = jobParameters.getString("zooKeeperConnectionString", zooKeeperConnectionString); - collection = jobParameters.getString("collection", collection); - filterField = jobParameters.getString("filterField", filterField); - } - - @Override - public void validate() { - if (isBlank(zooKeeperConnectionString)) - throw new IllegalArgumentException("The property zooKeeperConnectionString can not be null or empty string!"); - - if (isBlank(collection)) - throw new IllegalArgumentException("The property collection can not be null or empty string!"); - - if (isBlank(filterField)) - throw new IllegalArgumentException("The property filterField can not be null or empty string!"); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java deleted file mode 100644 index 1dc0caf9e7b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentDeletingPropertyMap.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.deleting; - -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.context.annotation.Configuration; - -import java.util.Map; - -@Configuration -@ConfigurationProperties(prefix = "infra-manager.jobs") -public class DocumentDeletingPropertyMap { - private Map solrDataDeleting; - - public Map getSolrDataDeleting() { - return solrDataDeleting; - } - - public void setSolrDataDeleting(Map solrDataDeleting) { - this.solrDataDeleting = solrDataDeleting; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java deleted file mode 100644 index 463e6e002e7..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/deleting/DocumentWiperTasklet.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.deleting; - -import org.apache.ambari.infra.job.SolrDAOBase; -import org.apache.solr.client.solrj.util.ClientUtils; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; - -public class DocumentWiperTasklet extends SolrDAOBase implements Tasklet { - private final String filterField; - private final String start; - private final String end; - - public DocumentWiperTasklet(DocumentDeletingProperties properties, String start, String end) { - super(properties.getZooKeeperConnectionString(), properties.getCollection()); - this.filterField = properties.getFilterField(); - this.start = start; - this.end = end; - } - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) { - delete(String.format("%s:[%s TO %s]", filterField, getValue(start), getValue(end))); - return RepeatStatus.FINISHED; - } - - private String getValue(String value) { - return "*".equals(value) ? value : ClientUtils.escapeQueryChars(value); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java deleted file mode 100644 index a124e4d16fe..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemProcessor.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
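A sketch of the delete query the DocumentWiperTasklet deleted above builds; the filter field and interval values are placeholders:

    package org.apache.ambari.infra.job.deleting;

    import org.apache.solr.client.solrj.util.ClientUtils;

    public class DeleteQuerySketch {
      public static void main(String[] args) {
        String filterField = "logtime"; // assumed filter field
        String start = "2017-10-01T00:00:00.000Z";
        String end = "*";

        // Mirrors DocumentWiperTasklet.execute(): everything except "*" is escaped for Solr.
        String deleteQuery = String.format("%s:[%s TO %s]",
            filterField,
            "*".equals(start) ? start : ClientUtils.escapeQueryChars(start),
            "*".equals(end) ? end : ClientUtils.escapeQueryChars(end));

        // logtime:[2017\-10\-01T00\:00\:00.000Z TO *]  -> passed to SolrDAOBase.delete(...)
        System.out.println(deleteQuery);
      }
    }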
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.dummy; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.item.ItemProcessor; - -public class DummyItemProcessor implements ItemProcessor { - - private static final Logger LOG = LoggerFactory.getLogger(DummyItemProcessor.class); - - @Override - public String process(DummyObject input) throws Exception { - LOG.info("Dummy processing, f1: {}, f2: {}. wait 10 seconds", input.getF1(), input.getF2()); - Thread.sleep(10000); - return String.format("%s, %s", input.getF1(), input.getF2()); - } - -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java deleted file mode 100644 index 89ad0130222..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyItemWriter.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.dummy; - -import org.apache.ambari.infra.conf.InfraManagerDataConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.annotation.BeforeStep; -import org.springframework.batch.item.ItemWriter; - -import javax.inject.Inject; -import java.io.File; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Date; -import java.util.List; - -public class DummyItemWriter implements ItemWriter { - - private static final Logger LOG = LoggerFactory.getLogger(DummyItemWriter.class); - - private StepExecution stepExecution; - - @Inject - private InfraManagerDataConfig infraManagerDataConfig; - - @Override - public void write(List values) throws Exception { - LOG.info("DummyItem writer called (values: {})... 
wait 1 seconds", values.toString()); - Thread.sleep(1000); - String outputDirectoryLocation = String.format("%s%s%s%s", infraManagerDataConfig.getDataFolder(), File.separator, "dummyOutput-", new Date().getTime()); - Path pathToDirectory = Paths.get(outputDirectoryLocation); - Path pathToFile = Paths.get(String.format("%s%s%s", outputDirectoryLocation, File.separator, "dummyOutput.txt")); - Files.createDirectories(pathToDirectory); - LOG.info("Write location to step execution context..."); - stepExecution.getExecutionContext().put("stepOutputLocation", pathToFile.toAbsolutePath().toString()); - LOG.info("Write location to job execution context..."); - stepExecution.getJobExecution().getExecutionContext().put("jobOutputLocation", pathToFile.toAbsolutePath().toString()); - LOG.info("Write to file: {}", pathToFile.toAbsolutePath()); - Files.write(pathToFile, values.toString().getBytes()); - } - - @BeforeStep - public void saveStepExecution(StepExecution stepExecution) { - this.stepExecution = stepExecution; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobConfiguration.java deleted file mode 100644 index a4f53696b78..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobConfiguration.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.infra.job.dummy; - -import javax.inject.Inject; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.file.FlatFileItemReader; -import org.springframework.batch.item.file.LineMapper; -import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper; -import org.springframework.batch.item.file.mapping.DefaultLineMapper; -import org.springframework.batch.item.file.mapping.FieldSetMapper; -import org.springframework.batch.item.file.transform.DelimitedLineTokenizer; -import org.springframework.batch.item.file.transform.LineTokenizer; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.ClassPathResource; - -@Configuration -public class DummyJobConfiguration { - @Inject - private StepBuilderFactory steps; - - @Inject - private JobBuilderFactory jobs; - - @Bean(name = "dummyStep") - protected Step dummyStep(ItemReader reader, - ItemProcessor processor, - @Qualifier("dummyItemWriter") ItemWriter writer) { - return steps.get("dummyStep").listener(new DummyStepListener()). chunk(2) - .reader(reader).processor(processor).writer(writer).build(); - } - - @Bean(name = "dummyJob") - public Job job(@Qualifier("dummyStep") Step dummyStep) { - return jobs.get("dummyJob").listener(new DummyJobListener()).start(dummyStep).build(); - } - - @Bean - public ItemReader dummyItemReader() { - FlatFileItemReader csvFileReader = new FlatFileItemReader<>(); - csvFileReader.setResource(new ClassPathResource("dummy/dummy.txt")); - csvFileReader.setLinesToSkip(1); - LineMapper lineMapper = dummyLineMapper(); - csvFileReader.setLineMapper(lineMapper); - return csvFileReader; - } - - @Bean - public ItemProcessor dummyItemProcessor() { - return new DummyItemProcessor(); - } - - @Bean(name = "dummyItemWriter") - public ItemWriter dummyItemWriter() { - return new DummyItemWriter(); - } - - private LineMapper dummyLineMapper() { - DefaultLineMapper lineMapper = new DefaultLineMapper<>(); - - LineTokenizer dummyTokenizer = dummyTokenizer(); - lineMapper.setLineTokenizer(dummyTokenizer); - - FieldSetMapper dummyFieldSetMapper = dummyFieldSetMapper(); - lineMapper.setFieldSetMapper(dummyFieldSetMapper); - - return lineMapper; - } - - private FieldSetMapper dummyFieldSetMapper() { - BeanWrapperFieldSetMapper studentInformationMapper = new BeanWrapperFieldSetMapper<>(); - studentInformationMapper.setTargetType(DummyObject.class); - return studentInformationMapper; - } - - private LineTokenizer dummyTokenizer() { - DelimitedLineTokenizer studentLineTokenizer = new DelimitedLineTokenizer(); - studentLineTokenizer.setDelimiter(","); - studentLineTokenizer.setNames(new String[]{"f1", "f2"}); - return studentLineTokenizer; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java deleted file mode 100644 index 99c50e804e9..00000000000 --- 
a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyJobListener.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.dummy; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; - -public class DummyJobListener implements JobExecutionListener { - - private static final Logger LOG = LoggerFactory.getLogger(DummyJobListener.class); - - @Override - public void beforeJob(JobExecution jobExecution) { - LOG.info("Dummy - before job execution"); - } - - @Override - public void afterJob(JobExecution jobExecution) { - LOG.info("Dummy - after job execution"); - if (jobExecution.getExecutionContext().get("jobOutputLocation") != null) { - String jobOutputLocation = (String) jobExecution.getExecutionContext().get("jobOutputLocation"); - String exitDescription = "file://" + jobOutputLocation; - LOG.info("Add exit description '{}'", exitDescription); - jobExecution.setExitStatus(new ExitStatus(ExitStatus.COMPLETED.getExitCode(), exitDescription)); - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java deleted file mode 100644 index ce087dd15cf..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyObject.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.dummy; - -public class DummyObject { - private String f1; - private String f2; - - public String getF1() { - return f1; - } - - public void setF1(String f1) { - this.f1 = f1; - } - - public String getF2() { - return f2; - } - - public void setF2(String f2) { - this.f2 = f2; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java deleted file mode 100644 index 548e6504eaf..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/dummy/DummyStepListener.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.job.dummy; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; - -public class DummyStepListener implements StepExecutionListener { - - private static final Logger LOG = LoggerFactory.getLogger(DummyStepListener.class); - - @Override - public void beforeStep(StepExecution stepExecution) { - LOG.info("Dummy step - before step execution"); - } - - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - LOG.info("Dummy step - after step execution"); - return stepExecution.getExitStatus(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java deleted file mode 100644 index f35387d468f..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
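A brief sketch of how one comma-separated input line is mapped onto the DummyObject bean above, mirroring the tokenizer settings from DummyJobConfiguration; the sample line is an assumed example:

  import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
  import org.springframework.batch.item.file.transform.FieldSet;

  public class DummyLineMappingSketch {
      public static void main(String[] args) {
          DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
          tokenizer.setDelimiter(",");
          tokenizer.setNames(new String[]{"f1", "f2"});

          // One data line of dummy/dummy.txt (the reader skips the first line as a header)
          FieldSet fields = tokenizer.tokenize("value1,value2");

          DummyObject dummyObject = new DummyObject();
          dummyObject.setF1(fields.readString("f1"));
          dummyObject.setF2(fields.readString("f2"));
          System.out.println(dummyObject.getF1() + " / " + dummyObject.getF2());
      }
  }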
- */ -package org.apache.ambari.infra.manager; - -import com.google.common.collect.Lists; -import org.apache.ambari.infra.model.ExecutionContextResponse; -import org.apache.ambari.infra.model.JobDetailsResponse; -import org.apache.ambari.infra.model.JobExecutionDetailsResponse; -import org.apache.ambari.infra.model.JobExecutionInfoResponse; -import org.apache.ambari.infra.model.JobInstanceDetailsResponse; -import org.apache.ambari.infra.model.JobOperationParams; -import org.apache.ambari.infra.model.StepExecutionContextResponse; -import org.apache.ambari.infra.model.StepExecutionInfoResponse; -import org.apache.ambari.infra.model.StepExecutionProgressResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.admin.history.StepExecutionHistory; -import org.springframework.batch.admin.service.JobService; -import org.springframework.batch.admin.service.NoSuchStepExecutionException; -import org.springframework.batch.admin.web.JobInfo; -import org.springframework.batch.admin.web.StepExecutionProgress; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobExecutionNotRunningException; -import org.springframework.batch.core.launch.JobOperator; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; - -import javax.inject.Inject; -import javax.inject.Named; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.TimeZone; - -@Named -public class JobManager implements Jobs { - - private static final Logger LOG = LoggerFactory.getLogger(JobManager.class); - - @Inject - private JobService jobService; - - @Inject - private JobOperator jobOperator; - - @Inject - private JobExplorer jobExplorer; - - private TimeZone timeZone = TimeZone.getDefault(); - - public Set getAllJobNames() { - return jobOperator.getJobNames(); - } - - /** - * Launch a new job instance (based on job name) and applies customized parameters to it. 
- * Also add a new date parameter to make sure the job instance will be unique - */ - @Override - public JobExecutionInfoResponse launchJob(String jobName, JobParameters jobParameters) - throws JobParametersInvalidException, NoSuchJobException, - JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { - - Set running = jobExplorer.findRunningJobExecutions(jobName); - if (!running.isEmpty()) - throw new JobExecutionAlreadyRunningException("An instance of this job is already active: "+jobName); - - return new JobExecutionInfoResponse(jobService.launch(jobName, jobParameters), timeZone); - } - - @Override - public void restart(Long jobExecutionId) - throws JobInstanceAlreadyCompleteException, NoSuchJobException, JobExecutionAlreadyRunningException, - JobParametersInvalidException, JobRestartException, NoSuchJobExecutionException { - jobService.restart(jobExecutionId); - } - - @Override - public Optional lastRun(String jobName) throws NoSuchJobException { - return jobService.listJobExecutionsForJob(jobName, 0, 1).stream().findFirst(); - } - - /** - * Get all execution ids mapped to a specific job name. - */ - public Set getExecutionIdsByJobName(String jobName) throws NoSuchJobException { - return jobOperator.getRunningExecutions(jobName); - } - - /** - * Stop all running job executions and return the number of stopped jobs. - */ - public Integer stopAllJobs() { - return jobService.stopAll(); - } - - /** - * Gather job execution details by job execution id. - */ - public JobExecutionDetailsResponse getExecutionInfo(Long jobExecutionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = jobService.getJobExecution(jobExecutionId); - List stepExecutionInfoList = new ArrayList<>(); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - stepExecutionInfoList.add(new StepExecutionInfoResponse(stepExecution, timeZone)); - } - stepExecutionInfoList.sort(Comparator.comparing(StepExecutionInfoResponse::getId)); - return new JobExecutionDetailsResponse(new JobExecutionInfoResponse(jobExecution, timeZone), stepExecutionInfoList); - } - - /** - * Stop or abandon a running job execution by job execution id. - */ - public JobExecutionInfoResponse stopOrAbandonJobByExecutionId(Long jobExecutionId, JobOperationParams.JobStopOrAbandonOperationParam operation) - throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobExecutionAlreadyRunningException { - JobExecution jobExecution; - if (JobOperationParams.JobStopOrAbandonOperationParam.STOP.equals(operation)) { - jobExecution = jobService.stop(jobExecutionId); - } else if (JobOperationParams.JobStopOrAbandonOperationParam.ABANDON.equals(operation)) { - jobExecution = jobService.abandon(jobExecutionId); - } else { - throw new UnsupportedOperationException("Unsupported operation"); - } - LOG.info("Job {} was marked {}", jobExecution.getJobInstance().getJobName(), operation.name()); - return new JobExecutionInfoResponse(jobExecution, timeZone); - } - - /** - * Get execution context for a job execution instance. 
(context can be shipped between job executions) - */ - public ExecutionContextResponse getExecutionContextByJobExecutionId(Long executionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = jobService.getJobExecution(executionId); - Map executionMap = new HashMap<>(); - for (Map.Entry entry : jobExecution.getExecutionContext().entrySet()) { - executionMap.put(entry.getKey(), entry.getValue()); - } - return new ExecutionContextResponse(executionId, executionMap); - } - - /** - * Restart a specific job instance with the same parameters. (only restart operation is supported here) - */ - public JobExecutionInfoResponse restart(Long jobInstanceId, String jobName, - JobOperationParams.JobRestartOperationParam operation) throws NoSuchJobException, JobParametersInvalidException, - JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobExecutionException { - if (JobOperationParams.JobRestartOperationParam.RESTART.equals(operation)) { - Collection jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstanceId); - JobExecution jobExecution = jobExecutions.iterator().next(); - Long jobExecutionId = jobExecution.getId(); - return new JobExecutionInfoResponse(jobService.restart(jobExecutionId), timeZone); - } else { - throw new UnsupportedOperationException("Unsupported operation (try: RESTART)"); - } - } - - /** - * Get all job details. (paged) - */ - public List getAllJobs(int start, int pageSize) { - List jobs = new ArrayList<>(); - Collection names = jobService.listJobs(start, pageSize); - for (String name : names) { - int count = 0; - try { - count = jobService.countJobExecutionsForJob(name); - } - catch (NoSuchJobException e) { - // shouldn't happen - } - boolean launchable = jobService.isLaunchable(name); - boolean incrementable = jobService.isIncrementable(name); - jobs.add(new JobInfo(name, count, null, launchable, incrementable)); - } - return jobs; - } - - /** - * Get all executions for unique job instance. - */ - public List getExecutionsForJobInstance(String jobName, Long jobInstanceId) throws NoSuchJobInstanceException, NoSuchJobException { - List result = Lists.newArrayList(); - JobInstance jobInstance = jobService.getJobInstance(jobInstanceId); - Collection jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getInstanceId()); - for (JobExecution jobExecution : jobExecutions) { - result.add(new JobExecutionInfoResponse(jobExecution, timeZone)); - } - return result; - } - - /** - * Get job details for a specific job. 
(paged) - */ - public JobDetailsResponse getJobDetails(String jobName, int page, int size) throws NoSuchJobException { - List jobInstanceResponses = Lists.newArrayList(); - Collection jobInstances = jobService.listJobInstances(jobName, page, size); - - int count = jobService.countJobExecutionsForJob(jobName); - boolean launchable = jobService.isLaunchable(jobName); - boolean isIncrementable = jobService.isIncrementable(jobName); - - for (JobInstance jobInstance: jobInstances) { - List executionInfos = Lists.newArrayList(); - Collection jobExecutions = jobService.getJobExecutionsForJobInstance(jobName, jobInstance.getId()); - if (jobExecutions != null) { - for (JobExecution jobExecution : jobExecutions) { - executionInfos.add(new JobExecutionInfoResponse(jobExecution, timeZone)); - } - } - jobInstanceResponses.add(new JobInstanceDetailsResponse(jobInstance, executionInfos)); - } - return new JobDetailsResponse(new JobInfo(jobName, count, launchable, isIncrementable), jobInstanceResponses); - } - - /** - * Get step execution details for a given job execution id and step execution id. - */ - public StepExecutionInfoResponse getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId); - return new StepExecutionInfoResponse(stepExecution, timeZone); - } - - /** - * Get step execution context details. (execution context can be shipped between steps) - */ - public StepExecutionContextResponse getStepExecutionContext(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId); - Map executionMap = new HashMap<>(); - for (Map.Entry entry : stepExecution.getExecutionContext().entrySet()) { - executionMap.put(entry.getKey(), entry.getValue()); - } - return new StepExecutionContextResponse(executionMap, jobExecutionId, stepExecutionId, stepExecution.getStepName()); - } - - /** - * Get step execution progress status details. 
- */ - public StepExecutionProgressResponse getStepExecutionProgress(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId); - StepExecutionInfoResponse stepExecutionInfoResponse = new StepExecutionInfoResponse(stepExecution, timeZone); - String stepName = stepExecution.getStepName(); - if (stepName.contains(":partition")) { - stepName = stepName.replaceAll("(:partition).*", "$1*"); - } - String jobName = stepExecution.getJobExecution().getJobInstance().getJobName(); - StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName); - StepExecutionProgress stepExecutionProgress = new StepExecutionProgress(stepExecution, stepExecutionHistory); - - return new StepExecutionProgressResponse(stepExecutionProgress, stepExecutionHistory, stepExecutionInfoResponse); - - } - - private StepExecutionHistory computeHistory(String jobName, String stepName) { - int total = jobService.countStepExecutionsForStep(jobName, stepName); - StepExecutionHistory stepExecutionHistory = new StepExecutionHistory(stepName); - for (int i = 0; i < total; i += 1000) { - for (StepExecution stepExecution : jobService.listStepExecutionsForStep(jobName, stepName, i, 1000)) { - stepExecutionHistory.append(stepExecution); - } - } - return stepExecutionHistory; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java deleted file mode 100644 index b2ca605b61b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/Jobs.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
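The partition step name normalization used by getStepExecutionProgress above, shown on an assumed example name:

  public class PartitionStepNameSketch {
      public static void main(String[] args) {
          // Individual partitions of a partitioned step are aggregated under one wildcard name
          String stepName = "dummyStep:partition3";                         // assumed example
          String normalized = stepName.replaceAll("(:partition).*", "$1*");
          System.out.println(normalized);                                   // dummyStep:partition*
      }
  }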
- */ -package org.apache.ambari.infra.manager; - -import org.apache.ambari.infra.model.JobExecutionInfoResponse; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; - -import java.util.Optional; - -public interface Jobs { - JobExecutionInfoResponse launchJob(String jobName, JobParameters params) - throws JobParametersInvalidException, NoSuchJobException, - JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException; - void restart(Long jobExecutionId) - throws JobInstanceAlreadyCompleteException, NoSuchJobException, JobExecutionAlreadyRunningException, - JobParametersInvalidException, JobRestartException, NoSuchJobExecutionException; - - Optional lastRun(String jobName) throws NoSuchJobException, NoSuchJobExecutionException; -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java deleted file mode 100644 index 2d46c5475c2..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/ExecutionContextResponse.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
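A hedged usage sketch for the Jobs interface above; the job name and parameter keys are assumed examples, and JobManager is the implementation shown earlier:

  import org.springframework.batch.core.JobParameters;
  import org.springframework.batch.core.JobParametersBuilder;
  import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;

  public class LaunchJobSketch {
      public static void launch(Jobs jobs) throws Exception {
          JobParameters parameters = new JobParametersBuilder()
                  .addString("start", "2017-10-01T00:00:00.000Z")     // assumed parameter keys and values
                  .addString("end", "*")
                  .addLong("timestamp", System.currentTimeMillis())   // keeps the job instance unique
                  .toJobParameters();
          try {
              System.out.println(jobs.launchJob("solr_data_deleting", parameters).getJobName()); // assumed job name
          } catch (JobExecutionAlreadyRunningException e) {
              // JobManager rejects a launch while another execution of the same job is still running
          }
      }
  }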
- */ -package org.apache.ambari.infra.model; - -import java.util.Map; - -public class ExecutionContextResponse { - - private final Long jobExecutionId; - private final Map executionContextMap; - - public ExecutionContextResponse(Long jobExecutionId, Map executionContextMap) { - this.jobExecutionId = jobExecutionId; - this.executionContextMap = executionContextMap; - } - - public Long getJobExecutionId() { - return jobExecutionId; - } - - public Map getExecutionContextMap() { - return executionContextMap; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java deleted file mode 100644 index cd34fefdbc3..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobDetailsResponse.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import org.springframework.batch.admin.web.JobInfo; - -import java.util.List; - -public class JobDetailsResponse { - - private JobInfo jobInfo; - private List jobInstanceDetailsResponseList; - - public JobDetailsResponse() { - } - - public JobDetailsResponse(JobInfo jobInfo, List jobInstanceDetailsResponseList) { - this.jobInfo = jobInfo; - this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList; - } - - public JobInfo getJobInfo() { - return jobInfo; - } - - public void setJobInfo(JobInfo jobInfo) { - this.jobInfo = jobInfo; - } - - public List getJobInstanceDetailsResponseList() { - return jobInstanceDetailsResponseList; - } - - public void setJobInstanceDetailsResponseList(List jobInstanceDetailsResponseList) { - this.jobInstanceDetailsResponseList = jobInstanceDetailsResponseList; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java deleted file mode 100644 index 695b57f0bc7..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionDetailsResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import java.util.List; - -public class JobExecutionDetailsResponse { - - private JobExecutionInfoResponse jobExecutionInfoResponse; - - private List stepExecutionInfoList; - - public JobExecutionDetailsResponse(JobExecutionInfoResponse jobExecutionInfoResponse, List stepExecutionInfoList) { - this.jobExecutionInfoResponse = jobExecutionInfoResponse; - this.stepExecutionInfoList = stepExecutionInfoList; - } - - public JobExecutionInfoResponse getJobExecutionInfoResponse() { - return jobExecutionInfoResponse; - } - - public void setJobExecutionInfoResponse(JobExecutionInfoResponse jobExecutionInfoResponse) { - this.jobExecutionInfoResponse = jobExecutionInfoResponse; - } - - public List getStepExecutionInfoList() { - return stepExecutionInfoList; - } - - public void setStepExecutionInfoList(List stepExecutionInfoList) { - this.stepExecutionInfoList = stepExecutionInfoList; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java deleted file mode 100644 index a7e4a4f0c44..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionInfoResponse.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.model; - -import org.apache.ambari.infra.model.wrapper.JobExecutionData; -import org.springframework.batch.admin.web.JobParametersExtractor; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.converter.DefaultJobParametersConverter; -import org.springframework.batch.core.converter.JobParametersConverter; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Properties; -import java.util.TimeZone; - -public class JobExecutionInfoResponse { - private Long id; - private int stepExecutionCount; - private Long jobId; - private String jobName; - private String startDate = ""; - private String startTime = ""; - private String duration = ""; - private JobExecutionData jobExecutionData; - private Properties jobParameters; - private String jobParametersString; - private boolean restartable = false; - private boolean abandonable = false; - private boolean stoppable = false; - private final TimeZone timeZone; - - - public JobExecutionInfoResponse(JobExecution jobExecution, TimeZone timeZone) { - JobParametersConverter converter = new DefaultJobParametersConverter(); - this.jobExecutionData = new JobExecutionData(jobExecution); - this.timeZone = timeZone; - this.id = jobExecutionData.getId(); - this.jobId = jobExecutionData.getJobId(); - this.stepExecutionCount = jobExecutionData.getStepExecutions().size(); - this.jobParameters = converter.getProperties(jobExecutionData.getJobParameters()); - this.jobParametersString = (new JobParametersExtractor()).fromJobParameters(jobExecutionData.getJobParameters()); - JobInstance jobInstance = jobExecutionData.getJobInstance(); - if(jobInstance != null) { - this.jobName = jobInstance.getJobName(); - BatchStatus endTime = jobExecutionData.getStatus(); - this.restartable = endTime.isGreaterThan(BatchStatus.STOPPING) && endTime.isLessThan(BatchStatus.ABANDONED); - this.abandonable = endTime.isGreaterThan(BatchStatus.STARTED) && endTime != BatchStatus.ABANDONED; - this.stoppable = endTime.isLessThan(BatchStatus.STOPPING); - } else { - this.jobName = "?"; - } - - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); - SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss"); - SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss"); - - durationFormat.setTimeZone(TimeZone.getTimeZone("GMT")); - timeFormat.setTimeZone(timeZone); - dateFormat.setTimeZone(timeZone); - if(jobExecutionData.getStartTime() != null) { - this.startDate = dateFormat.format(jobExecutionData.getStartTime()); - this.startTime = timeFormat.format(jobExecutionData.getStartTime()); - Date endTime1 = jobExecutionData.getEndTime() != null? 
jobExecutionData.getEndTime():new Date(); - this.duration = durationFormat.format(new Date(endTime1.getTime() - jobExecutionData.getStartTime().getTime())); - } - } - - public Long getId() { - return id; - } - - public int getStepExecutionCount() { - return stepExecutionCount; - } - - public Long getJobId() { - return jobId; - } - - public String getJobName() { - return jobName; - } - - public String getStartDate() { - return startDate; - } - - public String getStartTime() { - return startTime; - } - - public String getDuration() { - return duration; - } - - public JobExecutionData getJobExecutionData() { - return jobExecutionData; - } - - public Properties getJobParameters() { - return jobParameters; - } - - public String getJobParametersString() { - return jobParametersString; - } - - public boolean isRestartable() { - return restartable; - } - - public boolean isAbandonable() { - return abandonable; - } - - public boolean isStoppable() { - return stoppable; - } - - public TimeZone getTimeZone() { - return timeZone; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java deleted file mode 100644 index b4c20e9ffba..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRequest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import javax.ws.rs.PathParam; - -public class JobExecutionRequest { - - @PathParam("jobName") - private String jobName; - - @PathParam("jobInstanceId") - private Long jobInstanceId; - - public String getJobName() { - return jobName; - } - - public Long getJobInstanceId() { - return jobInstanceId; - } - - public void setJobName(String jobName) { - this.jobName = jobName; - } - - public void setJobInstanceId(Long jobInstanceId) { - this.jobInstanceId = jobInstanceId; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java deleted file mode 100644 index 3eab25f4e42..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionRestartRequest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
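The duration string in JobExecutionInfoResponse above is produced by formatting the elapsed milliseconds with a GMT SimpleDateFormat; a minimal sketch of that technique (it only renders correctly for durations under 24 hours):

  import java.text.SimpleDateFormat;
  import java.util.Date;
  import java.util.TimeZone;

  public class DurationFormatSketch {
      public static void main(String[] args) {
          SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss");
          durationFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
          long elapsedMillis = (2 * 3600 + 15 * 60 + 30) * 1000L;               // assumed example: 2h 15m 30s
          System.out.println(durationFormat.format(new Date(elapsedMillis)));   // 02:15:30
      }
  }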
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import javax.validation.constraints.NotNull; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; - -public class JobExecutionRestartRequest { - - @PathParam("jobName") - @NotNull - private String jobName; - - @PathParam("jobInstanceId") - @NotNull - private Long jobInstanceId; - - @QueryParam("operation") - @NotNull - private JobOperationParams.JobRestartOperationParam operation; - - public String getJobName() { - return jobName; - } - - public void setJobName(String jobName) { - this.jobName = jobName; - } - - public Long getJobInstanceId() { - return jobInstanceId; - } - - public void setJobExecutionId(Long jobExecutionId) { - this.jobInstanceId = jobExecutionId; - } - - public JobOperationParams.JobRestartOperationParam getOperation() { - return operation; - } - - public void setOperation(JobOperationParams.JobRestartOperationParam operation) { - this.operation = operation; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java deleted file mode 100644 index b176f125fae..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobExecutionStopRequest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.model; - -import javax.validation.constraints.NotNull; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; - -public class JobExecutionStopRequest { - - @PathParam("jobExecutionId") - @NotNull - private Long jobExecutionId; - - @QueryParam("operation") - @NotNull - private JobOperationParams.JobStopOrAbandonOperationParam operation; - - public Long getJobExecutionId() { - return jobExecutionId; - } - - public void setJobExecutionId(Long jobExecutionId) { - this.jobExecutionId = jobExecutionId; - } - - public JobOperationParams.JobStopOrAbandonOperationParam getOperation() { - return operation; - } - - public void setOperation(JobOperationParams.JobStopOrAbandonOperationParam operation) { - this.operation = operation; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java deleted file mode 100644 index af886545d9d..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceDetailsResponse.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import org.springframework.batch.core.JobInstance; - -import java.util.List; - -public class JobInstanceDetailsResponse { - - private JobInstance jobInstance; - - private List jobExecutionInfoResponseList; - - public JobInstanceDetailsResponse() { - } - - public JobInstanceDetailsResponse(JobInstance jobInstance, List jobExecutionInfoResponseList) { - this.jobInstance = jobInstance; - this.jobExecutionInfoResponseList = jobExecutionInfoResponseList; - } - - public JobInstance getJobInstance() { - return jobInstance; - } - - public void setJobInstance(JobInstance jobInstance) { - this.jobInstance = jobInstance; - } - - public List getJobExecutionInfoResponseList() { - return jobExecutionInfoResponseList; - } - - public void setJobExecutionInfoResponseList(List jobExecutionInfoResponseList) { - this.jobExecutionInfoResponseList = jobExecutionInfoResponseList; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java deleted file mode 100644 index 905a4fa6f67..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobInstanceStartRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import javax.validation.constraints.NotNull; -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; - -public class JobInstanceStartRequest { - - @PathParam("jobName") - @NotNull - private String jobName; - - @QueryParam("params") - String params; - - public String getJobName() { - return jobName; - } - - public void setJobName(String jobName) { - this.jobName = jobName; - } - - public String getParams() { - return params; - } - - public void setParams(String params) { - this.params = params; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java deleted file mode 100644 index e286debee1b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobOperationParams.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -public class JobOperationParams { - - public enum JobStopOrAbandonOperationParam { - STOP, ABANDON; - } - - public enum JobRestartOperationParam { - RESTART; - } - -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java deleted file mode 100644 index b4fd4785e1a..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/JobRequest.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import javax.validation.constraints.NotNull; -import javax.ws.rs.PathParam; - -public class JobRequest extends PageRequest { - - @NotNull - @PathParam("jobName") - private String jobName; - - public String getJobName() { - return jobName; - } - - public void setJobName(String jobName) { - this.jobName = jobName; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java deleted file mode 100644 index 679d4fd8220..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/PageRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import javax.ws.rs.DefaultValue; -import javax.ws.rs.QueryParam; - -public class PageRequest { - - @QueryParam("page") - @DefaultValue("0") - private int page; - - @QueryParam("size") - @DefaultValue("20") - private int size; - - public int getPage() { - return page; - } - - public void setPage(int page) { - this.page = page; - } - - public int getSize() { - return size; - } - - public void setSize(int size) { - this.size = size; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java deleted file mode 100644 index 0e67a8795e9..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionContextResponse.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
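The request beans above (JobRequest, PageRequest and the others) carry JAX-RS binding annotations; a hedged sketch of how a resource method might bind one of them. The resource class, paths and injection shown here are assumptions for illustration, not part of this diff:

  import javax.inject.Inject;
  import javax.ws.rs.BeanParam;
  import javax.ws.rs.GET;
  import javax.ws.rs.Path;
  import org.springframework.batch.core.launch.NoSuchJobException;

  @Path("/jobs")                                      // assumed path
  public class JobResourceSketch {
      @Inject
      private JobManager jobManager;

      @GET
      @Path("/{jobName}/details")                     // assumed sub-path; {jobName} feeds @PathParam("jobName") in JobRequest
      public JobDetailsResponse jobDetails(@BeanParam JobRequest request) throws NoSuchJobException {
          return jobManager.getJobDetails(request.getJobName(), request.getPage(), request.getSize());
      }
  }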
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model; - -import java.util.Map; - -public class StepExecutionContextResponse { - - private Map executionContextMap; - - private Long jobExecutionId; - - private Long stepExecutionId; - - private String stepName; - - public StepExecutionContextResponse() { - } - - public StepExecutionContextResponse(Map executionContextMap, Long jobExecutionId, Long stepExecutionId, String stepName) { - this.executionContextMap = executionContextMap; - this.jobExecutionId = jobExecutionId; - this.stepExecutionId = stepExecutionId; - this.stepName = stepName; - } - - public Map getExecutionContextMap() { - return executionContextMap; - } - - public Long getJobExecutionId() { - return jobExecutionId; - } - - public Long getStepExecutionId() { - return stepExecutionId; - } - - public String getStepName() { - return stepName; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java deleted file mode 100644 index ed04767bd41..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionInfoResponse.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.model; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import org.apache.ambari.infra.model.wrapper.StepExecutionData; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.TimeZone; - -public class StepExecutionInfoResponse { - private Long id; - private Long jobExecutionId; - private String jobName; - private String name; - private String startDate = "-"; - private String startTime = "-"; - private String duration = "-"; - private StepExecutionData stepExecutionData; - private long durationMillis; - - public StepExecutionInfoResponse(String jobName, Long jobExecutionId, String name, TimeZone timeZone) { - this.jobName = jobName; - this.jobExecutionId = jobExecutionId; - this.name = name; - this.stepExecutionData = new StepExecutionData(new StepExecution(name, new JobExecution(jobExecutionId))); - } - - public StepExecutionInfoResponse(StepExecution stepExecution, TimeZone timeZone) { - this.stepExecutionData = new StepExecutionData(stepExecution); - this.id = stepExecutionData.getId(); - this.name = stepExecutionData.getStepName(); - this.jobName = stepExecutionData.getJobExecution() != null && stepExecutionData.getJobExecution().getJobInstance() != null? stepExecutionData.getJobExecution().getJobInstance().getJobName():"?"; - this.jobExecutionId = stepExecutionData.getJobExecutionId(); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); - SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss"); - SimpleDateFormat durationFormat = new SimpleDateFormat("HH:mm:ss"); - - durationFormat.setTimeZone(TimeZone.getTimeZone("GMT")); - timeFormat.setTimeZone(timeZone); - dateFormat.setTimeZone(timeZone); - if(stepExecutionData.getStartTime() != null) { - this.startDate = dateFormat.format(stepExecutionData.getStartTime()); - this.startTime = timeFormat.format(stepExecutionData.getStartTime()); - Date endTime = stepExecutionData.getEndTime() != null? 
stepExecutionData.getEndTime():new Date(); - this.durationMillis = endTime.getTime() - stepExecutionData.getStartTime().getTime(); - this.duration = durationFormat.format(new Date(this.durationMillis)); - } - - } - - public Long getId() { - return this.id; - } - - public Long getJobExecutionId() { - return this.jobExecutionId; - } - - public String getName() { - return this.name; - } - - public String getJobName() { - return this.jobName; - } - - public String getStartDate() { - return this.startDate; - } - - public String getStartTime() { - return this.startTime; - } - - public String getDuration() { - return this.duration; - } - - public long getDurationMillis() { - return this.durationMillis; - } - - public String getStatus() { - return this.id != null?this.stepExecutionData.getStatus().toString():"NONE"; - } - - public String getExitCode() { - return this.id != null?this.stepExecutionData.getExitStatus().getExitCode():"NONE"; - } - - @JsonIgnore - public StepExecutionData getStepExecution() { - return this.stepExecutionData; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java deleted file mode 100644 index 26f9ed4f9ba..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionProgressResponse.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.model; - -import org.springframework.batch.admin.history.StepExecutionHistory; -import org.springframework.batch.admin.web.StepExecutionProgress; - -public class StepExecutionProgressResponse { - - private StepExecutionProgress stepExecutionProgress; - - private StepExecutionHistory stepExecutionHistory; - - private StepExecutionInfoResponse stepExecutionInfoResponse; - - public StepExecutionProgressResponse() { - } - - public StepExecutionProgressResponse(StepExecutionProgress stepExecutionProgress, StepExecutionHistory stepExecutionHistory, - StepExecutionInfoResponse stepExecutionInfoResponse) { - this.stepExecutionProgress = stepExecutionProgress; - this.stepExecutionHistory = stepExecutionHistory; - this.stepExecutionInfoResponse = stepExecutionInfoResponse; - } - - public StepExecutionProgress getStepExecutionProgress() { - return stepExecutionProgress; - } - - public StepExecutionHistory getStepExecutionHistory() { - return stepExecutionHistory; - } - - public StepExecutionInfoResponse getStepExecutionInfoResponse() { - return stepExecutionInfoResponse; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java deleted file mode 100644 index 2228171271b..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/StepExecutionRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.model; - -import javax.validation.constraints.NotNull; -import javax.ws.rs.PathParam; - -public class StepExecutionRequest { - - @PathParam("jobExecutionId") - @NotNull - private Long jobExecutionId; - - @PathParam("stepExecutionId") - @NotNull - private Long stepExecutionId; - - public Long getJobExecutionId() { - return jobExecutionId; - } - - public void setJobExecutionId(Long jobExecutionId) { - this.jobExecutionId = jobExecutionId; - } - - public Long getStepExecutionId() { - return stepExecutionId; - } - - public void setStepExecutionId(Long stepExecutionId) { - this.stepExecutionId = stepExecutionId; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java deleted file mode 100644 index 28e262ae6db..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/JobExecutionData.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.model.wrapper; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.google.common.collect.Lists; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; - -import java.util.Collection; -import java.util.Date; -import java.util.List; - -/** - * Wrapper for #{{@link JobExecution}} - */ -public class JobExecutionData { - - private JobExecution jobExecution; - - public JobExecutionData(JobExecution jobExecution) { - this.jobExecution = jobExecution; - } - - @JsonIgnore - public JobExecution getJobExecution() { - return jobExecution; - } - - @JsonIgnore - public Collection getStepExecutions() { - return jobExecution.getStepExecutions(); - } - - public JobParameters getJobParameters() { - return jobExecution.getJobParameters(); - } - - public JobInstance getJobInstance() { - return jobExecution.getJobInstance(); - } - - public Collection getStepExecutionDataList() { - List stepExecutionDataList = Lists.newArrayList(); - Collection stepExecutions = getStepExecutions(); - if (stepExecutions != null) { - for (StepExecution stepExecution : stepExecutions) { - stepExecutionDataList.add(new StepExecutionData(stepExecution)); - } - } - return stepExecutionDataList; - } - - public BatchStatus getStatus() { - return jobExecution.getStatus(); - } - - public Date getStartTime() { - return jobExecution.getStartTime(); - } - - public Date getCreateTime() { - return jobExecution.getCreateTime(); - } - - public Date getEndTime() { - return jobExecution.getEndTime(); - } - - public Date getLastUpdated() { - return jobExecution.getLastUpdated(); - } - - public ExitStatus getExitStatus() { - return jobExecution.getExitStatus(); - } - - public ExecutionContext getExecutionContext() { - return jobExecution.getExecutionContext(); - } - - public List getFailureExceptions() { - return jobExecution.getFailureExceptions(); - } - - public String getJobConfigurationName() { - return jobExecution.getJobConfigurationName(); - } - - public Long getId() { - return jobExecution.getId(); - } - - public Long getJobId() { - return jobExecution.getJobId(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java deleted file mode 100644 index 26552ae6b85..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/model/wrapper/StepExecutionData.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.model.wrapper; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; - -import java.util.Date; -import java.util.List; - -/** - * Wrapper for #{{@link StepExecution}} - */ -public class StepExecutionData { - - @JsonIgnore - private final JobExecution jobExecution; - - @JsonIgnore - private final StepExecution stepExecution; - - - public StepExecutionData(StepExecution stepExecution) { - this.stepExecution = stepExecution; - this.jobExecution = stepExecution.getJobExecution(); - } - - @JsonIgnore - public JobExecution getJobExecution() { - return jobExecution; - } - - @JsonIgnore - public StepExecution getStepExecution() { - return stepExecution; - } - - public String getStepName() { - return stepExecution.getStepName(); - } - - public int getReadCount() { - return stepExecution.getReadCount(); - } - - public BatchStatus getStatus() { - return stepExecution.getStatus(); - } - - public int getWriteCount() { - return stepExecution.getWriteCount(); - } - - public int getCommitCount() { - return stepExecution.getCommitCount(); - } - - public int getRollbackCount() { - return stepExecution.getRollbackCount(); - } - - public int getReadSkipCount() { - return stepExecution.getReadSkipCount(); - } - - public int getProcessSkipCount() { - return stepExecution.getProcessSkipCount(); - } - - public Date getStartTime() { - return stepExecution.getStartTime(); - } - - public int getWriteSkipCount() { - return stepExecution.getWriteSkipCount(); - } - - public Date getEndTime() { - return stepExecution.getEndTime(); - } - - public Date getLastUpdated() { - return stepExecution.getLastUpdated(); - } - - public ExecutionContext getExecutionContext() { - return stepExecution.getExecutionContext(); - } - - public ExitStatus getExitStatus() { - return stepExecution.getExitStatus(); - } - - public boolean isTerminateOnly() { - return stepExecution.isTerminateOnly(); - } - - public int getFilterCount() { - return stepExecution.getFilterCount(); - } - - public List getFailureExceptions() { - return stepExecution.getFailureExceptions(); - } - - public Long getId() { - return stepExecution.getId(); - } - - public Long getJobExecutionId() { - return stepExecution.getJobExecutionId(); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java deleted file mode 100644 index 18dfdd9d21c..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/ApiDocResource.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.rest; - -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.infra.doc.InfraManagerApiDocStorage; -import org.springframework.context.annotation.Scope; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -@Path("swagger.{type:json|yaml}") -@Named -@Scope("request") -public class ApiDocResource { - - @Inject - private InfraManagerApiDocStorage infraManagerApiDocStorage; - - @GET - @Produces({MediaType.APPLICATION_JSON, "application/yaml"}) - @ApiOperation(value = "The swagger definition in either JSON or YAML", hidden = true) - public Response swaggerDefinitionResponse(@PathParam("type") String type) { - Response response = Response.status(404).build(); - if (infraManagerApiDocStorage.getSwagger() != null) { - if ("yaml".equalsIgnoreCase(type)) { - response = Response.ok().entity(infraManagerApiDocStorage.getSwaggerYaml()).type("application/yaml").build(); - } else { - response = Response.ok().entity(infraManagerApiDocStorage.getSwagger()).build(); - } - } - return response; - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java deleted file mode 100644 index 079cce3e115..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobExceptionMapper.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.rest; - - -import com.google.common.collect.Maps; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.admin.service.NoSuchStepExecutionException; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.launch.JobExecutionNotFailedException; -import org.springframework.batch.core.launch.JobExecutionNotRunningException; -import org.springframework.batch.core.launch.JobExecutionNotStoppedException; -import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException; -import org.springframework.batch.core.launch.JobParametersNotFoundException; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.step.NoSuchStepException; -import org.springframework.web.bind.MethodArgumentNotValidException; - -import javax.batch.operations.JobExecutionAlreadyCompleteException; -import javax.inject.Named; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; -import java.util.Map; - -@Named -@Provider -public class JobExceptionMapper implements ExceptionMapper { - - private static final Logger LOG = LoggerFactory.getLogger(JobExceptionMapper.class); - - private static final Map exceptionStatusCodeMap = Maps.newHashMap(); - - static { - exceptionStatusCodeMap.put(MethodArgumentNotValidException.class, Response.Status.BAD_REQUEST); - exceptionStatusCodeMap.put(NoSuchJobException.class, Response.Status.NOT_FOUND); - exceptionStatusCodeMap.put(NoSuchStepException.class, Response.Status.NOT_FOUND); - exceptionStatusCodeMap.put(NoSuchStepExecutionException.class, Response.Status.NOT_FOUND); - exceptionStatusCodeMap.put(NoSuchJobExecutionException.class, Response.Status.NOT_FOUND); - exceptionStatusCodeMap.put(NoSuchJobInstanceException.class, Response.Status.NOT_FOUND); - exceptionStatusCodeMap.put(JobExecutionNotRunningException.class, Response.Status.INTERNAL_SERVER_ERROR); - exceptionStatusCodeMap.put(JobExecutionNotStoppedException.class, Response.Status.INTERNAL_SERVER_ERROR); - exceptionStatusCodeMap.put(JobInstanceAlreadyExistsException.class, Response.Status.ACCEPTED); - exceptionStatusCodeMap.put(JobInstanceAlreadyCompleteException.class, Response.Status.ACCEPTED); - exceptionStatusCodeMap.put(JobExecutionAlreadyRunningException.class, Response.Status.ACCEPTED); - exceptionStatusCodeMap.put(JobExecutionAlreadyCompleteException.class, Response.Status.ACCEPTED); - exceptionStatusCodeMap.put(JobParametersNotFoundException.class, Response.Status.NOT_FOUND); - exceptionStatusCodeMap.put(JobExecutionNotFailedException.class, Response.Status.INTERNAL_SERVER_ERROR); - exceptionStatusCodeMap.put(JobRestartException.class, Response.Status.INTERNAL_SERVER_ERROR); - exceptionStatusCodeMap.put(JobParametersInvalidException.class, Response.Status.BAD_REQUEST); - } - - @Override - public Response toResponse(Throwable throwable) { - LOG.error("REST Exception occurred:", throwable); - Response.Status status = Response.Status.INTERNAL_SERVER_ERROR; - - for 
(Map.Entry entry : exceptionStatusCodeMap.entrySet()) { - if (throwable.getClass().isAssignableFrom(entry.getKey())) { - status = entry.getValue(); - LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode()); - break; - } - } - - return Response.status(status).entity(new StatusMessage(throwable.getMessage(), status.getStatusCode())) - .type(MediaType.APPLICATION_JSON_TYPE).build(); - } - - private class StatusMessage { - private String message; - private int statusCode; - - StatusMessage(String message, int statusCode) { - this.message = message; - this.statusCode = statusCode; - } - - public String getMessage() { - return message; - } - - public int getStatusCode() { - return statusCode; - } - } -} diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java deleted file mode 100644 index 502057e078f..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/rest/JobResource.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.rest; - -import com.google.common.base.Splitter; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.infra.manager.JobManager; -import org.apache.ambari.infra.model.ExecutionContextResponse; -import org.apache.ambari.infra.model.JobDetailsResponse; -import org.apache.ambari.infra.model.JobExecutionDetailsResponse; -import org.apache.ambari.infra.model.JobExecutionInfoResponse; -import org.apache.ambari.infra.model.JobExecutionRequest; -import org.apache.ambari.infra.model.JobExecutionRestartRequest; -import org.apache.ambari.infra.model.JobExecutionStopRequest; -import org.apache.ambari.infra.model.JobInstanceStartRequest; -import org.apache.ambari.infra.model.JobRequest; -import org.apache.ambari.infra.model.PageRequest; -import org.apache.ambari.infra.model.StepExecutionContextResponse; -import org.apache.ambari.infra.model.StepExecutionInfoResponse; -import org.apache.ambari.infra.model.StepExecutionProgressResponse; -import org.apache.ambari.infra.model.StepExecutionRequest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.admin.service.NoSuchStepExecutionException; -import org.springframework.batch.admin.web.JobInfo; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.launch.JobExecutionNotRunningException; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.context.annotation.Scope; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.validation.Valid; -import javax.validation.constraints.NotNull; -import javax.ws.rs.BeanParam; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import java.util.List; -import java.util.Set; - -@Api(value = "jobs", description = "Job operations") -@Path("jobs") -@Named -@Scope("request") -public class JobResource { - private static final Logger LOG = LoggerFactory.getLogger(JobResource.class); - - @Inject - private JobManager jobManager; - - @GET - @Produces({"application/json"}) - @ApiOperation("Get all jobs") - public List getAllJobs(@BeanParam @Valid PageRequest request) { - return jobManager.getAllJobs(request.getPage(), request.getSize()); - } - - @POST - @Produces({"application/json"}) - @Path("{jobName}") - @ApiOperation("Start a new job instance by job name.") - public JobExecutionInfoResponse startJob(@BeanParam @Valid JobInstanceStartRequest request) - throws JobParametersInvalidException, NoSuchJobException, JobExecutionAlreadyRunningException, - JobRestartException, JobInstanceAlreadyCompleteException { - - String jobName = request.getJobName(); - String params = request.getParams(); - JobParametersBuilder jobParametersBuilder = new JobParametersBuilder(); - if (params != null) { - LOG.info("Parsing parameters of job {} '{}'", jobName, params); - Splitter.on(',') - .trimResults() - 
.withKeyValueSeparator(Splitter.on('=').limit(2).trimResults()) - .split(params).forEach(jobParametersBuilder::addString); - } - - return jobManager.launchJob(jobName, jobParametersBuilder.toJobParameters()); - } - - @GET - @Produces({"application/json"}) - @Path("/info/names") - @ApiOperation("Get all job names") - public Set getAllJobNames() { - return jobManager.getAllJobNames(); - } - - @GET - @Produces({"application/json"}) - @Path("{jobName}/info") - @ApiOperation("Get job details by job name.") - public JobDetailsResponse getJobDetails(@BeanParam @Valid JobRequest jobRequest) throws NoSuchJobException { - return jobManager.getJobDetails(jobRequest.getJobName(), jobRequest.getPage(), jobRequest.getSize()); - } - - @GET - @Path("{jobName}/executions") - @Produces({"application/json"}) - @ApiOperation("Get the id values of all the running job instances.") - public Set getExecutionIdsByJobName(@PathParam("jobName") @NotNull @Valid String jobName) throws NoSuchJobException { - return jobManager.getExecutionIdsByJobName(jobName); - } - - @GET - @Produces({"application/json"}) - @Path("/executions/{jobExecutionId}") - @ApiOperation("Get job and step details for job execution instance.") - public JobExecutionDetailsResponse getExectionInfo(@PathParam("jobExecutionId") @Valid Long jobExecutionId) throws NoSuchJobExecutionException { - return jobManager.getExecutionInfo(jobExecutionId); - } - - @GET - @Produces({"application/json"}) - @Path("/executions/{jobExecutionId}/context") - @ApiOperation("Get execution context for specific job.") - public ExecutionContextResponse getExecutionContextByJobExecId(@PathParam("jobExecutionId") Long executionId) throws NoSuchJobExecutionException { - return jobManager.getExecutionContextByJobExecutionId(executionId); - } - - - @DELETE - @Produces({"application/json"}) - @Path("/executions/{jobExecutionId}") - @ApiOperation("Stop or abandon a running job execution.") - public JobExecutionInfoResponse stopOrAbandonJobExecution(@BeanParam @Valid JobExecutionStopRequest request) - throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobExecutionAlreadyRunningException { - return jobManager.stopOrAbandonJobByExecutionId(request.getJobExecutionId(), request.getOperation()); - } - - @DELETE - @Produces({"application/json"}) - @Path("/executions") - @ApiOperation("Stop all job executions.") - public Integer stopAll() { - return jobManager.stopAllJobs(); - } - - @GET - @Produces({"application/json"}) - @Path("/{jobName}/{jobInstanceId}/executions") - @ApiOperation("Get execution for job instance.") - public List getExecutionsForInstance(@BeanParam @Valid JobExecutionRequest request) throws - NoSuchJobException, NoSuchJobInstanceException { - return jobManager.getExecutionsForJobInstance(request.getJobName(), request.getJobInstanceId()); - } - - @POST - @Produces({"application/json"}) - @Path("/{jobName}/{jobInstanceId}/executions") - @ApiOperation("Restart job instance.") - public JobExecutionInfoResponse restartJobInstance(@BeanParam @Valid JobExecutionRestartRequest request) throws JobInstanceAlreadyCompleteException, - NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobParametersInvalidException, JobRestartException, NoSuchJobException { - return jobManager.restart(request.getJobInstanceId(), request.getJobName(), request.getOperation()); - } - - @GET - @Produces({"application/json"}) - @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}") - @ApiOperation("Get step execution details.") - public StepExecutionInfoResponse 
getStepExecution(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - return jobManager.getStepExecution(request.getJobExecutionId(), request.getStepExecutionId()); - } - - @GET - @Produces({"application/json"}) - @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}/execution-context") - @ApiOperation("Get the execution context of step execution.") - public StepExecutionContextResponse getStepExecutionContext(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - return jobManager.getStepExecutionContext(request.getJobExecutionId(), request.getStepExecutionId()); - } - - @GET - @Produces({"application/json"}) - @Path("/executions/{jobExecutionId}/steps/{stepExecutionId}/progress") - @ApiOperation("Get progress of step execution.") - public StepExecutionProgressResponse getStepExecutionProgress(@BeanParam @Valid StepExecutionRequest request) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - return jobManager.getStepExecutionProgress(request.getJobExecutionId(), request.getStepExecutionId()); - } - -} diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt b/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt deleted file mode 100644 index 41da7250cb6..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/dummy/dummy.txt +++ /dev/null @@ -1,3 +0,0 @@ -f1,f2 -v1,v2 -v3,v4 \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh deleted file mode 100644 index 9a371fd7cc0..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager-env.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Extend with java options or system properties. 
e.g.: INFRA_MANAGER_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=5007,server=y,suspend=n" -export INFRA_MANAGER_OPTS="" - -# Log Search debug options -# export INFRA_MANAGER_DEBUG=true -# export INFRA_MANAGER_DEBUG_SUSPEND=n -export INFRA_MANAGER_DEBUG_PORT=5005 - -# Log Search memory -# export INFRA_MANAGER_JAVA_MEM="--Xmx1024m" - -# export LOG_PATH=/var/log/ambari-logsearch-logfeeder/ -# export LOG_FILE=logsearch.log - -# Pid file of the application -# export INFRA_MANAGER_PID_DIR=/var/run/ambari-infra-manager -# export INFRA_MANAGER_PID_FILE=infra-manager.pid - -# SSL settings" -# export INFRA_MANAGER_SSL="true" -# export INFRA_MANAGER_KEYSTORE_LOCATION="/my/path/keystore.jks" -# export INFRA_MANAGER_KEYSTORE_TYPE="jks" -# export INFRA_MANAGER_TRUSTSTORE_LOCATION="/my/path/trutstore.jks" -# export INFRA_MANAGER_TRUSTSTORE_TYPE="jks" \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties deleted file mode 100644 index a0712bae047..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties +++ /dev/null @@ -1,74 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -infra-manager.batch.db.file=job-repository.db -infra-manager.batch.db.init=false -infra-manager.batch.db.username=admin -infra-manager.batch.db.password=admin -management.security.enabled=false -management.health.solr.enabled=false -infra-manager.server.data.folder=/tmp/ambariInfraManager - -infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=zookeeper:2181 -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logtime:[${start} TO ${end}] -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}] -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id -infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100 -infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150 -infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=LOCAL -infra-manager.jobs.solr_data_archiving.archive_service_logs.local_destination_directory=/tmp/ambariInfraManager -infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime -infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX -infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=true -infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.cron=0 * * * * ? -infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.intervalEndDelta=PT24H -infra-manager.jobs.solr_data_archiving.archive_audit_logs.enabled=true -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181 -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.collection=audit_logs -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=logtime:[${start} TO ${end}] -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}] -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=logtime -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id -infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}]) -infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100 -infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150 -infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=S3 -# TODO: logtime may not be enough: The same filename can be generated when more than write_block_size count docs has the same logtime value -infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=logtime -infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX -infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/ -infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/test_audit_logs 
-#infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_access_file=.csv -infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_key_prefix=solr_archive_ -infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_bucket_name=testbucket -infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_endpoint=http://fakes3:4569 -# TODO: configure ranger audit logs -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.zoo_keeper_connection_string=zookeeper:2181 -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.read_block_size=100 -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.write_block_size=150 -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.file_name_suffix_column=logtime -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.destination_directory_path=/tmp/ambariInfraManager -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.collection=hadoop_logs -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.query_text=logtime:[* TO "${end}"] -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.filter_query_text=(logtime:"${logtime}" AND id:{"${id}" TO *]) OR logtime:{"${logtime}" TO "${end}"] -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[0]=logtime -#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[1]=id -infra-manager.jobs.solr_data_deleting.delete_audit_logs.enabled=true -infra-manager.jobs.solr_data_deleting.delete_audit_logs.zoo_keeper_connection_string=zookeeper:2181 -infra-manager.jobs.solr_data_deleting.delete_audit_logs.collection=audit_logs -infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=logtime diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh deleted file mode 100644 index 5ac32e30bbb..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh +++ /dev/null @@ -1,272 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -if [ "$INFRA_MANAGER_JAVA_MEM" = "" ]; then - INFRA_MANAGER_JAVA_MEM="-Xmx1g" -fi - -readlinkf(){ - # get real path on mac OSX - perl -MCwd -e 'print Cwd::abs_path shift' "$1"; -} - -if [ "$(uname -s)" = 'Linux' ]; then - SCRIPT_DIR="`dirname "$(readlink -f "$0")"`" -else - SCRIPT_DIR="`dirname "$(readlinkf "$0")"`" -fi - -INFRA_MANAGER_ROOT_DIR="`dirname \"$SCRIPT_DIR\"`" -INFRA_MANAGER_LIBS_DIR="$INFRA_MANAGER_ROOT_DIR/libs" - -if [ "$INFRA_MANAGER_CONF_DIR" = "" ]; then - if [ -d "$INFRA_MANAGER_ROOT_DIR/conf" ]; then - INFRA_MANAGER_CONF_DIR="$INFRA_MANAGER_ROOT_DIR/conf" - fi -fi - -if [ -f "$INFRA_MANAGER_CONF_DIR/infra-manager-env.sh" ]; then - source $INFRA_MANAGER_CONF_DIR/infra-manager-env.sh -fi - -JVM="java" - -if [ -x $JAVA_HOME/bin/java ]; then - JVM=$JAVA_HOME/bin/java -fi - -if [ ! -z "$INFRA_MANAGER_SOLR_CLIENT_SSL_INCLUDE" ]; then - source $INFRA_MANAGER_SOLR_CLIENT_SSL_INCLUDE -fi - -if [ -z "$INFRA_MANAGER_PID_FILE" ]; then - INFRA_MANAGER_PID_DIR=$HOME - export INFRA_MANAGER_PID_FILE=$INFRA_MANAGER_PID_DIR/infra-manager.pid -fi - -if [ -z "$LOG_FILE" ]; then - export LOG_FILE="infra-manager.log" -fi - -INFRA_MANAGER_GC_LOGFILE="infra-manager-gc.log" - -if [ -z "$LOG_PATH" ]; then - LOG_FILE="$HOME/$LOG_FILE" - INFRA_MANAGER_GC_LOGFILE="$HOME/$INFRA_MANAGER_GC_LOGFILE" -else - LOG_PATH_WITHOUT_SLASH=${LOG_PATH%/} - LOG_FILE="$LOG_PATH_WITHOUT_SLASH/$LOG_FILE" - INFRA_MANAGER_GC_LOGFILE="$LOG_PATH_WITHOUT_SLASH/$INFRA_MANAGER_GC_LOGFILE" -fi - -INFRA_MANAGER_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$INFRA_MANAGER_GC_LOGFILE" - -function print_usage() { - cat << EOF - - Usage: [] [] - - commands: - start Start Infra Manager - stop Stop Infra Manager - status Check Infra Manager is running or not (pid file) - help Print usage - - - start command arguments: - -d, --debug Start java process in debug mode - -f, --foreground Start java process in foreground - -EOF -} - -function spinner() { - local pid=$1 - local delay=0.5 - local spinstr='|/-\' - while [ "$(ps aux | awk '{print $2}' | grep -w $pid)" ]; do - local temp=${spinstr#?} - printf " [%c] " "$spinstr" - local spinstr=$temp${spinstr%"$temp"} - sleep $delay - printf "\b\b\b\b\b\b" - done - printf " \b\b\b\b" -} - -function status() { - echo "Checking Infra Manager status ..." >&2 - if [ -f "$INFRA_MANAGER_PID_FILE" ]; then - INFRA_MANAGER_PID=`cat "$INFRA_MANAGER_PID_FILE"` - else - echo "Infra Manager pid not exists. (probably the process is not running)" >&2 - return 1 - fi - - if ps -p $INFRA_MANAGER_PID > /dev/null - then - echo "Infra Manager process is running. (pid: $INFRA_MANAGER_PID)" >&2 - return 0 - else - echo "Infra Manager process is not running." >&2 - return 1 - fi -} - -function start() { - exit_status=$(status; echo $?) - if [ "$exit_status" = "0" ]; then - echo "Skipping start process." 
- exit 0 - fi - - FG="false" - INFRA_MANAGER_DEBUG_SUSPEND=${INFRA_MANAGER_DEBUG_SUSPEND:-n} - INFRA_MANAGER_DEBUG_PORT=${INFRA_MANAGER_DEBUG_PORT:-"5005"} - - if [ "$INFRA_MANAGER_DEBUG" = "true" ]; then - INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$INFRA_MANAGER_DEBUG_PORT,server=y,suspend=$INFRA_MANAGER_DEBUG_SUSPEND " - fi - - if [ "$INFRA_MANAGER_SSL" = "true" ]; then - INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Djavax.net.ssl.keyStore=$INFRA_MANAGER_KEYSTORE_LOCATION -Djavax.net.ssl.keyStoreType=$INFRA_MANAGER_KEYSTORE_TYPE -Djavax.net.ssl.trustStore=$INFRA_MANAGER_TRUSTSTORE_LOCATION -Djavax.net.ssl.trustStoreType=$INFRA_MANAGER_TRUSTSTORE_TYPE" - fi - - if [ "$INFRA_MANAGER_JMX" = "true" ]; then - INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=2099" - fi - - if [ $# -gt 0 ]; then - while true; do - case "$1" in - -f|--foreground) - FG="true" - shift - ;; - -d|--debug) - if [ "$INFRA_MANAGER_DEBUG" != "true" ]; then - INFRA_MANAGER_JAVA_OPTS="$INFRA_MANAGER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$INFRA_MANAGER_DEBUG_PORT,server=y,suspend=$INFRA_MANAGER_DEBUG_SUSPEND " - fi - shift - ;; - *) - if [ "${1:0:2}" == "-D" ]; then - # pass thru any opts that begin with -D (java system props) - INFRA_MANAGER_JAVA_OPTS+=("$1") - echo "$INFRA_MANAGER_JAVA_OPTS" - shift - else - if [ "$1" != "" ]; then - print_usage - exit 1 - else - break - fi - fi - ;; - esac - done - fi - - if [ $FG == "true" ]; then - echo "Starting Infra Manager... (foreground) pid_file=$INFRA_MANAGER_PID_FILE" - echo "Run command $JVM -cp '$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*' $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager" - $JVM -cp "$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*" $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager - else - echo "Starting Infra Manager... Output file=$LOG_FILE pid_file=$INFRA_MANAGER_PID_FILE" - echo "Run command nohup $JVM -cp '$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*' $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager" - nohup $JVM -cp "$INFRA_MANAGER_CONF_DIR:$INFRA_MANAGER_LIBS_DIR/*" $INFRA_MANAGER_GC_OPTS $INFRA_MANAGER_JAVA_OPTS $INFRA_MANAGER_JAVA_MEM org.apache.ambari.infra.InfraManager > $LOG_FILE 2>&1 & - fi -} - -function stop() { - INFRA_MANAGER_STOP_WAIT=3 - if [ -f "$INFRA_MANAGER_PID_FILE" ]; then - INFRA_MANAGER_PID=`cat "$INFRA_MANAGER_PID_FILE"` - fi - - if [ "$INFRA_MANAGER_PID" != "" ]; then - echo -e "Sending stop command to Infra Manager... Checking PID: $INFRA_MANAGER_PID." - kill $INFRA_MANAGER_PID - (loops=0 - while true - do - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $INFRA_MANAGER_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - slept=$((loops * 2)) - if [ $slept -lt $INFRA_MANAGER_STOP_WAIT ]; then - sleep 2 - loops=$[$loops+1] - else - exit # subshell! - fi - else - exit # subshell! - fi - done) & - spinner $! - rm -f "$INFRA_MANAGER_PID_FILE" - else - echo -e "No Infra Manager process found to stop." 
- exit 0 - fi - - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $INFRA_MANAGER_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - echo -e "Infra Manager process $INFRA_MANAGER_PID is still running; forcefully killing it now." - kill -9 $INFRA_MANAGER_PID - echo "Killed process $INFRA_MANAGER_PID" - rm -f "$INFRA_MANAGER_PID_FILE" - sleep 1 - else - echo "Infra Manager is stopped." - fi - - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $INFRA_MANAGER_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - echo "ERROR: Failed to kill Infra Manager Java process $INFRA_MANAGER_PID ... script fails." - exit 1 - fi -} - -if [ $# -gt 0 ]; then - SCRIPT_CMD="$1" - shift -else - print_usage - exit 1 -fi - -case $SCRIPT_CMD in - start) - start ${1+"$@"} - ;; - stop) - stop - ;; - status) - status - ;; - help) - print_usage - exit 0 - ;; - *) - print_usage - exit 1 - ;; - -esac \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml b/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml deleted file mode 100644 index d3db3d7ed36..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml +++ /dev/null @@ -1,44 +0,0 @@ - - - - - target/log/infra-manager.log - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/static/index.html b/ambari-infra/ambari-infra-manager/src/main/resources/static/index.html deleted file mode 100644 index 3e648674c8c..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/static/index.html +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - -

Welcome!
- - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html b/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html deleted file mode 100644 index 4d261e7a279..00000000000 --- a/ambari-infra/ambari-infra-manager/src/main/resources/swagger/swagger.html +++ /dev/null @@ -1,136 +0,0 @@ - - - - - - Infra Manager REST API - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 
-
- - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java deleted file mode 100644 index 26a6953d54a..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/conf/security/CompositePasswordStoreTest.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.apache.ambari.infra.conf.security; - -import org.junit.Test; - -import java.util.Optional; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class CompositePasswordStoreTest { - @Test - public void testGetPasswordReturnNullIfNoPasswordStoresWereAdded() { - assertThat(new CompositePasswordStore().getPassword("any").isPresent(), is(false)); - } - - @Test - public void testGetPasswordReturnNullIfPasswordNotFoundInAnyStore() { - assertThat(new CompositePasswordStore((prop) -> Optional.empty(), (prop) -> Optional.empty()).getPassword("any").isPresent(), is(false)); - } - - @Test - public void testGetPasswordReturnPasswordFromFirstStoreIfExists() { - assertThat(new CompositePasswordStore((prop) -> Optional.of("Pass"), (prop) -> Optional.empty()).getPassword("any").get(), is("Pass")); - } - - @Test - public void testGetPasswordReturnPasswordFromSecondStoreIfNotExistsInFirst() { - assertThat(new CompositePasswordStore((prop) -> Optional.empty(), (prop) -> Optional.of("Pass")).getPassword("any").get(), is("Pass")); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java deleted file mode 100644 index 3b7caabd02e..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobPropertiesTest.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.apache.ambari.infra.job; - -import org.apache.ambari.infra.job.archive.DocumentArchivingProperties; -import org.apache.ambari.infra.job.archive.SolrProperties; -import org.junit.Test; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class JobPropertiesTest { - @Test - public void testDeepCopy() throws Exception { - DocumentArchivingProperties documentArchivingProperties = new DocumentArchivingProperties(); - documentArchivingProperties.setLocalDestinationDirectory("/tmp"); - documentArchivingProperties.setFileNameSuffixColumn(".json"); - documentArchivingProperties.setReadBlockSize(10); - documentArchivingProperties.setWriteBlockSize(20); - SolrProperties solr = new SolrProperties(); - solr.setZooKeeperConnectionString("localhost:2181"); - solr.setFilterQueryText("id:1167"); - solr.setQueryText("name:'Joe'"); - solr.setCollection("Users"); - solr.setSortColumn(new String[] {"name"}); - documentArchivingProperties.setSolr(solr); - - DocumentArchivingProperties parsed = documentArchivingProperties.deepCopy(); - - assertThat(parsed.getLocalDestinationDirectory(), is(documentArchivingProperties.getLocalDestinationDirectory())); - assertThat(parsed.getFileNameSuffixColumn(), is(documentArchivingProperties.getFileNameSuffixColumn())); - assertThat(parsed.getReadBlockSize(), is(documentArchivingProperties.getReadBlockSize())); - assertThat(parsed.getWriteBlockSize(), is(documentArchivingProperties.getWriteBlockSize())); - assertThat(parsed.getSolr().getZooKeeperConnectionString(), is(documentArchivingProperties.getSolr().getZooKeeperConnectionString())); - assertThat(parsed.getSolr().getQueryText(), is(solr.getQueryText())); - assertThat(parsed.getSolr().getFilterQueryText(), is(solr.getFilterQueryText())); - assertThat(parsed.getSolr().getCollection(), is(solr.getCollection())); - assertThat(parsed.getSolr().getSortColumn(), is(solr.getSortColumn())); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java deleted file mode 100644 index ba1150f77ad..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/JobSchedulerTest.java +++ /dev/null @@ -1,114 +0,0 @@ -package org.apache.ambari.infra.job; - -import org.apache.ambari.infra.manager.Jobs; -import org.easymock.EasyMockRunner; -import org.easymock.EasyMockSupport; -import org.easymock.Mock; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.scheduling.TaskScheduler; -import org.springframework.scheduling.support.CronTrigger; - -import javax.batch.operations.NoSuchJobException; -import java.util.Optional; -import java.util.concurrent.ScheduledFuture; - -import static org.easymock.EasyMock.eq; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; -import static org.easymock.EasyMock.isA; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -@RunWith(EasyMockRunner.class) -public class JobSchedulerTest extends EasyMockSupport { - - @Mock - private TaskScheduler taskScheduler; - @Mock - private Jobs jobs; - @Mock - private ScheduledFuture scheduledFuture; - private JobScheduler jobScheduler; - - @Before - public void setUp() throws Exception { - jobScheduler = new JobScheduler(taskScheduler, jobs); - } - - @After - public void tearDown() throws Exception { - verifyAll(); - } - - @Test(expected = NoSuchJobException.class) - public void testScheduleWhenJobNotExistsThrowsException() throws Exception { - String jobName = "notFoundJob"; - expect(jobs.lastRun(jobName)).andThrow(new NoSuchJobException()); - replayAll(); - - jobScheduler.schedule(jobName, null); - } - - @Test - public void testScheduleWhenNoPreviousExecutionExistsJobIsScheduled() throws Exception { - String jobName = "job0"; - SchedulingProperties schedulingProperties = new SchedulingProperties(); - schedulingProperties.setCron("* * * * * ?"); - expect(jobs.lastRun(jobName)).andReturn(Optional.empty()); - expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture); - replayAll(); - - jobScheduler.schedule(jobName, schedulingProperties); - } - - @Test - public void testScheduleWhenPreviousExecutionWasSuccessfulJobIsScheduled() throws Exception { - String jobName = "job0"; - SchedulingProperties schedulingProperties = new SchedulingProperties(); - schedulingProperties.setCron("* * * * * ?"); - JobExecution jobExecution = new JobExecution(1L, new JobParameters()); - jobExecution.setExitStatus(ExitStatus.COMPLETED); - expect(jobs.lastRun(jobName)).andReturn(Optional.of(jobExecution)); - expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture); - replayAll(); - - jobScheduler.schedule(jobName, schedulingProperties); - } - - @Test - public void testScheduleWhenPreviousExecutionFailedJobIsRestartedAndScheduled() throws Exception { - String jobName = "job0"; - SchedulingProperties schedulingProperties = new SchedulingProperties(); - schedulingProperties.setCron("* * * * * ?"); - JobExecution jobExecution = new JobExecution(1L, new JobParameters()); - jobExecution.setExitStatus(ExitStatus.FAILED); - expect(jobs.lastRun(jobName)).andReturn(Optional.of(jobExecution)); - jobs.restart(1L); expectLastCall(); - expect(taskScheduler.schedule(isA(Runnable.class), eq(new CronTrigger(schedulingProperties.getCron())))).andReturn(scheduledFuture); - replayAll(); - - jobScheduler.schedule(jobName, schedulingProperties); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java 
b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java deleted file mode 100644 index b31110cf84a..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.JobContextRepository; -import org.easymock.EasyMockRunner; -import org.easymock.EasyMockSupport; -import org.easymock.Mock; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.repeat.RepeatStatus; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.HashMap; - -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -@RunWith(EasyMockRunner.class) -public class DocumentExporterTest extends EasyMockSupport { - - private static final long JOB_EXECUTION_ID = 1L; - private static final long STEP_EXECUTION_ID = 1L; - private static final Document DOCUMENT_2 = new Document(new HashMap() {{ - put("id", "2"); - }}); - private static final Document DOCUMENT_3 = new Document(new HashMap() {{ - put("id", "3"); - }}); - private DocumentExporter documentExporter; - @Mock - private ItemStreamReader reader; - @Mock - private DocumentDestination documentDestination; - @Mock - private DocumentItemWriter documentItemWriter; - @Mock - private DocumentItemWriter documentItemWriter2; - @Mock - private DocumentItemWriter documentItemWriter3; - @Mock - private JobContextRepository jobContextRepository; - -// private ExecutionContext executionContext; - private ChunkContext chunkContext; - private static final Document DOCUMENT = new Document(new HashMap() {{ put("id", "1"); }}); - - @Before - public void setUp() throws Exception { - chunkContext = chunkContext(BatchStatus.STARTED); - documentExporter = documentExporter(2); - } - - private DocumentExporter documentExporter(int writeBlockSize) { - return new DocumentExporter(reader, documentDestination, writeBlockSize, jobContextRepository); - } - - private ChunkContext chunkContext(BatchStatus batchStatus) { - 
StepExecution stepExecution = new StepExecution("exportDoc", new JobExecution(JOB_EXECUTION_ID)); - stepExecution.setId(STEP_EXECUTION_ID); - stepExecution.getJobExecution().setStatus(batchStatus); - return new ChunkContext(new StepContext(stepExecution)); - } - - @After - public void tearDown() throws Exception { - verifyAll(); - } - - @Test - public void testNothingToRead() throws Exception { - reader.open(executionContext(chunkContext)); expectLastCall(); - expect(reader.read()).andReturn(null); - reader.close(); expectLastCall(); - replayAll(); - - documentExporter.execute(null, chunkContext); - } - - private ExecutionContext executionContext(ChunkContext chunkContext) { - return chunkContext.getStepContext().getStepExecution().getExecutionContext(); - } - - @Test - public void testWriteLessDocumentsThanWriteBlockSize() throws Exception { - reader.open(executionContext(chunkContext)); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT); - expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter); - documentItemWriter.write(DOCUMENT); expectLastCall(); - expect(reader.read()).andReturn(null); - reader.close(); expectLastCall(); - documentItemWriter.close(); expectLastCall(); - replayAll(); - - assertThat(documentExporter.execute(null, chunkContext), is(RepeatStatus.FINISHED)); - } - - @Test - public void testWriteMoreDocumentsThanWriteBlockSize() throws Exception { - reader.open(executionContext(chunkContext)); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT); - expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter); - documentItemWriter.write(DOCUMENT); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT_2); - documentItemWriter.write(DOCUMENT_2); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT_3); - documentItemWriter.close(); expectLastCall(); - jobContextRepository.updateExecutionContext(chunkContext.getStepContext().getStepExecution()); - expect(jobContextRepository.getStepExecution(JOB_EXECUTION_ID, STEP_EXECUTION_ID)).andReturn(chunkContext.getStepContext().getStepExecution()); - expect(documentDestination.open(DOCUMENT_3)).andReturn(documentItemWriter2); - documentItemWriter2.write(DOCUMENT_3); expectLastCall(); - expect(reader.read()).andReturn(null); - reader.update(executionContext(chunkContext)); - reader.close(); expectLastCall(); - documentItemWriter2.close(); expectLastCall(); - replayAll(); - - assertThat(documentExporter.execute(null, chunkContext), is(RepeatStatus.FINISHED)); - } - - @Test(expected = IOException.class) - public void testReadError() throws Exception { - reader.open(executionContext(chunkContext)); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT); - expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter); - documentItemWriter.write(DOCUMENT); expectLastCall(); - expect(reader.read()).andThrow(new IOException("TEST")); - documentItemWriter.revert(); expectLastCall(); - reader.close(); expectLastCall(); - replayAll(); - - documentExporter.execute(null, chunkContext); - } - - @Test(expected = UncheckedIOException.class) - public void testWriteError() throws Exception { - reader.open(executionContext(chunkContext)); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT); - expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter); - documentItemWriter.write(DOCUMENT); expectLastCall().andThrow(new UncheckedIOException(new IOException("TEST"))); - documentItemWriter.revert(); expectLastCall(); - reader.close(); expectLastCall(); - 
replayAll(); - - documentExporter.execute(null, chunkContext); - } - - @Test - public void testStopAndRestartExportsAllDocuments() throws Exception { - ChunkContext stoppingChunkContext = chunkContext(BatchStatus.STOPPING); - DocumentExporter documentExporter = documentExporter(1); - - reader.open(executionContext(chunkContext)); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT); - - expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter); - documentItemWriter.write(DOCUMENT); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT_2); - expect(jobContextRepository.getStepExecution(JOB_EXECUTION_ID, STEP_EXECUTION_ID)).andReturn(chunkContext.getStepContext().getStepExecution()); - documentItemWriter.close(); expectLastCall(); - reader.update(executionContext(this.chunkContext)); - jobContextRepository.updateExecutionContext(this.chunkContext.getStepContext().getStepExecution()); - - expect(documentDestination.open(DOCUMENT_2)).andReturn(documentItemWriter2); - documentItemWriter2.write(DOCUMENT_2); expectLastCall(); - expect(reader.read()).andReturn(DOCUMENT_3); - expect(jobContextRepository.getStepExecution(JOB_EXECUTION_ID, STEP_EXECUTION_ID)).andReturn(stoppingChunkContext.getStepContext().getStepExecution()); - documentItemWriter2.revert(); expectLastCall(); - reader.close(); expectLastCall(); - - reader.open(executionContext(chunkContext)); - expect(reader.read()).andReturn(DOCUMENT_3); - expect(documentDestination.open(DOCUMENT_3)).andReturn(documentItemWriter3); - documentItemWriter3.write(DOCUMENT_3); expectLastCall(); - documentItemWriter3.close(); expectLastCall(); - - expect(reader.read()).andReturn(null); - reader.close(); expectLastCall(); - replayAll(); - - RepeatStatus repeatStatus = documentExporter.execute(null, this.chunkContext); - assertThat(repeatStatus, is(RepeatStatus.CONTINUABLE)); - repeatStatus = documentExporter.execute(null, this.chunkContext); - assertThat(repeatStatus, is(RepeatStatus.FINISHED)); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java deleted file mode 100644 index 0776c3cf182..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.infra.job.archive; - -import org.apache.ambari.infra.job.CloseableIterator; -import org.apache.ambari.infra.job.ObjectSource; -import org.easymock.EasyMockRunner; -import org.easymock.EasyMockSupport; -import org.easymock.Mock; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.ExecutionContext; - -import java.util.HashMap; - -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsNull.nullValue; -import static org.junit.Assert.assertThat; - -@RunWith(EasyMockRunner.class) -public class DocumentItemReaderTest extends EasyMockSupport { - private static final Document DOCUMENT = new Document(new HashMap() {{ put("id", "1"); }}); - private static final Document DOCUMENT_2 = new Document(new HashMap() {{ put("id", "2"); }}); - private static final Document DOCUMENT_3 = new Document(new HashMap() {{ put("id", "3"); }}); - private static final int READ_BLOCK_SIZE = 2; - - private DocumentItemReader documentItemReader; - @Mock - private ObjectSource documentSource; - @Mock - private CloseableIterator documentIterator; - @Mock - private CloseableIterator documentIterator2; - - @Before - public void setUp() throws Exception { - documentItemReader = new DocumentItemReader(documentSource, READ_BLOCK_SIZE); - } - - @After - public void tearDown() throws Exception { - verifyAll(); - } - - @Test - public void testReadWhenCollectionIsEmpty() throws Exception { - expect(documentSource.open(null, 2)).andReturn(documentIterator); - expect(documentIterator.next()).andReturn(null); - documentIterator.close(); expectLastCall(); - replayAll(); - - assertThat(documentItemReader.read(), is(nullValue())); - assertThat(documentItemReader.isComplete(null), is(true)); - assertThat(documentItemReader.isComplete(null, null), is(true)); - } - - @Test - public void testReadWhenCollectionContainsLessElementsThanReadBlockSize() throws Exception { - expect(documentSource.open(null, 2)).andReturn(documentIterator); - expect(documentIterator.next()).andReturn(DOCUMENT); - expect(documentIterator.next()).andReturn(null); - documentIterator.close(); expectLastCall(); - replayAll(); - - assertThat(documentItemReader.read(), is(DOCUMENT)); - assertThat(documentItemReader.isComplete(null), is(false)); - assertThat(documentItemReader.isComplete(null, null), is(false)); - assertThat(documentItemReader.read(), is(nullValue())); - assertThat(documentItemReader.isComplete(null), is(true)); - assertThat(documentItemReader.isComplete(null, null), is(true)); - } - - @Test - public void testReadWhenCollectionContainsExactlySameCountElementsAsReadBlockSize() throws Exception { - expect(documentSource.open(null, 2)).andReturn(documentIterator); - expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2); - expect(documentIterator.next()).andReturn(DOCUMENT); - expect(documentIterator.next()).andReturn(DOCUMENT_2); - expect(documentIterator.next()).andReturn(null); - documentIterator.close(); expectLastCall(); - - expect(documentIterator2.next()).andReturn(null); - documentIterator2.close(); expectLastCall(); - replayAll(); - - assertThat(documentItemReader.read(), is(DOCUMENT)); - assertThat(documentItemReader.isComplete(null), is(false)); - assertThat(documentItemReader.isComplete(null, null), is(false)); - assertThat(documentItemReader.read(), is(DOCUMENT_2)); - 
assertThat(documentItemReader.isComplete(null), is(false)); - assertThat(documentItemReader.isComplete(null, null), is(false)); - assertThat(documentItemReader.read(), is(nullValue())); - assertThat(documentItemReader.isComplete(null), is(true)); - assertThat(documentItemReader.isComplete(null, null), is(true)); - } - - @Test - public void testReadWhenCollectionContainsMoreElementsThanReadBlockSize() throws Exception { - Document document3 = new Document(new HashMap() {{ put("id", "2"); }}); - - expect(documentSource.open(null, 2)).andReturn(documentIterator); - expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2); - expect(documentIterator.next()).andReturn(DOCUMENT); - expect(documentIterator.next()).andReturn(DOCUMENT_2); - expect(documentIterator.next()).andReturn(null); - documentIterator.close(); expectLastCall(); - expect(documentIterator2.next()).andReturn(document3); - expect(documentIterator2.next()).andReturn(null); - documentIterator2.close(); expectLastCall(); - - replayAll(); - - assertThat(documentItemReader.read(), is(DOCUMENT)); - assertThat(documentItemReader.isComplete(null), is(false)); - assertThat(documentItemReader.isComplete(null, null), is(false)); - - assertThat(documentItemReader.read(), is(DOCUMENT_2)); - assertThat(documentItemReader.isComplete(null), is(false)); - assertThat(documentItemReader.isComplete(null, null), is(false)); - - assertThat(documentItemReader.read(), is(document3)); - assertThat(documentItemReader.isComplete(null), is(false)); - assertThat(documentItemReader.isComplete(null, null), is(false)); - - assertThat(documentItemReader.read(), is(nullValue())); - assertThat(documentItemReader.isComplete(null), is(true)); - assertThat(documentItemReader.isComplete(null, null), is(true)); - } - - @Test - public void testContinueWhenOnlyFirstElementWasRead() throws Exception { - expect(documentSource.open(null, 2)).andReturn(documentIterator); - expect(documentIterator.next()).andReturn(DOCUMENT); - documentIterator.close(); expectLastCall(); - expect(documentSource.open(null, 2)).andReturn(documentIterator2); - expect(documentIterator2.next()).andReturn(DOCUMENT); - documentIterator2.close(); expectLastCall(); - replayAll(); - - ExecutionContext executionContext = new ExecutionContext(); - documentItemReader.open(executionContext); - assertThat(documentItemReader.read(), is(DOCUMENT)); - documentItemReader.update(executionContext); - assertThat(executionContext.containsKey(DocumentItemReader.POSITION), is(false)); - documentItemReader.close(); - - documentItemReader.open(executionContext); - assertThat(documentItemReader.read(), is(DOCUMENT)); - documentItemReader.close(); - } - - @Test - public void testContinueWhenMoreThanOneElementWasRead() throws Exception { - expect(documentSource.open(null, 2)).andReturn(documentIterator); - expect(documentIterator.next()).andReturn(DOCUMENT); - expect(documentIterator.next()).andReturn(DOCUMENT_2); - documentIterator.close(); expectLastCall(); - expect(documentSource.open(DOCUMENT, 2)).andReturn(documentIterator2); - expect(documentIterator2.next()).andReturn(DOCUMENT_2); - expect(documentIterator2.next()).andReturn(DOCUMENT_3); - documentIterator2.close(); expectLastCall(); - - replayAll(); - - ExecutionContext executionContext = new ExecutionContext(); - documentItemReader.open(executionContext); - assertThat(documentItemReader.read(), is(DOCUMENT)); - assertThat(documentItemReader.read(), is(DOCUMENT_2)); - documentItemReader.update(executionContext); - 
assertThat(executionContext.get(DocumentItemReader.POSITION), is(DOCUMENT)); - documentItemReader.close(); - - documentItemReader.open(executionContext); - assertThat(documentItemReader.read(), is(DOCUMENT_2)); - assertThat(documentItemReader.read(), is(DOCUMENT_3)); - documentItemReader.close(); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java deleted file mode 100644 index cca2c1a503e..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java +++ /dev/null @@ -1,58 +0,0 @@ -package org.apache.ambari.infra.job.archive; - -import org.junit.Test; - -import java.util.HashMap; - -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -public class FileNameSuffixFormatterTest { - - private FileNameSuffixFormatter formatter = new FileNameSuffixFormatter("logtime", "yyyy-MM-dd'T'hh-mm-ss-SSSX"); - - @Test(expected = NullPointerException.class) - public void testFormatWhenDocumentIsNullThrowingException() throws Exception { - formatter.format((Document) null); - } - - @Test(expected = IllegalArgumentException.class) - public void testFormatWhenSpecifiedColumnDoesNotExistsInTheDocumentThrowingException() throws Exception { - formatter.format(new Document(new HashMap<>())); - } - - @Test(expected = IllegalArgumentException.class) - public void testFormatWhenSpecifiedColumnContainsBlankValueThrowingException() throws Exception { - formatter.format(new Document(new HashMap() {{ put("logtime", " "); }})); - } - - @Test - public void testFormatWhenNoDateFormatSpecifiedRawColumnValueReturned() throws Exception { - FileNameSuffixFormatter formatter = new FileNameSuffixFormatter("logtime", null); - assertThat(formatter.format(new Document(new HashMap() {{ put("logtime", "Monday"); }})), is("Monday")); - } - - @Test - public void testFormatWhenDateFormatIsSpecifiedAFormattedValueReturned() throws Exception { - assertThat(formatter.format(new Document(new HashMap() {{ put("logtime", "2017-12-15T10:12:33.453Z"); }})), is("2017-12-15T10-12-33-453Z")); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java deleted file mode 100644 index 85e79e18277..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.infra.job.archive; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.io.FileUtils; -import org.easymock.EasyMockRunner; -import org.easymock.EasyMockSupport; -import org.easymock.Mock; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; - -import static org.easymock.EasyMock.cmp; -import static org.easymock.EasyMock.expectLastCall; -import static org.easymock.LogicalOperator.EQUAL; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - -@RunWith(EasyMockRunner.class) -public class LocalDocumentItemWriterTest extends EasyMockSupport { - - private static final Document DOCUMENT = new Document(new HashMap() {{ put("id", "1"); }}); - private static final Document DOCUMENT2 = new Document(new HashMap() {{ put("id", "2"); }}); - private static final Document DOCUMENT3 = new Document(new HashMap() {{ put("id", "3"); }}); - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - private LocalDocumentItemWriter localDocumentItemWriter; - private File outFile; - @Mock - private ItemWriterListener itemWriterListener; - - @Before - public void setUp() throws Exception { - outFile = File.createTempFile("LocalDocumentItemWriterTest", "json.tmp"); - localDocumentItemWriter = new LocalDocumentItemWriter(outFile, itemWriterListener); - } - - @After - public void tearDown() throws Exception { - outFile.delete(); - verifyAll(); - } - - @Test - public void testWrite() throws Exception { - itemWriterListener.onCompleted( - cmp(new WriteCompletedEvent(outFile, DOCUMENT, DOCUMENT3), writeCompletedEventEqualityComparator(), EQUAL)); expectLastCall(); - replayAll(); - - localDocumentItemWriter.write(DOCUMENT); - localDocumentItemWriter.write(DOCUMENT2); - localDocumentItemWriter.write(DOCUMENT3); - localDocumentItemWriter.close(); - - List documentList = readBack(outFile); - assertThat(documentList.size(), is(3)); - assertThat(documentList.get(0).get("id"), is(DOCUMENT.get("id"))); - assertThat(documentList.get(1).get("id"), is(DOCUMENT2.get("id"))); - assertThat(documentList.get(2).get("id"), is(DOCUMENT3.get("id"))); - } - - private Comparator writeCompletedEventEqualityComparator() { - return (o1, o2) -> { - if (o1.getOutFile().equals(o2.getOutFile()) && - o1.getFirstDocument().equals(o2.getFirstDocument()) && - o1.getLastDocument().equals(o2.getLastDocument())) - return 0; - return 1; - }; - } - - private List readBack(File file) throws IOException { - List documentList = new ArrayList<>(); - for (String line : FileUtils.readLines(file)) { - documentList.add(OBJECT_MAPPER.readValue(line, Document.class)); - } - return documentList; - } - - @Test - public void testRevert() throws Exception { - replayAll(); - - localDocumentItemWriter.write(DOCUMENT); - localDocumentItemWriter.revert(); - - assertThat(outFile.exists(), is(false)); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java deleted file mode 100644 index e34a222cd70..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/S3AccessCsvTest.java +++ /dev/null @@ -1,70 +0,0 
@@ -package org.apache.ambari.infra.job.archive; - -import org.junit.Test; - -import java.io.StringReader; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class S3AccessCsvTest { - - private static final String VALID_ACCESS_FILE = "Access key ID,Secret access key\n" + - "someKey,someSecret\n"; - - private static final String ANY_CSV_FILE = "Column1,Column2\n" + - "Foo,Bar\n"; - - @Test - public void testGetPasswordReturnsNullIfInputIsEmpty() { - S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("")); - assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false)); - assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false)); - } - - @Test - public void testGetPasswordReturnsAccessAndSecretKeyIfInputIsAValidS3AccessFile() { - S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(VALID_ACCESS_FILE)); - assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).get(), is("someKey")); - assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).get(), is("someSecret")); - } - - @Test - public void testGetPasswordReturnsNullIfNotAValidS3AccessFileProvided() { - S3AccessCsv accessCsv = new S3AccessCsv(new StringReader(ANY_CSV_FILE)); - assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false)); - assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false)); - } - - @Test - public void testGetPasswordReturnsNullIfAHeaderOnlyS3AccessFileProvided() { - S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Secret access key\n")); - assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false)); - assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false)); - } - - @Test - public void testGetPasswordReturnsNullIfOnlyOneValidColumnProvided() { - S3AccessCsv accessCsv = new S3AccessCsv(new StringReader("Access key ID,Column\n")); - assertThat(accessCsv.getPassword(S3AccessKeyNames.AccessKeyId.getEnvVariableName()).isPresent(), is(false)); - assertThat(accessCsv.getPassword(S3AccessKeyNames.SecretAccessKey.getEnvVariableName()).isPresent(), is(false)); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java deleted file mode 100644 index 
018c993b429..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.apache.ambari.infra.job.archive; - -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class SolrParametrizedStringTest { - - private static final Map PARAMETERS_1 = new HashMap() {{ put("id", "1"); put("name", "User"); put("product", "Computer"); }}; - private static final Map PARAMETERS_START = new HashMap() {{ put("price", "1000"); }}; - private static final Map PARAMETERS_END = new HashMap() {{ put("price", "2000"); }}; - - @Test - public void testToStringEmptyStringResultsEmptyString() { - assertThat(new SolrParametrizedString("").set(PARAMETERS_1).toString(), is("")); - } - - @Test - public void testParameterlessStringResultsItself() { - assertThat(new SolrParametrizedString("Hello World!").set(PARAMETERS_1).toString(), is("Hello World!")); - } - - @Test - public void testParametersAreReplacedIfFoundInString() { - assertThat(new SolrParametrizedString("Hello ${name}!").set(PARAMETERS_1).toString(), is("Hello User!")); - } - - @Test - public void testWhenStringContainsPrefixedParamtersOnlyPrefixedParametersAreSet() { - assertThat(new SolrParametrizedString("The ${product} price is between $${start.price} and $${end.price}.") - .set(PARAMETERS_1) - .set("start", PARAMETERS_START) - .set("end", PARAMETERS_END).toString(), is("The Computer price is between $1000 and $2000.")); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java deleted file mode 100644 index be8a2260f96..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrPropertiesTest.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.apache.ambari.infra.job.archive; - -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; - -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class SolrPropertiesTest { - @Test - public void testApplySortColumns() throws Exception { - JobParameters jobParameters = new JobParametersBuilder() - .addString("sortColumn[0]", "logtime") - .addString("sortColumn[1]", "id") - .toJobParameters(); - - SolrProperties solrProperties = new SolrProperties(); - solrProperties.setSortColumn(new String[] {"testColumn"}); - solrProperties.apply(jobParameters); - assertThat(solrProperties.getSortColumn().length, is(2)); - assertThat(solrProperties.getSortColumn()[0], is("logtime")); - assertThat(solrProperties.getSortColumn()[1], is("id")); - } - - @Test - public void testApplyWhenNoSortIsDefined() throws Exception { - JobParameters jobParameters = new JobParametersBuilder() - .toJobParameters(); - - SolrProperties solrProperties = new SolrProperties(); - solrProperties.setSortColumn(new String[] {"testColumn"}); - solrProperties.apply(jobParameters); - assertThat(solrProperties.getSortColumn().length, is(1)); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java deleted file mode 100644 index ee0827965c2..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.job.archive; - -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Test; - -import java.util.HashMap; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; - -public class SolrQueryBuilderTest { - private static final Document DOCUMENT = new Document(new HashMap() {{ - put("logtime", "2017-10-02'T'10:00:11.634Z"); - put("id", "1"); - }}); - - @Test - public void testDefaultQuery() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder() - .build(); - assertThat(solrQuery.getQuery(), is("*:*")); - } - - @Test - public void testSetQueryReplacesTheDefaultQueryTextAndParameterPlaceholdersAreReplacedToValues() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder() - .setQueryText("logtime:[* TO ${end}]") - .setInterval(null, "2017-11-27'T'10:12:11.372Z") - .build(); - assertThat(solrQuery.getQuery(), is("logtime:[* TO 2017\\-11\\-27'T'10\\:12\\:11.372Z]")); - } - - @Test - public void testSetFilterQueryAddsAFilterQueryAndParameterPlaceholdersAreReplacedToValues() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder() - .setFilterQueryText("(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]") - .setDocument(DOCUMENT) - .setInterval(null, "2017-11-27'T'10:12:11.372Z") - .build(); - assertThat(solrQuery.getFilterQueries()[0], is( "(logtime:2017\\-10\\-02'T'10\\:00\\:11.634Z AND id:{1 TO *]) OR logtime:{2017\\-10\\-02'T'10\\:00\\:11.634Z TO 2017\\-11\\-27'T'10\\:12\\:11.372Z]")); - } - - @Test - public void testSetFilterQueryWhenDocumentIsNull() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder() - .setFilterQueryText("(logtime:\"${logtime}\" AND id:{\"${id}\" TO *]) OR logtime:{\"${logtime}\" TO \"${end}\"]") - .setInterval(null, "2017-11-27'T'10:12:11.372Z") - .build(); - assertThat(solrQuery.getFilterQueries(), is(nullValue())); - } - - @Test - public void testNullEndValueDoesNotAffectFilterQuery() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder() - .setFilterQueryText("logtime:${logtime} AND id:{${id} TO *]") - .setDocument(DOCUMENT) - .build(); - assertThat(solrQuery.getFilterQueries()[0], is("logtime:2017\\-10\\-02'T'10\\:00\\:11.634Z AND id:{1 TO *]")); - } - - @Test - public void testSetFilterQueryWhenQueryFilterIsNullButDocumentIsNot() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder() - .setDocument(DOCUMENT) - .build(); - assertThat(solrQuery.getFilterQueries(), is(nullValue())); - } - - @Test - public void testSort() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder().addSort("logtime", "id").build(); - assertThat(solrQuery.getSorts().get(0).getItem(), is("logtime")); - assertThat(solrQuery.getSorts().get(1).getItem(), is("id")); - } - - @Test - public void test_start_and_end_values_are_given() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder().setQueryText("id:[\"${start}\" TO \"${end}\"]").setInterval("10", "13").build(); - assertThat(solrQuery.getQuery(), is("id:[\"10\" TO \"13\"]")); - } - - @Test - public void test_start_and_end_values_are_null() throws Exception { - SolrQuery solrQuery = new SolrQueryBuilder().setQueryText("id:[${start} TO ${end}]").build(); - assertThat(solrQuery.getQuery(), is("id:[* TO *]")); - } -} diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryPropertiesTest.java 
b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryPropertiesTest.java deleted file mode 100644 index 322775e54b5..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryPropertiesTest.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.apache.ambari.infra.job.archive; - -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; - -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class SolrQueryPropertiesTest { - @Test - public void testApplySortColumns() throws Exception { - JobParameters jobParameters = new JobParametersBuilder() - .addString("sortColumn[0]", "logtime") - .addString("sortColumn[1]", "id") - .toJobParameters(); - - SolrQueryProperties solrQueryProperties = new SolrQueryProperties(); - solrQueryProperties.setSortColumn(new String[] {"testColumn"}); - solrQueryProperties.apply(jobParameters); - assertThat(solrQueryProperties.getSortColumn().length, is(2)); - assertThat(solrQueryProperties.getSortColumn()[0], is("logtime")); - assertThat(solrQueryProperties.getSortColumn()[1], is("id")); - } - - @Test - public void testApplyWhenNoSortIsDefined() throws Exception { - JobParameters jobParameters = new JobParametersBuilder() - .toJobParameters(); - - SolrQueryProperties solrQueryProperties = new SolrQueryProperties(); - solrQueryProperties.setSortColumn(new String[] {"testColumn"}); - solrQueryProperties.apply(jobParameters); - assertThat(solrQueryProperties.getSortColumn().length, is(1)); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-manager/src/test/resoruces/vagrant-infra-manager.properties.sample b/ambari-infra/ambari-infra-manager/src/test/resoruces/vagrant-infra-manager.properties.sample deleted file mode 100644 index d722f0ef6c7..00000000000 --- a/ambari-infra/ambari-infra-manager/src/test/resoruces/vagrant-infra-manager.properties.sample +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -infra-manager.batch.db.file=job-repository.db -infra-manager.batch.db.init=true -infra-manager.batch.db.username=admin -infra-manager.batch.db.password=admin -management.security.enabled=false -management.health.solr.enabled=false -infra-manager.server.data.folder=/tmp/ambariInfraManager - -infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=c6401.ambari.apache.org:2181/infra-solr -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logtime:[${start} TO ${end}] -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime} AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}] -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id -infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime} TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}]) -infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=2000 -infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=1000 -infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=HDFS -infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_column=logtime -infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX -infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_endpoint=hdfs://c6401.ambari.apache.org:8020 -infra-manager.jobs.solr_data_archiving.archive_service_logs.hdfs_destination_directory=/archived_service_logs -# Note: set hdfs user using the HADOOP_USER_NAME environmental variable. Value: hdfs \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/README.md b/ambari-infra/ambari-infra-solr-client/README.md deleted file mode 100644 index 2b6d004cfa5..00000000000 --- a/ambari-infra/ambari-infra-solr-client/README.md +++ /dev/null @@ -1,1060 +0,0 @@ - - -## Ambari Infra Solr Client - -CLI helper tool(s) for Ambari Infra Solr. - -### Post Ambari Server Upgrade (Ambari 2.7.x) - -Ambari Infra Solr uses Solr 7 from Ambari 2.7.0, therefore it is required migrate Solr 5 index (Ambari Infra 2.6.x), if you want to keep your old data. (otherwise backup part can be skipped) - -#### Contents: -- [I. Upgrade Ambari Infra Solr Clients](#i.-upgrade-ambari-infra-solr-client) -- [II. Gather required Ambari and Solr parameters](#0-gather-params) -- [III. Backup Solr Collections](#ii.-backup-collections-(ambari-2.6.x-to-ambari-2.7.x)) - - a.) If you have Ranger Ambari service with Solr audits: - - [1. Backup Ranger collection](#ii/1.-backup-ranger-collection) - - [2. Backup Ranger configs on Solr ZNode](#ii/2.-backup-ranger-configs-on-solr-znode) - - [3. Delete Ranger collection](#ii/3.-delete-ranger-collection) - - [4. 
Upgrade Ranger Solr schema](#ii/4.-upgrade-ranger-solr-schema) - - b.) If you have Atlas Ambari service: - - [5. Backup Atlas collections](#ii/5.-backup-atlas-collections) - - [6. Delete Atlas collections](#ii/6.-delete-atlas-collections) - - c.) If you have Log Search Ambari service: - - [7. Delete Log Search collections](#ii/7.-delete-log-search-collections) - - [8. Delete Log Search Solr configs](#ii/8.-delete-log-search-solr-configs) -- [IV. Upgrade Ambari Infra Solr package](#iii.-upgrade-infra-solr-packages) -- [V. Re-create Solr Collections](#iv.-re-create-collections) -- [VI. Migrate Solr Collections](#v.-migrate-solr-collections) - - a.) If you have Ranger Ambari service with Solr audits: - - [1. Migrate Ranger Solr collection](#v/1.-migrate-ranger-collections) - - b.) If you have Atlas Ambari service: - - [2. Migrate Atlas Solr collections](#v/2.-migrate-atlas-collections) -- [VII. Restore Solr Collections](#vi.-restore-collections) - - a.) If you have Ranger Ambari service with Solr audits: - - [1. Restore old Ranger collection](#vi/1.-restore-old-ranger-collection) - - b.) If you have Atlas Ambari service: - - [4. Restore old Atlas collections](#vi/4.-restore-old-atlas-collections) -- [VIII. Restart Solr Instances](#vii.-restart-infra-solr-instances) -- [IX. Transport old data to new collections](#viii.-transport-old-data-to-new-collections) - - a.) If you have Ranger Ambari service with Solr audits: - - [1. Transport old data to Ranger collection](#viii/1.-transport-old-data-to-ranger-collection) - - b.) If you have Atlas Ambari service: - - [2. Transport old data to Atlas collections](#viii/2.-transport-old-data-to-atlas-collections) -- [Happy Path](#happy-path) -- [APPENDIX](#appendix) - -### I. Upgrade Ambari Infra Solr Client - -##### Prerequisites: -- Upgrade Ambari server -- Make sure Solrs are up and running -- Do NOT restart Infra Solr after Ambari server upgrade (if you do, see [this](#if-solr-restarted)) -- There will be a small time window between backing up and deleting the collections - Ranger plugins will operate during that time, which means you can lose data during that period. If that is a problem, you can temporarily enable auditing to HDFS for that time in order to avoid it. -First make sure `ambari-infra-solr-client` is the latest version (if it is before 2.7.x). It will contain the migrationHelper.py script at the `/usr/lib/ambari-infra-solr-client` location. -Also make sure you do not upgrade `ambari-infra-solr` until the migration is done. (All of this should happen after the `ambari-server` upgrade; also make sure not to restart the `INFRA_SOLR` instances.) -To upgrade `ambari-infra-solr-client`, ssh into a host (where an `ambari-infra-solr` instance is located as well): - -```bash -# For RHEL/CentOS/Oracle Linux: - -yum clean all -yum upgrade ambari-infra-solr-client - -# For SLES: - -zypper clean -zypper up ambari-infra-solr-client - -# For Ubuntu/Debian: - -apt-get clean all -apt-get update -apt-get install ambari-infra-solr-client -``` - -You will need to repeat that step on every other host where `ambari-infra-solr-client` is installed; optionally, you can skip the ambari-infra-solr-client upgrade on all hosts and do it after the end of the next step, see [here](#automatic-upgrade-ambari-infra-solr-client). - -### II. Gather required Ambari and Solr parameters - -At the start, it is required to create a proper configuration input for the migration helper script. 
That can be done with the [/usr/lib/ambari-infra-solr-client/migrationConfigGenerator.py](#migration-config-generator) script. Choose one of the Solr server hosts, ssh there, and run it (with the proper ambari-server configurations as flags): - -```bash -# use a sudoer user for running the script !! -CONFIG_INI_LOCATION=ambari_solr_migration.ini # output of the script with required parameters for migrationHelper.py -# note 1: use -s if ambari-server uses https -# note 2: use --shared-drive if the backup location is shared for different hosts -# note 3: use --hdfs-base-path if the index data is located on hdfs (or --ranger-hdfs-base-path if only the ranger collection is located there), e.g.: /user/infra-solr -/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationConfigGenerator.py --ini-file $CONFIG_INI_LOCATION --host c7401.ambari.apache.org --port 8080 --cluster cl1 --username admin --password admin --backup-base-path=/my/path --java-home /usr/jdk64/jdk1.8.0_112 -``` - -Some important flags that can be added at this point: -- `--shared-drive` : use this flag if the location of the backup is shared between hosts (it will generate the index location per host, therefore the migration commands can run in parallel on different hosts) -- `--backup-base-path`: base path of the backup, e.g. if you provide `/my/path`, the backup locations will be `/my/path/ranger` and `/my/path/atlas`; if the base path is not the same for these, you can provide Ranger- or Atlas-specific ones with `--ranger-backup-base-path` and `--atlas-backup-base-path` -- `--hdfs-base-path`: use this if the index is stored on HDFS (that does not mean that the backup is stored on HDFS, it is only the index location). It is applied to all indices; most of the time it is only needed for Ranger, so in that case use `--ranger-hdfs-base-path` instead of this option. The value is usually `/user/infra-solr`, which means the collection itself could be at the `hdfs:///user/infra-solr/ranger_audits` location. -(IMPORTANT NOTE: if the Ranger index is stored on HDFS, make sure to use the proper `-Dsolr.hdfs.security.kerberos.principal` in the `infra-solr-env/content` config; by default it points to the Infra Solr principal, but if it was set to something else before, it needs to be changed accordingly.) - -The generated config file output could look like this: -```ini -[ambari_server] -host = c7401.ambari.apache.org -port = 8080 -cluster = cl1 -protocol = http -username = admin -password = admin - -[local] -java_home = /usr/jdk64/jdk1.8.0_112/ -hostname = c7402.ambari.apache.org -shared_drive = false - -[cluster] -kerberos_enabled = true - -[infra_solr] -protocol = http -hosts = c7402.ambari.apache.org,c7403.ambari.apache.org -port = 8886 -zk_connect_string = c7401.ambari.apache.org:2181 -znode = /infra-solr -user = infra-solr -keytab = /etc/security/keytabs/ambari-infra-solr.service.keytab -principal = infra-solr/c7402.ambari.apache.org -zk_principal_user = zookeeper - -[ranger_collection] -enabled = true -ranger_config_set_name = ranger_audits -ranger_collection_name = ranger_audits -ranger_collection_shards = 2 -ranger_collection_max_shards_per_node = 4 -backup_ranger_config_set_name = old_ranger_audits -backup_ranger_collection_name = old_ranger_audits -backup_path = /my/path/ranger - -[atlas_collections] -enabled = true -config_set = atlas_configs -fulltext_index_name = fulltext_index -fulltext_index_shards = 2 -fulltext_index_max_shards_per_node = 4 -edge_index_name = edge_index -edge_index_shards = 2 -edge_index_max_shards_per_node = 4 -vertex_index_name = 
vertex_index -vertex_index_shards = 2 -vertex_index_max_shards_per_node = 4 -backup_fulltext_index_name = old_fulltext_index -backup_edge_index_name = old_edge_index -backup_vertex_index_name = old_vertex_index -backup_path = /my/path/atlas - -[logsearch_collections] -enabled = true -hadoop_logs_collection_name = hadoop_logs -audit_logs_collection_name = audit_logs -history_collection_name = history -``` -(NOTE: if Infra Solr is external from the Ranger perspective and the Solr instances are not located in the cluster, migrationConfigGenerator.py needs to be executed on the Infra Solr cluster; in that case it won't find the Ranger service, so you will need to fill in the Ranger parameters in the configuration ini file manually.) - -After the file has been created successfully by the script, review the configuration (e.g.: if one of the Solr instances is not up yet and you do not want to use its REST API for operations, you can remove its host from the hosts of the infra_solr section, or you can change backup locations for different collections, etc.). Also, if it is not required to back up e.g. the Atlas collections (so you are ok to drop those), you can change the `enabled` config of that collections section to `false`. - -[![asciicast](https://asciinema.org/a/188260.png)](https://asciinema.org/a/188260?speed=2) - -##### (Optional) Upgrade All ambari-infra-solr packages - -If you did not upgrade the ambari-infra-solr-client packages on all hosts, you can do that from the host where you are by sending a command to Ambari that does it on every host where an `INFRA_SOLR_CLIENT` component is located: - -```bash -CONFIG_INI_LOCATION=ambari_solr_migration.ini -/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-clients -``` - -### III. Backup collections (Ambari 2.6.x to Ambari 2.7.x) - -##### Prerequisites: -- Check that the Solr instances are running and also make sure you have stable shards (at least one core is up and running) -- Have enough space on the disks to store the Solr backup data - -The backup process contains a few steps: back up the Ranger configs on the znode, back up the collections, delete the Log Search znodes, then upgrade the `managed-schema` znode for Ranger. -These tasks can be done with one [migrationHelper.py](#solr-migration-helper-script) command: - -```bash -# use a sudoer user for running the script !! -# first (optionally) you can check that there are ACTIVE replicas for all the shards -/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action check-shards -# then run backup-and-cleanup ... you can run these actions separately with the actions: 'backup', 'delete-collections', 'cleanup-znodes' -/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup-and-cleanup -``` - -If the script finished successfully and everything looks green on the Ambari UI as well, you can go ahead with the [Infra Solr package upgrade](#iii.-upgrade-infra-solr-packages). Otherwise (or if you want to go step by step instead of the command above) you have the option to run the tasks step by step (or manually as well). Those tasks are found in the next sections. - -[![asciicast](https://asciinema.org/a/187421.png)](https://asciinema.org/a/187421?speed=2) - -#### III/1. 
-
-The backup process consists of a few steps: backing up the ranger configs znode, backing up the collections, deleting the Log Search znodes, then upgrading the `managed-schema` znode for Ranger.
-These tasks can be done with a single [migrationHelper.py](#solr-migration-helper-script) command:
-
-```bash
-# use a sudoer user for running the script !!
-# first (optionally) you can check that there are ACTIVE replicas for all the shards
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action check-shards
-# then run backup-and-cleanup ... you can also run these actions separately with the 'backup', 'delete-collections' and 'cleanup-znodes' actions
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup-and-cleanup
-```
-
-If the script finishes successfully and everything looks green on the Ambari UI as well, you can go ahead with the [Infra Solr package upgrade](#iii.-upgrade-infra-solr-packages). Otherwise (or if you want to go step by step instead of using the command above) you have the option to run the tasks one by one (or manually as well). Those tasks are described in the next sections.
-
-[![asciicast](https://asciinema.org/a/187421.png)](https://asciinema.org/a/187421?speed=2)
-
-#### III/1. Backup Ranger collection
-
-The [migrationHelper.py](#solr-migration-helper-script) script can be used to back up only the Ranger collection (use the `-s` option to filter on services):
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup -s RANGER
-```
-
-You can also do the backup manually on every Solr node, using the [backup API of Solr](https://lucene.apache.org/solr/guide/6_6/making-and-restoring-backups.html). (Run it against core names, not the collection name; it works as expected only if you have 1 shard on every node.)
-
-Example:
-```bash
-
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-# collection parameters
-BACKUP_PATH=... # backup location, e.g.: /tmp/ranger-backup
-
-# RUN THIS FOR EVERY CORE ON SPECIFIC HOSTS !!!
-BACKUP_CORE=... # specific core on a host
-BACKUP_CORE_NAME=... # core names for backup -> /
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-mkdir -p $BACKUP_PATH
-
-curl --negotiate -k -u : "$SOLR_URL/$BACKUP_CORE/replication?command=BACKUP&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-
-(help: [get core names](#get-core-/-shard-names-with-hosts))
-
-#### III/2. Backup Ranger configs on Solr ZNode
-
-Next, you can copy the `ranger_audits` configs to a different znode, in order to keep the old schema.
-
-```bash
-export JAVA_HOME=/usr/jdk64/1.8.0_112 # or other jdk8 location
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-# note 1: --transfer-mode copyToLocal or --transfer-mode copyFromLocal can be used if you want to use the local filesystem
-# note 2: use the --jaas-file option only if the cluster is kerberized
-infra-solr-cloud-cli --transfer-znode -z $ZK_CONN_STR --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf --copy-src /infra-solr/configs/ranger_audits --copy-dest /infra-solr/configs/old_ranger_audits
-```
-
-#### III/3. Delete Ranger collection
-
-At this point you can delete the actual Ranger collection with this command:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections -s RANGER
-```
-
-Or do it manually via the Solr API:
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-COLLECTION_NAME=ranger_audits
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-```
-
-#### III/4. Upgrade Ranger Solr schema
-
-Before creating the new Ranger collection, it is required to upgrade the `managed-schema` configs.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action cleanup-znodes -s RANGER
-```
-
-It can be done manually with `infra-solr-cloud-cli` as well:
-
-```bash
-sudo -u infra-solr -i
-
-# If kerberos is enabled
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-## BACKUP OLD CONFIG
-export JAVA_HOME=/usr/jdk64/1.8.0_112 # or other jdk8 location
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-# note: --transfer-mode copyToLocal or --transfer-mode copyFromLocal can be used if you want to use the local filesystem
-infra-solr-cloud-cli --transfer-znode -z $ZK_CONN_STR --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf --copy-src /infra-solr/configs/ranger_audits --copy-dest /infra-solr/configs/old_ranger_audits
-## UPLOAD NEW SCHEMA
-# Setup env for zkcli.sh
-source /etc/ambari-infra-solr/conf/infra-solr-env.sh
-# Run this command only if kerberos is enabled.
-export SOLR_ZK_CREDS_AND_ACLS="${SOLR_AUTHENTICATION_OPTS}"
-
-# Upload the new schema
-/usr/lib/ambari-infra-solr/server/scripts/cloud-scripts/zkcli.sh --zkhost "${ZK_HOST}" -cmd putfile /configs/ranger_audits/managed-schema /usr/lib/ambari-infra-solr-client/migrate/managed-schema
-```
-
-#### III/5. Backup Atlas collections
-
-Atlas has 3 collections: fulltext_index, edge_index, vertex_index.
-You will need to do the same steps that you did for Ranger; the only difference is that you will need to filter on the ATLAS service.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup -s ATLAS
-```
-
-You can also do the backup manually on every Solr node, using the [backup API of Solr](https://lucene.apache.org/solr/guide/6_6/making-and-restoring-backups.html). (Run it against core names, not the collection name; it works as expected only if you have 1 shard on every node.)
-
-Example:
-```bash
-
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-# collection parameters
-BACKUP_PATH=... # backup location, e.g.: /tmp/fulltext_index_backup
-
-# RUN THIS FOR EVERY CORE ON SPECIFIC HOSTS !!!
-BACKUP_CORE=... # specific core on a host
-BACKUP_CORE_NAME=... # core names for backup -> /
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-mkdir -p $BACKUP_PATH
-
-curl --negotiate -k -u : "$SOLR_URL/$BACKUP_CORE/replication?command=BACKUP&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-(help: [get core names](#get-core-/-shard-names-with-hosts))
-
-#### III/6. Delete Atlas collections
-
-The next step for Atlas is to delete all 3 old collections. It can be done with the `delete-collections` action and the ATLAS filter.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections -s ATLAS
-```
-
-Or manually run the DELETE operation with 3 Solr API calls, one for each Atlas collection:
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-COLLECTION_NAME=fulltext_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=edge_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=vertex_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-```
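-
-Before moving on, it is worth double-checking that the old collections are really gone; one way (a sketch, using the same placeholder URL and keytab conventions as above) is to list the collections that are still registered via the Collections API:
-
-```bash
-su infra-solr
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-# the deleted collections should no longer show up in the response
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=LIST&wt=json"
-```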
-
-#### III/7. Delete Log Search collections
-
-For Log Search, it is a must to delete all the old collections. This can be done in a similar way as for Ranger or Atlas:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections -s LOGSEARCH
-```
-Or manually run the Solr API DELETE commands here as well:
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-COLLECTION_NAME=hadoop_logs
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=audit_logs
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-COLLECTION_NAME=history
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=DELETE&name=$COLLECTION_NAME"
-```
-
-#### III/8. Delete Log Search Solr configs
-
-Log Search configs changed a lot between Ambari 2.6.x and Ambari 2.7.x, so it is required to delete those as well (the configs will be regenerated during Log Search startup):
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action cleanup-znodes -s LOGSEARCH
-```
-You can also delete the znodes with zookeeper-client:
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-# ZOOKEEPER CONNECTION STRING from zookeeper servers
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-zookeeper-client -server $ZK_CONN_STR rmr /infra-solr/configs/hadoop_logs
-zookeeper-client -server $ZK_CONN_STR rmr /infra-solr/configs/audit_logs
-zookeeper-client -server $ZK_CONN_STR rmr /infra-solr/configs/history
-```
-
-### IV. Upgrade Infra Solr packages
-
-At this step, you will need to upgrade the `ambari-infra-solr` packages (also make sure the ambari-logsearch* packages are upgraded as well).
-
-You can do that through Ambari commands with the migrationHelper.py script (which means you won't need to ssh into every Infra Solr instance host):
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-instances
-# the same can be done for logfeeders and logsearch portals if required:
-# just use '--action upgrade-logsearch-portal' or '--action upgrade-logfeeders'
-```
-That runs a package remove and a package install.
-
-Or, the usual way, run these commands on every host where the `ambari-infra-solr` packages are located:
-
-```bash
-# For RHEL/CentOS/Oracle Linux:
-
-yum clean all
-yum upgrade ambari-infra-solr
-
-# For SLES:
-
-zypper clean
-zypper up ambari-infra-solr
-
-# For Ubuntu/Debian:
-
-apt-get clean all
-apt-get update
-apt-get install ambari-infra-solr
-```
-
-After the packages are updated, the Solr instances can be restarted. This can be done from the UI or from the command line as well:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-solr
-```
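-
-After the restart, it can be worth verifying that the instances are indeed running the upgraded (Solr 7) binaries before re-creating any collection; a minimal sketch using the Solr system info endpoint (same placeholder URL and keytab as in the previous examples):
-
-```bash
-su infra-solr
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-# the "solr-spec-version" / "lucene-spec-version" fields show the running version
-curl --negotiate -k -u : "$SOLR_URL/admin/info/system?wt=json" | python -m json.tool | grep -E 'spec-version'
-```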
-
-### V. Re-create collections
-
-Restart the Ranger Admin / Atlas / Log Search Ambari services; as the old collections were deleted earlier, new collections will be created during startup (as Solr 7 collections). This can be done through the UI or with the following commands:
-
-```bash
-# if Ranger is installed on the cluster
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-ranger
-# if Atlas is installed on the cluster
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-atlas
-# if Log Search is installed on the cluster
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-logsearch
-```
-
-At this point you can stop and do the migration / restore later (as long as you keep the backup), and go ahead with e.g. the HDP upgrade (the migration part can take a long time - roughly 1GB/min).
-
-### VI. Migrate Solr Collections
-
-From this point, you can migrate your old index in the background. On every host where a backup is located, you can run the lucene index migration tool (packaged with ambari-infra-solr-client). For the lucene index migration, [migrationHelper.py](#solr-migration-helper-script) can be used, or `/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh` directly. That script uses the [IndexMigrationTool](https://lucene.apache.org/solr/guide/7_3/indexupgrader-tool.html).
-The whole migration can be done by executing one command:
-```bash
-# use a sudoer user for running the script !!
-# you can run this command with nohup in the background (e.g. redirecting the output to a log file), as the migration can take a lot of time (~1GB/min)
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate
-```
-If the script finishes successfully and everything looks green on the Ambari UI as well, you can go ahead with [Restore collections](#vi.-restore-collections). Otherwise (or if you want to go step by step instead of using the command above) you have the option to run the tasks one by one (or manually as well). Those tasks are described in the next sections.
-
-[![asciicast](https://asciinema.org/a/187125.png)](https://asciinema.org/a/187125?speed=2)
-
-#### VI/1. Migrate Ranger collections
-
-Migration for the `ranger_audits` collection (cores):
-
-```bash
-# by default, you will migrate to Lucene 6.6.2; if you want to migrate further to a Lucene 7 index (not required), you can use the --version 7.4.0 flag
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate -s RANGER
-```
-
-Or you can run the commands manually on the nodes where your backups are located:
-```bash
-
-export JAVA_HOME=/usr/jdk64/1.8.0_112
-
-# if /tmp/ranger-backup is your backup location
-infra-lucene-index-tool upgrade-index -d /tmp/ranger-backup -f -b -g
-
-# with the 'infra-lucene-index-tool help' command you can check out the command line options
-```
-
-By default, the tool will migrate from lucene version 5 to lucene version 6.6.2 (that is ok for Solr 7). If you want a lucene 7 index, you will need to re-run the migration tool command with the `-v 7.4.0` option.
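-
-Since the index migration of a large Ranger collection can run for hours, it can make sense to run the manual tool in the background with a log and pid file, following the same nohup pattern used elsewhere in this guide; a sketch (the backup path and file names are illustrative):
-
-```bash
-export JAVA_HOME=/usr/jdk64/1.8.0_112
-
-# run the Ranger index migration in the background, keeping a log and a pid file
-nohup infra-lucene-index-tool upgrade-index -d /tmp/ranger-backup -f -b -g > /tmp/ranger-index-migration.log 2>&1 & echo $! > /tmp/ranger-index-migration.pid
-
-# follow the progress
-tail -f /tmp/ranger-index-migration.log
-```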
-
-#### VI/2. Migrate Atlas collections
-
-As Atlas has 3 collections (fulltext_index, edge_index, vertex_index), you will need the same steps that were required for Ranger, just for all 3 collections.
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate -s ATLAS
-```
-
-Or you can run the commands manually on the nodes where your backups are located:
-```bash
-
-export JAVA_HOME=/usr/jdk64/1.8.0_112
-
-# if /tmp/fulltext_index_backup is your backup location
-infra-lucene-index-tool upgrade-index -d /tmp/fulltext_index_backup -f -b -g
-
-# with the 'infra-lucene-index-tool help' command you can check out the command line options
-```
-
-By default, the tool will migrate from lucene version 5 to lucene version 6.6.2 (that is ok for Solr 7). If you want a lucene 7 index, you will need to re-run the migration tool command with the `-v 7.4.0` option.
-
-### VII. Restore Collections
-
-To restore the old collections, first you will need to create them. As those collections may not be listed in the security.json of Infra Solr, you can get 403 errors when you try to access them later; for the time while you are doing the restore and transporting the Solr data to the new collections, you can [turn off](#turn-off-infra-solr-authorization) the Solr authorization plugin.
-
-The collection creation and restore part can be done with a single command:
-
-```bash
-# use a sudoer user for running the script !!
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore --keep-backup
-```
-
-If the script finishes successfully and everything looks green on the Ambari UI as well, you can go ahead with [Restart Solr Instances](#vii.-restart-infra-solr-instances). Otherwise (or if you want to go step by step instead of using the command above) you have the option to run the tasks one by one (or manually as well). Those tasks are described in the next sections.
-
-[![asciicast](https://asciinema.org/a/187423.png)](https://asciinema.org/a/187423?speed=2)
-
-#### VII/1. Restore Old Ranger collection
-
-After the lucene data migration is finished, you can restore your replicas on every host where you have backups. The old data needs to be restored into a new collection, so first you will need to create that collection (on a host where an Infra Solr component is installed). For Ranger, use the old_ranger_audits config set that you backed up during the Solr schema config upgrade step (set this as CONFIG_NAME); to make that collection work with Solr 7, you need to copy the new solrconfig.xml as well.
-That can be done by executing the following command:
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore -s RANGER
-```
-
-Or you can manually create a collection for restoring the backup (`old_ranger_audits`):
-
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-NUM_SHARDS=... # use the number that was used for the old collection - it is important to use at least as many as you originally had before the backup
-NUM_REP=1 # can be more, but 1 is recommended for this temporary collection
-MAX_SHARDS_PER_NODE=... # use the number that was used for the old collection
-CONFIG_NAME=old_ranger_audits
-OLD_DATA_COLLECTION=old_ranger_audits
-
-# kinit only if kerberos is enabled for the cluster
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-export JAVA_HOME=/usr/jdk64/1.8.0_112 # or other jdk8 location
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-
-# note 1: the jaas-file option is required only if kerberos is enabled for the cluster
-# note 2: copy the new solrconfig.xml as the old one won't be compatible with solr 7
-infra-solr-cloud-cli --transfer-znode -z $ZK_CONN_STR --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf --copy-src /infra-solr/configs/ranger_audits/solrconfig.xml --copy-dest /infra-solr/configs/old_ranger_audits/solrconfig.xml
-
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-```
-
-Then restore the cores with the Solr REST API: ([get core names](#get-core-/-shard-names-with-hosts))
-
-```bash
-su infra-solr
-SOLR_URL=... # actual solr host url, example: http://c6401.ambari.apache.org:8886/solr
-BACKUP_PATH=... # backup location, e.g.: /tmp/ranger-backup
-
-OLD_BACKUP_COLLECTION_CORE=... # choose a core to restore
-BACKUP_CORE_NAME=... # choose a core from backup cores - you can find these names as : /snapshot.$BACKUP_CORE_NAME
-
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-curl --negotiate -k -u : "$SOLR_URL/$OLD_BACKUP_COLLECTION_CORE/replication?command=RESTORE&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME"
-```
-
-Or use simple `cp` or `hdfs dfs -put` commands to copy the migrated cores to the right places.
-
-#### VII/2. Restore Old Atlas collections
-
-For Atlas, use the `old_` prefix for all 3 collections that you need to create, use the `atlas_configs` config set, and then use those to restore the backups:
-
-```bash
-/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore -s ATLAS
-```
-
-Or you can do the collection creation and the core restore step by step:
-
-Create a collection for restoring the backup (`old_fulltext_index`, `old_vertex_index`, `old_edge_index`):
-```bash
-su infra-solr # infra-solr user - if you have a custom one, use that
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-NUM_SHARDS=... # use the number that was used for the old collection - it is important to use at least as many as you originally had before the backup
-NUM_REP=1 # use 1!
-MAX_SHARDS_PER_NODE=... # use the number that was used for the old collection
-CONFIG_NAME=atlas_configs
-
-# kinit only if kerberos is enabled for the cluster
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-OLD_DATA_COLLECTION=old_fulltext_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-OLD_DATA_COLLECTION=old_edge_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-OLD_DATA_COLLECTION=old_vertex_index
-curl --negotiate -k -u : "$SOLR_URL/admin/collections?action=CREATE&name=$OLD_DATA_COLLECTION&numShards=$NUM_SHARDS&replicationFactor=$NUM_REP&maxShardsPerNode=$MAX_SHARDS_PER_NODE&collection.configName=$CONFIG_NAME"
-```
-
-You can also run the restore commands manually: ([get core names](#get-core-/-shard-names-with-hosts))
-
-```bash
-su infra-solr
-SOLR_URL=... 
# actual solr host url, example: http://c6401.ambari.apache.org:8886/solr -BACKUP_PATH=... # backup location, e.g.: /tmp/fulltext_index-backup - -OLD_BACKUP_COLLECTION_CORE=... # choose a core to restore -BACKUP_CORE_NAME=... # choose a core from backup cores - you can find these names as : /snapshot.$BACKUP_CORE_NAME - -kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f) -curl --negotiate -k -u : "$SOLR_URL/$OLD_BACKUP_COLLECTION_CORE/replication?command=RESTORE&location=$BACKUP_PATH&name=$BACKUP_CORE_NAME" -``` - -Or use simple `cp` or `hdfs dfs -put` commands to copy the migrated cores to the right places. - -### VIII. Restart Infra Solr instances - -Next step is to restart Solr instances. That can be done on the Ambari UI, or optionally you can use the migrationHelper script for that as well (rolling restart) -```bash -# --batch-interval -> interval between restart solr tasks -/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action rolling-restart-solr --batch-interval 60 -``` - -### IX. Transport old data to new collections - -Last step (that can be done any time, as you already have your data in Solr) is to transport all data from the backup collections to the live ones. -It can be done by running `transport-old-data` action by migration helper script: - -```bash -# working directory is under '/tmp/solrDataManager' folder -/usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action transport-old-data -``` - -Or in the next few steps, you can see what needs to be done manually to transport old Ranger and Atlas Solr data to active collections. - -#### IX/1. Transport old data to Ranger collection - -In the end, you end up with 2 collections (ranger_audits and old_ranger_audits), in order to drop the restored one, you will need to transfer your old data to the new collection. To achieve this, you can use [solrDataManager.py](#solr-data-manager-script), which is located next to the `migrationHelper.py` script - -```bash -# Init values: -SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr - -END_DATE=... # example: 2018-02-18T12:00:00.000Z , date until you export data - -OLD_COLLECTION=old_ranger_audits -ACTIVE_COLLECTION=ranger_audits -EXCLUDE_FIELDS=_version_ # comma separated exclude fields, at least _version_ is required - -# provide these with -k and -n options only if kerberos is enabled for Infra Solr !!! -INFRA_SOLR_KEYTAB=... # example: /etc/security/keytabs/ambari-infra-solr.service.keytab -INFRA_SOLR_PRINCIPAL=... # example: infra-solr/$(hostname -f)@EXAMPLE.COM - -DATE_FIELD=evtTime -# infra-solr-data-manager is a symlink points to /usr/lib/ambari-infra-solr-client/solrDataManager.py -infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 -f $DATE_FIELD -e $END_DATE --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS - -# Or if you want to run the command in the background (with log and pid file): -nohup infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 -f $DATE_FIELD -e $END_DATE --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS > /tmp/solr-data-mgr.log 2>&1 & echo $! > /tmp/solr-data-mgr.pid -``` -[![asciicast](https://asciinema.org/a/188396.png)](https://asciinema.org/a/188396?speed=2) - -#### IX/2. 
Transport old data to Atlas collections
-
-In the end, you end up with 6 Atlas collections (vertex_index, old_vertex_index, edge_index, old_edge_index, fulltext_index, old_fulltext_index ... the old_* collections will only exist if the restore ran against a non-empty collection, which means you won't need to transfer data if there is no old_* pair for a specific collection). In order to drop the restored ones, you will need to transfer the old data to the new collections. To achieve this, you can use [solrDataManager.py](#solr-data-manager-script), which is located next to the `migrationHelper.py` script. Here, the script usage is a bit different, as we cannot provide a proper date/timestamp field, so during the data transfer the records will be sorted only by id (this requires the `--skip-date-usage` flag).
-
-Example (with vertex_index; do the same with edge_index and fulltext_index, although most likely at least edge_index will be empty):
-```bash
-# Init values:
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-
-OLD_COLLECTION=old_vertex_index
-ACTIVE_COLLECTION=vertex_index
-EXCLUDE_FIELDS=_version_ # comma separated exclude fields, at least _version_ is required
-
-# provide these with -k and -n options only if kerberos is enabled for Infra Solr !!!
-INFRA_SOLR_KEYTAB=... # example: /etc/security/keytabs/ambari-infra-solr.service.keytab
-INFRA_SOLR_PRINCIPAL=... # example: infra-solr/$(hostname -f)@EXAMPLE.COM
-
-# infra-solr-data-manager is a symlink pointing to /usr/lib/ambari-infra-solr-client/solrDataManager.py
-infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 --skip-date-usage --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS
-
-# Or if you want to run the command in the background (with log and pid file):
-nohup infra-solr-data-manager -m archive -v -c $OLD_COLLECTION -s $SOLR_URL -z none -r 10000 -w 100000 --skip-date-usage --solr-output-collection $ACTIVE_COLLECTION -k $INFRA_SOLR_KEYTAB -n $INFRA_SOLR_PRINCIPAL --exclude-fields $EXCLUDE_FIELDS > /tmp/solr-data-mgr.log 2>&1 & echo $! > /tmp/solr-data-mgr.pid
-```
-
-[![asciicast](https://asciinema.org/a/188402.png)](https://asciinema.org/a/188402?speed=2)
-
-### Happy path
-
-The happy path steps are mainly for automation.
-
-##### 1. Generate migration config
-
-First, generate the ini config for the migration; after running the following script, review the content of the generated ini file.
-
-```bash
-CONFIG_INI_LOCATION=ambari_migration.ini
-BACKUP_BASE_PATH=/tmp
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationConfigGenerator.py --ini-file $CONFIG_INI_LOCATION --host c7401.ambari.apache.org --port 8080 --cluster cl1 --username admin --password admin --backup-base-path=$BACKUP_BASE_PATH --java-home /usr/jdk64/jdk1.8.0_112
-```
-##### 2.a) Do backup-migrate-restore
-
-To do a backup + cleanup, then later migrate + restore, use the following commands:
-
-```bash
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode backup
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode delete --skip-solr-client-upgrade
-# go ahead with the HDP upgrade or anything else, then when you have the resources / time (recommended to use nohup as the migrate part can take a lot of time):
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode migrate-restore # you can use the --keep-backup option to keep the backup data; it is safer but you need enough space for that
-```
-
-Or you can execute these steps together (if you are not going ahead with an HDP upgrade after the backup):
-```bash
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode all
-```
-
-This will execute the following equivalent migrationHelper.py commands:
-
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-clients
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action backup-and-cleanup
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-instances
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logsearch-portal
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logfeeders
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-solr
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-logsearch
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-ranger
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-atlas
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action migrate
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restore
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action rolling-restart-solr
-```
-
-##### 2.b) Do delete only if backup is not required
-
-To only clean up the collections, execute this script:
-```bash
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode delete
-```
-
-This will execute the following equivalent migrationHelper.py commands:
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-clients
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action delete-collections
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-solr-instances
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logsearch-portal
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action upgrade-logfeeders
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-solr
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-logsearch
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-ranger
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action restart-atlas
-```
-
-##### 3. Transport Solr data from old collections to active collections (optional)
-
-Run this command to transport the old data to the active collections:
-```bash
-# recommended to use with nohup as this command can take a long time as well
-# the working directory is under the '/tmp/solrDataManager' folder
-/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --ini-file $CONFIG_INI_LOCATION --mode transport
-```
-
-Or see the [transport old data to new collections](#viii.-transport-old-data-to-new-collections) step.
-
-### APPENDIX
-
-#### Additional filters for migrationHelper.py script
-
-- `--service-filter` or `-s`: you can filter on services for migration commands (e.g. run against only ATLAS or RANGER), possible values: ATLAS,RANGER,LOGSEARCH
-- `--skip-cores`: skip specific cores from the migration (can be useful if just one of them failed during restore etc.)
-- `--collection` or `-c`: run migration commands on just a specific collection (like `ranger_audits`, or `old_ranger_audits` for restore)
-- `--core-filter`: can be used only for index migration; it works as a regex filter on the snapshot core folder, e.g. "mycore" means it will be applied only to "/index/location/mycore_folder" but not to "/index/location/myother_folder"
-
-#### What to do if Solr instances were restarted right after the Ambari upgrade but before upgrading the Solr instance packages?
-
-If you restarted Solr before the backup or before upgrading the Solr server packages, you can fix the Solr config with the following command:
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action fix-solr5-kerberos-config
-```
-
-That basically adds `SOLR_KERB_NAME_RULES` back to `infra-solr-env/content` and disables authorization for Solr (it uploads a /security.json to the /infra-solr znode without the authorization config, then turns the manually managed /security.json setting on in order not to override /security.json again on Solr restart). After the command has finished successfully, you will need to restart the Solr instances.
-
-But if the `SOLR_KERB_NAME_RULES` config was added to `infra-solr-env/content`, you will need to delete it after you have upgraded the Solr packages (and before restarting them). You can do that with the `fix-solr7-kerberos-config` action:
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action fix-solr7-kerberos-config
-```
-
-#### Get core / shard names with hosts
-
-To see which hosts and cores belong to your collections, you can check the Solr UI (using SPNEGO), or get the state.json details of the collection using a zookeeper-client or the Solr zookeeper API (`/solr/admin/zookeeper?detail=true&path=/collections//state.json`).
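-
-A minimal sketch of both approaches is shown below; the URL, connection string and collection name are placeholders in the same style as the other examples (the `python -m json.tool` formatting is optional), and the core names with their hosts can be found in the replicas section of the returned state.json:
-
-```bash
-su infra-solr
-SOLR_URL=... # example: http://c6401.ambari.apache.org:8886/solr
-COLLECTION_NAME=... # e.g.: ranger_audits
-export ZK_CONN_STR=... # without znode, e.g.: myhost1:2181,myhost2:2181,myhost3:2181
-
-# use kinit and --negotiate option for curl only if the cluster is kerberized
-kinit -kt /etc/security/keytabs/ambari-infra-solr.service.keytab $(whoami)/$(hostname -f)
-
-# option 1: through the Solr zookeeper API
-curl --negotiate -k -u : "$SOLR_URL/admin/zookeeper?detail=true&path=/collections/$COLLECTION_NAME/state.json" | python -m json.tool
-
-# option 2: directly from zookeeper
-zookeeper-client -server $ZK_CONN_STR get /infra-solr/collections/$COLLECTION_NAME/state.json
-```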
-
-#### Turn off Infra Solr Authorization
-
-You can turn off the Solr authorization plugin with the `disable-solr-authorization` action (it can be executed after the config generation [step](#0-gather-params)):
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action disable-solr-authorization
-```
-
-You can re-enable it with the following command (or set the `infra-solr-security-json/infra_solr_security_manually_managed` configuration to `false`, then restart Solr):
-
-```bash
-/usr/bin/python /usr/lib/ambari-infra-solr-client/migrationHelper.py --ini-file $CONFIG_INI_LOCATION --action enable-solr-authorization
-```
-
-#### Solr Migration Helper Script
-
-`/usr/lib/ambari-infra-solr-client/migrationHelper.py --help`
-
-```text -Usage: migrationHelper.py [options] - -Options: - -h, --help show this help message and exit - -a ACTION, --action=ACTION - delete-collections | backup | cleanup-znodes | backup- - and-cleanup | migrate | restore |' ' - rolling-restart-solr | rolling-restart-atlas | - rolling-restart-ranger | check-shards | check-backup- - shards | enable-solr-authorization | disable-solr- - authorization |' ' fix-solr5-kerberos- - config | fix-solr7-kerberos-config | upgrade-solr- - clients | upgrade-solr-instances | upgrade-logsearch- - portal | upgrade-logfeeders | stop-logsearch |' - ' restart-solr |restart-logsearch | restart-ranger | - restart-atlas | transport-old-data - -i INI_FILE, --ini-file=INI_FILE - Config ini file to parse (required) - -f, --force force index upgrade even if it's the right version - -v, --verbose use for verbose logging - -s SERVICE_FILTER, --service-filter=SERVICE_FILTER - run commands only selected services (comma separated: - LOGSEARCH,ATLAS,RANGER) - -c COLLECTION, --collection=COLLECTION - selected collection to run an operation - --async async Ambari operations (backup | restore | migrate) - --index-location=INDEX_LOCATION - location of the index backups. add ranger/atlas prefix - after the path. required only if no backup path in the - ini file - --atlas-index-location=ATLAS_INDEX_LOCATION - location of the index backups (for atlas). required - only if no backup path in the ini file - --ranger-index-location=RANGER_INDEX_LOCATION - location of the index backups (for ranger). 
required - only if no backup path in the ini file - --version=INDEX_VERSION - lucene index version for migration (6.6.2 or 7.4.0) - --solr-async-request-tries=SOLR_ASYNC_REQUEST_TRIES - number of max tries for async Solr requests (e.g.: - delete operation) - --request-tries=REQUEST_TRIES - number of tries for BACKUP/RESTORE status api calls in - the request - --request-time-interval=REQUEST_TIME_INTERVAL - time interval between BACKUP/RESTORE status api calls - in the request - --request-async skip BACKUP/RESTORE status api calls from the command - --transport-read-block-size=TRANSPORT_READ_BLOCK_SIZE - block size to use for reading from solr during - transport - --transport-write-block-size=TRANSPORT_WRITE_BLOCK_SIZE - number of records in the output files during transport - --include-solr-hosts=INCLUDE_SOLR_HOSTS - comma separated list of included solr hosts - --exclude-solr-hosts=EXCLUDE_SOLR_HOSTS - comma separated list of excluded solr hosts - --disable-solr-host-check - Disable to check solr hosts are good for the - collection backups - --core-filter=CORE_FILTER - core filter for replica folders - --skip-cores=SKIP_CORES - specific cores to skip (comma separated) - --hdfs-base-path=HDFS_BASE_PATH - hdfs base path where the collections are located - (e.g.: /user/infrasolr). Use if both atlas and ranger - collections are on hdfs. - --ranger-hdfs-base-path=RANGER_HDFS_BASE_PATH - hdfs base path where the ranger collection is located - (e.g.: /user/infra-solr). Use if only ranger - collection is on hdfs. - --atlas-hdfs-base-path=ATLAS_HDFS_BASE_PATH - hdfs base path where the atlas collections are located - (e.g.: /user/infra-solr). Use if only atlas - collections are on hdfs. - --keep-backup If it is turned on, Snapshot Solr data will not be - deleted from the filesystem during restore. - --batch-interval=BATCH_INTERVAL - batch time interval (seconds) between requests (for - restarting INFRA SOLR, default: 60) - --batch-fault-tolerance=BATCH_FAULT_TOLERANCE - fault tolerance of tasks for batch request (for - restarting INFRA SOLR, default: 0) - --shared-drive Use if the backup location is shared between hosts. 
- (override config from config ini file) - --skip-json-dump-files=SKIP_JSON_DUMP_FILES - comma separated list of files that won't be download - during collection dump (could be useful if it is - required to change something in manually in the - already downloaded file) - --skip-index-size Skip index size check for check-shards or check- - backup-shards - --skip-warnings Pass check-shards or check-backup-shards even if there - are warnings -``` - -#### Solr Migration Config Generator Script - -```text -Usage: migrationConfigGenerator.py [options] - -Options: - -h, --help show this help message and exit - -H HOST, --host=HOST hostname for ambari server - -P PORT, --port=PORT port number for ambari server - -c CLUSTER, --cluster=CLUSTER - name cluster - -f, --force-ranger force to get Ranger details - can be useful if Ranger - is configured to use external Solr (but points to - internal Sols) - -s, --ssl use if ambari server using https - -v, --verbose use for verbose logging - -u USERNAME, --username=USERNAME - username for accessing ambari server - -p PASSWORD, --password=PASSWORD - password for accessing ambari server - -j JAVA_HOME, --java-home=JAVA_HOME - local java_home location - -i INI_FILE, --ini-file=INI_FILE - Filename of the generated ini file for migration - (default: ambari_solr_migration.ini) - --backup-base-path=BACKUP_BASE_PATH - base path for backup, e.g.: /tmp/backup, then - /tmp/backup/ranger/ and /tmp/backup/atlas/ folders - will be generated - --backup-ranger-base-path=BACKUP_RANGER_BASE_PATH - base path for ranger backup (override backup-base-path - for ranger), e.g.: /tmp/backup/ranger - --backup-atlas-base-path=BACKUP_ATLAS_BASE_PATH - base path for atlas backup (override backup-base-path - for atlas), e.g.: /tmp/backup/atlas - --hdfs-base-path=HDFS_BASE_PATH - hdfs base path where the collections are located - (e.g.: /user/infrasolr). Use if both atlas and ranger - collections are on hdfs. - --ranger-hdfs-base-path=RANGER_HDFS_BASE_PATH - hdfs base path where the ranger collection is located - (e.g.: /user/infra-solr). Use if only ranger - collection is on hdfs. - --atlas-hdfs-base-path=ATLAS_HDFS_BASE_PATH - hdfs base path where the atlas collections are located - (e.g.: /user/infra-solr). Use if only atlas - collections are on hdfs. - --skip-atlas skip to gather Atlas service details - --skip-ranger skip to gather Ranger service details - --retry=RETRY number of retries during accessing random solr urls - --delay=DELAY delay (seconds) between retries during accessing - random solr urls - --shared-drive Use if the backup location is shared between hosts. 
-``` - -#### Solr Data Manager Script - -`/usr/lib/ambari-infra-solr-client/solrDataManager.py --help` - -```text -Usage: solrDataManager.py [options] - -Options: - --version show program's version number and exit - -h, --help show this help message and exit - -m MODE, --mode=MODE archive | delete | save - -s SOLR_URL, --solr-url=SOLR_URL - the url of the solr server including the port and - protocol - -c COLLECTION, --collection=COLLECTION - the name of the solr collection - -f FILTER_FIELD, --filter-field=FILTER_FIELD - the name of the field to filter on - -r READ_BLOCK_SIZE, --read-block-size=READ_BLOCK_SIZE - block size to use for reading from solr - -w WRITE_BLOCK_SIZE, --write-block-size=WRITE_BLOCK_SIZE - number of records in the output files - -i ID_FIELD, --id-field=ID_FIELD - the name of the id field - -o DATE_FORMAT, --date-format=DATE_FORMAT - the date format to use for --days - -q ADDITIONAL_FILTER, --additional-filter=ADDITIONAL_FILTER - additional solr filter - -j NAME, --name=NAME name included in result files - -g, --ignore-unfinished-uploading - --json-file create a json file instead of line delimited json - -z COMPRESSION, --compression=COMPRESSION - none | tar.gz | tar.bz2 | zip | gz - -k SOLR_KEYTAB, --solr-keytab=SOLR_KEYTAB - the keytab for a kerberized solr - -n SOLR_PRINCIPAL, --solr-principal=SOLR_PRINCIPAL - the principal for a kerberized solr - -a HDFS_KEYTAB, --hdfs-keytab=HDFS_KEYTAB - the keytab for a kerberized hdfs - -l HDFS_PRINCIPAL, --hdfs-principal=HDFS_PRINCIPAL - the principal for a kerberized hdfs - -u HDFS_USER, --hdfs-user=HDFS_USER - the user for accessing hdfs - -p HDFS_PATH, --hdfs-path=HDFS_PATH - the hdfs path to upload to - -t KEY_FILE_PATH, --key-file-path=KEY_FILE_PATH - the file that contains S3 , - -b BUCKET, --bucket=BUCKET - the bucket name for S3 upload - -y KEY_PREFIX, --key-prefix=KEY_PREFIX - the key prefix for S3 upload - -x LOCAL_PATH, --local-path=LOCAL_PATH - the local path to save the files to - -v, --verbose - --solr-output-collection=SOLR_OUTPUT_COLLECTION - target output solr collection for archive - --solr-output-url=SOLR_OUTPUT_URL - the url of the output solr server including the port - and protocol - --exclude-fields=EXCLUDE_FIELDS - Comma separated list of excluded fields from json - response - --skip-date-usage datestamp field won't be used for queries (sort based - on id field) - - specifying the end of the range: - -e END, --end=END end of the range - -d DAYS, --days=DAYS - number of days to keep -``` - -#### Ambari Solr Migration script - -`/usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --help` - -```text -Usage: /usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --mode --ini-file [additional options] - - -m, --mode available migration modes: delete-only | backup-only | migrate-restore | all | transport - -i, --ini-file ini-file location (used by migrationHelper.py) - -s, --migration-script-location migrateHelper.py location (default: /usr/lib/ambari-infra-solr-client/migrationHelper.py) - -w, --wait-between-steps wait between different migration steps in seconds (default: 15) - -p, --python-path python location, default: /usr/bin/python - -b, --batch-interval seconds between batch tasks for rolling restart solr at last step (default: 60) - -k, --keep-backup keep backup data (more secure, useful if you have enough space for that) - --skip-solr-client-upgrade skip ambari-infra-solr-client package upgrades - --skip-solr-server-upgrade skip ambari-infra-solr package upgrades - --skip-logsearch-upgrade 
skip ambari-logsearch-portal and ambari-logsearch-logfeeder package upgrades - --skip-warnings skip warnings at check-shards step - -h, --help print help -``` diff --git a/ambari-infra/ambari-infra-solr-client/build.xml b/ambari-infra/ambari-infra-solr-client/build.xml deleted file mode 100644 index f7b2633978b..00000000000 --- a/ambari-infra/ambari-infra-solr-client/build.xml +++ /dev/null @@ -1,79 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-infra/ambari-infra-solr-client/pom.xml b/ambari-infra/ambari-infra-solr-client/pom.xml deleted file mode 100644 index a4abc9aaf73..00000000000 --- a/ambari-infra/ambari-infra-solr-client/pom.xml +++ /dev/null @@ -1,178 +0,0 @@ - - - - - ambari-infra - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - http://maven.apache.org - Ambari Infra Solr Client - - ambari-infra-solr-client - - - 6.6.2 - lucene-core-${lucene6.version}.jar - http://central.maven.org/maven2/org/apache/lucene/lucene-core/${lucene6.version}/${lucene6-core-jar.name} - lucene-backward-codecs-${lucene6.version}.jar - http://central.maven.org/maven2/org/apache/lucene/lucene-backward-codecs/${lucene6.version}/${lucene6-backward-codecs-jar.name} - - - - - org.apache.solr - solr-solrj - ${solr.version} - - - org.apache.lucene - lucene-core - ${solr.version} - - - org.apache.lucene - lucene-backward-codecs - ${solr.version} - - - org.apache.zookeeper - zookeeper - - - commons-cli - commons-cli - - - org.codehaus.jackson - jackson-mapper-asl - 1.9.13 - - - commons-codec - commons-codec - 1.8 - - - commons-lang - commons-lang - - - org.slf4j - slf4j-api - 1.7.20 - - - org.slf4j - slf4j-log4j12 - 1.7.20 - - - log4j - log4j - 1.2.17 - - - com.sun.jdmk - jmxtools - - - com.sun.jmx - jmxri - - - javax.mail - mail - - - javax.jms - jmx - - - javax.jms - jms - - - - - com.amazonaws - aws-java-sdk-s3 - 1.11.5 - - - junit - junit - test - - - org.easymock - easymock - 3.4 - test - - - - - - - org.apache.maven.plugins - maven-dependency-plugin - 2.8 - - - copy-dependencies - package - - copy-dependencies - - - true - ${basedir}/target/libs - false - false - true - - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - package - - - - - - - - - run - - - - - - - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java deleted file mode 100644 index b0c778154f4..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudCLI.java +++ /dev/null @@ -1,680 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.infra.solr; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class AmbariSolrCloudCLI { - - private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudCLI.class); - - private static final int ZK_CLIENT_TIMEOUT = 60000; // 1 minute - private static final int ZK_CLIENT_CONNECT_TIMEOUT = 60000; // 1 minute - private static final String CREATE_COLLECTION_COMMAND = "create-collection"; - private static final String UPLOAD_CONFIG_COMMAND = "upload-config"; - private static final String DOWNLOAD_CONFIG_COMMAND = "download-config"; - private static final String CONFIG_CHECK_COMMAND = "check-config"; - private static final String CREATE_SHARD_COMMAND = "create-shard"; - private static final String CREATE_ZNODE = "create-znode"; - private static final String SET_CLUSTER_PROP = "cluster-prop"; - private static final String SETUP_KERBEROS_PLUGIN = "setup-kerberos-plugin"; - private static final String CHECK_ZNODE = "check-znode"; - private static final String SECURE_ZNODE_COMMAND = "secure-znode"; - private static final String UNSECURE_ZNODE_COMMAND = "unsecure-znode"; - private static final String SECURE_SOLR_ZNODE_COMMAND = "secure-solr-znode"; - private static final String SECURITY_JSON_LOCATION = "security-json-location"; - private static final String REMOVE_ADMIN_HANDLERS = "remove-admin-handlers"; - private static final String TRANSFER_ZNODE_COMMAND = "transfer-znode"; - private static final String DELETE_ZNODE_COMMAND = "delete-znode"; - private static final String DUMP_COLLECTIONS_DATA_COMMAND = "dump-collections"; - private static final String CMD_LINE_SYNTAX = - "\n./solrCloudCli.sh --create-collection -z host1:2181,host2:2181/ambari-solr -c collection -cs conf_set" - + "\n./solrCloudCli.sh --upload-config -z host1:2181,host2:2181/ambari-solr -d /tmp/myconfig_dir -cs config_set" - + "\n./solrCloudCli.sh --download-config -z host1:2181,host2:2181/ambari-solr -cs config_set -d /tmp/myonfig_dir" - + "\n./solrCloudCli.sh --check-config -z host1:2181,host2:2181/ambari-solr -cs config_set" - + "\n./solrCloudCli.sh --create-shard -z host1:2181,host2:2181/ambari-solr -c collection -sn myshard" - + "\n./solrCloudCli.sh --remove-admin-handlers -z host1:2181,host2:2181/ambari-solr -c collection" - + "\n./solrCloudCli.sh --dump-collections -z host1:2181,host2:2181/ambari-solr -o collection-data.json" - + "\n./solrCloudCli.sh --create-znode -z host1:2181,host2:2181 -zn /ambari-solr" - + "\n./solrCloudCli.sh --check-znode -z host1:2181,host2:2181 -zn /ambari-solr" - + "\n./solrCloudCli.sh --delete-znode -z host1:2181,host2:2181 -zn /ambari-solr" - + "\n./solrCloudCli.sh --transfer-znode -z host1:2181,host2:2181 -cps /ambari-solr -cpd /ambari-solr-backup" - + "\n./solrCloudCli.sh --cluster-prop -z host1:2181,host2:2181/ambari-solr -cpn urlScheme -cpn http" - + "\n./solrCloudCli.sh --secure-znode -z host1:2181,host2:2181 -zn /ambari-solr -su logsearch,atlas,ranger --jaas-file /etc/myconf/jaas_file" - + "\n./solrCloudCli.sh --unsecure-znode -z 
host1:2181,host2:2181 -zn /ambari-solr --jaas-file /etc/myconf/jaas_file" - + "\n./solrCloudCli.sh --secure-solr-znode -z host1:2181,host2:2181 -zn /ambari-solr -su logsearch,atlas,ranger --jaas-file /etc/myconf/jaas_file" - + "\n./solrCloudCli.sh --setup-kerberos-plugin -z host1:2181,host2:2181 -zn /ambari-solr --security-json-location /etc/infra-solr/conf/security.json\n "; - - public static void main(String[] args) { - Options options = new Options(); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.setDescPadding(10); - helpFormatter.setWidth(200); - - final Option helpOption = Option.builder("h") - .longOpt("help") - .desc("Print commands") - .build(); - - final Option createCollectionOption = Option.builder("cc") - .longOpt(CREATE_COLLECTION_COMMAND) - .desc("Create collection in Solr (command)") - .build(); - - final Option uploadConfigurationOption = Option.builder("uc") - .longOpt(UPLOAD_CONFIG_COMMAND) - .desc("Upload configuration set to Zookeeper (command)") - .build(); - - final Option downloadConfigOption = Option.builder("dc") - .longOpt(DOWNLOAD_CONFIG_COMMAND) - .desc("Download configuration set from Zookeeper (command)") - .build(); - - final Option checkConfigOption = Option.builder("chc") - .longOpt(CONFIG_CHECK_COMMAND) - .desc("Check configuration exists in Zookeeper (command)") - .build(); - - final Option checkZnodeOption = Option.builder("chz") - .longOpt(CHECK_ZNODE) - .desc("Check znode exists in Zookeeper (command)") - .build(); - - final Option createShardOption = Option.builder("csh") - .longOpt(CREATE_SHARD_COMMAND) - .desc("Create shard in Solr (command)") - .build(); - - final Option setClusterPropOption = Option.builder("cp") - .longOpt(SET_CLUSTER_PROP) - .desc("Set cluster property (command)") - .build(); - - final Option createZnodeOption = Option.builder("cz") - .longOpt(CREATE_ZNODE) - .desc("Create Znode (command)") - .build(); - - final Option setupKerberosPluginOption = Option.builder("skp") - .longOpt(SETUP_KERBEROS_PLUGIN) - .desc("Setup kerberos plugin in security.json (command)") - .build(); - - final Option secureSolrZnodeOption = Option.builder("ssz") - .longOpt(SECURE_SOLR_ZNODE_COMMAND) - .desc("Set acls for solr znode (command)") - .build(); - - final Option secureZnodeOption = Option.builder("sz") - .longOpt(SECURE_ZNODE_COMMAND) - .desc("Set acls for znode (command)") - .build(); - - final Option unsecureZnodeOption = Option.builder("uz") - .longOpt(UNSECURE_ZNODE_COMMAND) - .desc("Disable security for znode (command)") - .build(); - - final Option removeAdminHandlerOption = Option.builder("rah") - .longOpt(REMOVE_ADMIN_HANDLERS) - .desc("Remove AdminHandlers request handler from solrconfig.xml (command)") - .build(); - - final Option transferZnodeOption = Option.builder("tz") - .longOpt(TRANSFER_ZNODE_COMMAND) - .desc("Transfer znode (copy from/to local or to another znode)") - .build(); - - final Option deleteZnodeOption = Option.builder("dz") - .longOpt(DELETE_ZNODE_COMMAND) - .desc("Delete znode") - .build(); - - final Option dumpCollectionsOption = Option.builder("dcd") - .longOpt(DUMP_COLLECTIONS_DATA_COMMAND) - .desc("Dump collections data") - .build(); - - final Option shardNameOption = Option.builder("sn") - .longOpt("shard-name") - .desc("Name of the shard for create-shard command") - .numberOfArgs(1) - .argName("my_new_shard") - .build(); - - final Option implicitRoutingOption = Option.builder("ir") - .longOpt("implicit-routing") - .desc("Use implicit routing when creating a collection") - .build(); - - 
final Option zkConnectStringOption = Option.builder("z") - .longOpt("zookeeper-connect-string") - .desc("Zookeeper quorum [and Znode (optional)]") - .numberOfArgs(1) - .argName("host:port,host:port[/ambari-solr]") - .build(); - - final Option znodeOption = Option.builder("zn") - .longOpt("znode") - .desc("Zookeeper ZNode") - .numberOfArgs(1) - .argName("/ambari-solr") - .build(); - - final Option collectionOption = Option.builder("c") - .longOpt("collection") - .desc("Collection name") - .numberOfArgs(1) - .argName("collection name") - .build(); - - final Option configSetOption = Option.builder("cs") - .longOpt("config-set") - .desc("Configuration set") - .numberOfArgs(1) - .argName("config_set") - .build(); - - final Option configDirOption = Option.builder("d") - .longOpt("config-dir") - .desc("Configuration directory") - .numberOfArgs(1) - .argName("config_dir") - .build(); - - final Option shardsOption = Option.builder("s") - .longOpt("shards") - .desc("Number of shards") - .numberOfArgs(1) - .argName("shard number") - .type(Integer.class) - .build(); - - final Option replicationOption = Option.builder("r") - .longOpt("replication") - .desc("Replication factor") - .numberOfArgs(1) - .argName("replication factor") - .type(Integer.class) - .build(); - - final Option retryOption = Option.builder("rt") - .longOpt("retry") - .desc("Number of retries for access Solr [default:10]") - .numberOfArgs(1) - .argName("number of retries") - .type(Integer.class) - .build(); - - final Option intervalOption = Option.builder("i") - .longOpt("interval") - .desc("Interval for retry logic in sec [default:5]") - .numberOfArgs(1) - .argName("interval") - .type(Integer.class) - .build(); - - final Option maxShardsOption = Option.builder("m") - .longOpt("max-shards") - .desc("Max number of shards per node (default: replication * shards)") - .numberOfArgs(1) - .argName("max number of shards") - .build(); - - final Option routerNameOption = Option.builder("rn") - .longOpt("router-name") - .desc("Router name for collection [default:implicit]") - .numberOfArgs(1) - .argName("router_name") - .build(); - - final Option routerFieldOption = Option.builder("rf") - .longOpt("router-field") - .desc("Router field for collection [default:_router_field_]") - .numberOfArgs(1) - .argName("router_field") - .build(); - - final Option jaasFileOption = Option.builder("jf") - .longOpt("jaas-file") - .desc("Location of the jaas-file to communicate with kerberized Solr") - .numberOfArgs(1) - .argName("jaas_file") - .build(); - - final Option keyStoreLocationOption = Option.builder("ksl") - .longOpt("key-store-location") - .desc("Location of the key store used to communicate with Solr using SSL") - .numberOfArgs(1) - .argName("key store location") - .build(); - - final Option keyStorePasswordOption = Option.builder("ksp") - .longOpt("key-store-password") - .desc("Key store password used to communicate with Solr using SSL") - .numberOfArgs(1) - .argName("key store password") - .build(); - - final Option keyStoreTypeOption = Option.builder("kst") - .longOpt("key-store-type") - .desc("Type of the key store used to communicate with Solr using SSL") - .numberOfArgs(1) - .argName("key store type") - .build(); - - final Option trustStoreLocationOption = Option.builder("tsl") - .longOpt("trust-store-location") - .desc("Location of the trust store used to communicate with Solr using SSL") - .numberOfArgs(1) - .argName("trust store location") - .build(); - - final Option trustStorePasswordOption = Option.builder("tsp") - 
.longOpt("trust-store-password") - .desc("Trust store password used to communicate with Solr using SSL") - .numberOfArgs(1) - .argName("trust store password") - .build(); - - final Option trustStoreTypeOption = Option.builder("tst") - .longOpt("trust-store-type") - .desc("Type of the trust store used to communicate with Solr using SSL") - .numberOfArgs(1) - .argName("trust store type") - .build(); - - final Option propNameOption = Option.builder("cpn") - .longOpt("property-name") - .desc("Cluster property name") - .numberOfArgs(1) - .argName("cluster prop name") - .build(); - - final Option propValueOption = Option.builder("cpv") - .longOpt("property-value") - .desc("Cluster property value") - .numberOfArgs(1) - .argName("cluster prop value") - .build(); - - final Option saslUsersOption = Option.builder("su") - .longOpt("sasl-users") - .desc("Sasl users (comma separated list)") - .numberOfArgs(1) - .argName("atlas,ranger,logsearch-solr") - .build(); - - final Option copyScrOption = Option.builder("cps") - .longOpt("copy-src") - .desc("ZNode or local source (used for ZNode transfer)") - .numberOfArgs(1) - .argName("/myznode | /my/path") - .build(); - - final Option copyDestOption = Option.builder("cpd") - .longOpt("copy-dest") - .desc("ZNode or local destination (used for ZNode transfer)") - .numberOfArgs(1) - .argName("/myznode | /my/path") - .build(); - - final Option transferModeOption = Option.builder("tm") - .longOpt("transfer-mode") - .desc("Transfer mode, if not used copy znode to znode.") - .numberOfArgs(1) - .argName("copyFromLocal | copyToLocal") - .build(); - - final Option securityJsonLocationOption = Option.builder("sjl") - .longOpt(SECURITY_JSON_LOCATION) - .desc("Local security.json path") - .numberOfArgs(1) - .argName("security.json location") - .build(); - - final Option secureOption = Option.builder("sec") - .longOpt("secure") - .desc("Flag for enable/disable kerberos (with --setup-kerberos or --setup-kerberos-plugin)") - .build(); - - final Option outputOption = Option.builder("o") - .longOpt("output") - .desc("File output for collections dump") - .numberOfArgs(1) - .build(); - - final Option includeDocNumberOption = Option.builder("idn") - .longOpt("include-doc-number") - .desc("Include the number of docs as well in collection dump") - .build(); - - options.addOption(helpOption); - options.addOption(retryOption); - options.addOption(removeAdminHandlerOption); - options.addOption(intervalOption); - options.addOption(zkConnectStringOption); - options.addOption(configSetOption); - options.addOption(configDirOption); - options.addOption(collectionOption); - options.addOption(secureZnodeOption); - options.addOption(unsecureZnodeOption); - options.addOption(secureSolrZnodeOption); - options.addOption(transferZnodeOption); - options.addOption(shardsOption); - options.addOption(replicationOption); - options.addOption(maxShardsOption); - options.addOption(routerNameOption); - options.addOption(routerFieldOption); - options.addOption(shardNameOption); - options.addOption(implicitRoutingOption); - options.addOption(createCollectionOption); - options.addOption(downloadConfigOption); - options.addOption(uploadConfigurationOption); - options.addOption(checkConfigOption); - options.addOption(createShardOption); - options.addOption(jaasFileOption); - options.addOption(keyStoreLocationOption); - options.addOption(keyStorePasswordOption); - options.addOption(keyStoreTypeOption); - options.addOption(trustStoreLocationOption); - options.addOption(trustStorePasswordOption); - 
options.addOption(trustStoreTypeOption); - options.addOption(setClusterPropOption); - options.addOption(propNameOption); - options.addOption(propValueOption); - options.addOption(createZnodeOption); - options.addOption(znodeOption); - options.addOption(secureOption); - options.addOption(transferModeOption); - options.addOption(copyScrOption); - options.addOption(copyDestOption); - options.addOption(saslUsersOption); - options.addOption(checkZnodeOption); - options.addOption(deleteZnodeOption); - options.addOption(dumpCollectionsOption); - options.addOption(setupKerberosPluginOption); - options.addOption(securityJsonLocationOption); - options.addOption(outputOption); - options.addOption(includeDocNumberOption); - - AmbariSolrCloudClient solrCloudClient = null; - - try { - CommandLineParser cmdLineParser = new DefaultParser(); - CommandLine cli = cmdLineParser.parse(options, args); - - if(cli.hasOption('h')) { - helpFormatter.printHelp("sample", options); - exit(0, null); - } - String command = ""; - if (cli.hasOption("cc")) { - command = CREATE_COLLECTION_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption, configSetOption); - } else if (cli.hasOption("uc")) { - command = UPLOAD_CONFIG_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption, configDirOption); - } else if (cli.hasOption("dc")) { - command = DOWNLOAD_CONFIG_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption, configDirOption); - } else if (cli.hasOption("csh")) { - command = CREATE_SHARD_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption, shardNameOption); - } else if (cli.hasOption("chc")) { - command = CONFIG_CHECK_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption); - } else if (cli.hasOption("cp")) { - command = SET_CLUSTER_PROP; - validateRequiredOptions(cli, command, zkConnectStringOption, propNameOption, propValueOption); - } else if (cli.hasOption("cz")) { - command = CREATE_ZNODE; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption); - } else if (cli.hasOption("chz")){ - command = CHECK_ZNODE; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption); - } else if (cli.hasOption("skp")) { - command = SETUP_KERBEROS_PLUGIN; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption); - } else if (cli.hasOption("sz")) { - command = SECURE_ZNODE_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption, jaasFileOption, saslUsersOption); - } else if (cli.hasOption("ssz")) { - command = SECURE_SOLR_ZNODE_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption, jaasFileOption, saslUsersOption); - } else if (cli.hasOption("uz")) { - command = UNSECURE_ZNODE_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption, jaasFileOption); - } else if (cli.hasOption("rah")) { - command = REMOVE_ADMIN_HANDLERS; - validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption); - } else if (cli.hasOption("tz")) { - command = TRANSFER_ZNODE_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, copyScrOption, copyDestOption); - } else if (cli.hasOption("dz")) { - command = DELETE_ZNODE_COMMAND; - validateRequiredOptions(cli, command, zkConnectStringOption, znodeOption); - } else if (cli.hasOption("dcd")) { - command = DUMP_COLLECTIONS_DATA_COMMAND; - validateRequiredOptions(cli, command, 
zkConnectStringOption, outputOption); - } else { - List commands = Arrays.asList(CREATE_COLLECTION_COMMAND, CREATE_SHARD_COMMAND, UPLOAD_CONFIG_COMMAND, - DOWNLOAD_CONFIG_COMMAND, CONFIG_CHECK_COMMAND, SET_CLUSTER_PROP, CREATE_ZNODE, SECURE_ZNODE_COMMAND, UNSECURE_ZNODE_COMMAND, - SECURE_SOLR_ZNODE_COMMAND, CHECK_ZNODE, SETUP_KERBEROS_PLUGIN, REMOVE_ADMIN_HANDLERS, TRANSFER_ZNODE_COMMAND, DELETE_ZNODE_COMMAND, - DUMP_COLLECTIONS_DATA_COMMAND); - helpFormatter.printHelp(CMD_LINE_SYNTAX, options); - exit(1, String.format("One of the supported commands is required (%s)", StringUtils.join(commands, "|"))); - } - - String zkConnectString = cli.getOptionValue('z'); - String collection = cli.getOptionValue('c'); - String configSet = cli.getOptionValue("cs"); - String configDir = cli.getOptionValue("d"); - int shards = cli.hasOption('s') ? Integer.parseInt(cli.getOptionValue('s')) : 1; - int replication = cli.hasOption('r') ? Integer.parseInt(cli.getOptionValue('r')) : 1; - int retry = cli.hasOption("rt") ? Integer.parseInt(cli.getOptionValue("rt")) : 5; - int interval = cli.hasOption('i') ? Integer.parseInt(cli.getOptionValue('i')) : 10; - int maxShards = cli.hasOption('m') ? Integer.parseInt(cli.getOptionValue('m')) : shards * replication; - String routerName = cli.hasOption("rn") ? cli.getOptionValue("rn") : null; - String routerField = cli.hasOption("rf") ? cli.getOptionValue("rf") : null; - String shardName = cli.hasOption("sn") ? cli.getOptionValue("sn") : null; - boolean implicitRouting = cli.hasOption("ir"); - String jaasFile = cli.hasOption("jf") ? cli.getOptionValue("jf") : null; - String keyStoreLocation = cli.hasOption("ksl") ? cli.getOptionValue("ksl") : null; - String keyStorePassword = cli.hasOption("ksp") ? cli.getOptionValue("ksp") : null; - String keyStoreType = cli.hasOption("kst") ? cli.getOptionValue("kst") : null; - String trustStoreLocation = cli.hasOption("tsl") ? cli.getOptionValue("tsl") : null; - String trustStorePassword = cli.hasOption("tsp") ? cli.getOptionValue("tsp") : null; - String trustStoreType = cli.hasOption("tst") ? cli.getOptionValue("tst") : null; - String clusterPropName = cli.hasOption("cpn") ? cli.getOptionValue("cpn") : null; - String clusterPropValue = cli.hasOption("cpv") ? cli.getOptionValue("cpv") : null; - String znode = cli.hasOption("zn") ? cli.getOptionValue("zn") : null; - boolean isSecure = cli.hasOption("sec"); - String saslUsers = cli.hasOption("su") ? cli.getOptionValue("su") : ""; - String securityJsonLocation = cli.hasOption("sjl") ? cli.getOptionValue("sjl") : ""; - String copySrc = cli.hasOption("cps") ? cli.getOptionValue("cps") : null; - String copyDest = cli.hasOption("cpd") ? cli.getOptionValue("cpd") : null; - String transferMode = cli.hasOption("tm") ? cli.getOptionValue("tm") : "NONE"; - String output = cli.hasOption("o") ? 
cli.getOptionValue("o") : null; - boolean includeDocNumber = cli.hasOption("idn"); - - AmbariSolrCloudClientBuilder clientBuilder = new AmbariSolrCloudClientBuilder() - .withZkConnectString(zkConnectString) - .withCollection(collection) - .withConfigSet(configSet) - .withShards(shards) - .withReplication(replication) - .withMaxShardsPerNode(maxShards) - .withRetry(retry) - .withInterval(interval) - .withRouterName(routerName) - .withRouterField(routerField) - .withJaasFile(jaasFile) // call before creating SolrClient - .isImplicitRouting(implicitRouting) - .withSolrZkClient(ZK_CLIENT_TIMEOUT, ZK_CLIENT_CONNECT_TIMEOUT) - .withKeyStoreLocation(keyStoreLocation) - .withKeyStorePassword(keyStorePassword) - .withKeyStoreType(keyStoreType) - .withTrustStoreLocation(trustStoreLocation) - .withTrustStorePassword(trustStorePassword) - .withTrustStoreType(trustStoreType) - .withClusterPropName(clusterPropName) - .withClusterPropValue(clusterPropValue) - .withTransferMode(transferMode) - .withCopySrc(copySrc) - .withCopyDest(copyDest) - .withOutput(output) - .withIncludeDocNumber(includeDocNumber) - .withSecurityJsonLocation(securityJsonLocation) - .withZnode(znode) - .withSecure(isSecure) - .withSaslUsers(saslUsers); - - switch (command) { - case CREATE_COLLECTION_COMMAND: - solrCloudClient = clientBuilder - .withSolrCloudClient() - .build(); - solrCloudClient.createCollection(); - break; - case UPLOAD_CONFIG_COMMAND: - solrCloudClient = clientBuilder - .withConfigDir(configDir) - .build(); - solrCloudClient.uploadConfiguration(); - break; - case DOWNLOAD_CONFIG_COMMAND: - solrCloudClient = clientBuilder - .withConfigDir(configDir) - .build(); - solrCloudClient.downloadConfiguration(); - break; - case CONFIG_CHECK_COMMAND: - solrCloudClient = clientBuilder.build(); - boolean configExists = solrCloudClient.configurationExists(); - if (!configExists) { - exit(1, null); - } - break; - case CREATE_SHARD_COMMAND: - solrCloudClient = clientBuilder - .withSolrCloudClient() - .build(); - solrCloudClient.createShard(shardName); - break; - case SET_CLUSTER_PROP: - solrCloudClient = clientBuilder.build(); - solrCloudClient.setClusterProp(); - break; - case CREATE_ZNODE: - solrCloudClient = clientBuilder.build(); - solrCloudClient.createZnode(); - break; - case CHECK_ZNODE: - solrCloudClient = clientBuilder.build(); - boolean znodeExists = solrCloudClient.isZnodeExists(znode); - if (!znodeExists) { - exit(1, String.format("'%s' znode does not exist. 
Solr is responsible for creating the znode, " + - "check whether Solr started successfully or not", znode)); - } - break; - case SETUP_KERBEROS_PLUGIN: - solrCloudClient = clientBuilder.build(); - solrCloudClient.setupKerberosPlugin(); - break; - case SECURE_ZNODE_COMMAND: - solrCloudClient = clientBuilder.build(); - solrCloudClient.secureZnode(); - break; - case UNSECURE_ZNODE_COMMAND: - solrCloudClient = clientBuilder.build(); - solrCloudClient.unsecureZnode(); - break; - case SECURE_SOLR_ZNODE_COMMAND: - solrCloudClient = clientBuilder.build(); - solrCloudClient.secureSolrZnode(); - break; - case REMOVE_ADMIN_HANDLERS: - solrCloudClient = clientBuilder.build(); - solrCloudClient.removeAdminHandlerFromCollectionConfig(); - break; - case TRANSFER_ZNODE_COMMAND: - solrCloudClient = clientBuilder.build(); - solrCloudClient.transferZnode(); - break; - case DELETE_ZNODE_COMMAND: - solrCloudClient = clientBuilder.build(); - solrCloudClient.deleteZnode(); - break; - case DUMP_COLLECTIONS_DATA_COMMAND: - solrCloudClient = clientBuilder - .withSolrCloudClient().build(); - solrCloudClient.outputCollectionData(); - break; - default: - throw new AmbariSolrCloudClientException(String.format("Unknown command: '%s'", command)); - } - } catch (Exception e) { - helpFormatter.printHelp( - CMD_LINE_SYNTAX, options); - exit(1, e.getMessage()); - } finally { - if (solrCloudClient != null && solrCloudClient.getSolrZkClient() != null) { - solrCloudClient.getSolrZkClient().close(); - } - } - exit(0, null); - } - - private static void validateRequiredOptions(CommandLine cli, String command, Option... optionsToValidate) - throws AmbariSolrCloudClientException { - List<String> requiredOptions = new ArrayList<>(); - for (Option opt : optionsToValidate) { - if (!cli.hasOption(opt.getOpt())) { - requiredOptions.add(opt.getOpt()); - } - } - if (!requiredOptions.isEmpty()) { - throw new AmbariSolrCloudClientException( - String.format("The following options are required for '%s': %s", - command, StringUtils.join(requiredOptions, ","))); - } - } - - private static void exit(int exitCode, String message) { - if (message != null) { - LOG.error(message); - } - LOG.info("Return code: {}", exitCode); - System.exit(exitCode); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java deleted file mode 100644 index 7571c99b1dc..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClient.java +++ /dev/null @@ -1,405 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ -package org.apache.ambari.infra.solr; - -import org.apache.ambari.infra.solr.commands.CheckConfigZkCommand; -import org.apache.ambari.infra.solr.commands.CreateCollectionCommand; -import org.apache.ambari.infra.solr.commands.CreateShardCommand; -import org.apache.ambari.infra.solr.commands.CreateSolrZnodeZkCommand; -import org.apache.ambari.infra.solr.commands.DeleteZnodeZkCommand; -import org.apache.ambari.infra.solr.commands.DownloadConfigZkCommand; -import org.apache.ambari.infra.solr.commands.DumpCollectionsCommand; -import org.apache.ambari.infra.solr.commands.EnableKerberosPluginSolrZkCommand; -import org.apache.ambari.infra.solr.commands.GetShardsCommand; -import org.apache.ambari.infra.solr.commands.GetSolrHostsCommand; -import org.apache.ambari.infra.solr.commands.ListCollectionCommand; -import org.apache.ambari.infra.solr.commands.RemoveAdminHandlersCommand; -import org.apache.ambari.infra.solr.commands.SecureSolrZNodeZkCommand; -import org.apache.ambari.infra.solr.commands.SecureZNodeZkCommand; -import org.apache.ambari.infra.solr.commands.SetClusterPropertyZkCommand; -import org.apache.ambari.infra.solr.commands.TransferZnodeZkCommand; -import org.apache.ambari.infra.solr.commands.UnsecureZNodeZkCommand; -import org.apache.ambari.infra.solr.commands.UploadConfigZkCommand; -import org.apache.ambari.infra.solr.commands.CheckZnodeZkCommand; -import org.apache.ambari.infra.solr.util.ShardUtils; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.SolrZkClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collection; -import java.util.List; - -/** - * Client for communicate with Solr (and Zookeeper) - */ -public class AmbariSolrCloudClient { - - private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudClient.class); - - private final String zkConnectString; - private final String collection; - private final String configSet; - private final String configDir; - private final int shards; - private final int replication; - private final int retryTimes; - private final int interval; - private final CloudSolrClient solrCloudClient; - private final SolrZkClient solrZkClient; - private final int maxShardsPerNode; - private final String routerName; - private final String routerField; - private final boolean implicitRouting; - private final String jaasFile; - private final String znode; - private final String saslUsers; - private final String propName; - private final String propValue; - private final String securityJsonLocation; - private final boolean secure; - private final String transferMode; - private final String copySrc; - private final String copyDest; - private final String output; - private final boolean includeDocNumber; - - public AmbariSolrCloudClient(AmbariSolrCloudClientBuilder builder) { - this.zkConnectString = builder.zkConnectString; - this.collection = builder.collection; - this.configSet = builder.configSet; - this.configDir = builder.configDir; - this.shards = builder.shards; - this.replication = builder.replication; - this.retryTimes = builder.retryTimes; - this.interval = builder.interval; - this.jaasFile = builder.jaasFile; - this.solrCloudClient = builder.solrCloudClient; - this.solrZkClient = builder.solrZkClient; - this.maxShardsPerNode = builder.maxShardsPerNode; - this.routerName = builder.routerName; - this.routerField = builder.routerField; - this.implicitRouting = builder.implicitRouting; - this.znode = builder.znode; 
- this.saslUsers = builder.saslUsers; - this.propName = builder.propName; - this.propValue = builder.propValue; - this.securityJsonLocation = builder.securityJsonLocation; - this.secure = builder.secure; - this.transferMode = builder.transferMode; - this.copySrc = builder.copySrc; - this.copyDest = builder.copyDest; - this.output = builder.output; - this.includeDocNumber = builder.includeDocNumber; - } - - /** - * Get Solr collections - */ - public List listCollections() throws Exception { - return new ListCollectionCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Create Solr collection if exists - */ - public String createCollection() throws Exception { - List collections = listCollections(); - if (!collections.contains(getCollection())) { - String collection = new CreateCollectionCommand(getRetryTimes(), getInterval()).run(this); - LOG.info("Collection '{}' creation request sent.", collection); - } else { - LOG.info("Collection '{}' already exits.", getCollection()); - if (this.isImplicitRouting()) { - createShard(null); - } - } - return getCollection(); - } - - public String outputCollectionData() throws Exception { - List collections = listCollections(); - String result = new DumpCollectionsCommand(getRetryTimes(), getInterval(), collections).run(this); - LOG.info("Dump collections response: {}", result); - return result; - } - - /** - * Set cluster property in clusterprops.json. - */ - public void setClusterProp() throws Exception { - LOG.info("Set cluster prop: '{}'", this.getPropName()); - String newPropValue = new SetClusterPropertyZkCommand(getRetryTimes(), getInterval()).run(this); - LOG.info("Set cluster prop '{}' successfully to '{}'", this.getPropName(), newPropValue); - } - - /** - * Create a znode only if it does not exist. Return 0 code if it exists. - */ - public void createZnode() throws Exception { - boolean znodeExists = isZnodeExists(this.znode); - if (znodeExists) { - LOG.info("Znode '{}' already exists.", this.znode); - } else { - LOG.info("Znode '{}' does not exist. Creating...", this.znode); - String newZnode = new CreateSolrZnodeZkCommand(getRetryTimes(), getInterval()).run(this); - LOG.info("Znode '{}' is created successfully.", newZnode); - } - } - - /** - * Check znode exists or not based on the zookeeper connect string. 
- * E.g.: localhost:2181 and znode: /ambari-solr, checks existance of localhost:2181/ambari-solr - */ - public boolean isZnodeExists(String znode) throws Exception { - LOG.info("Check '{}' znode exists or not", znode); - boolean result = new CheckZnodeZkCommand(getRetryTimes(), getInterval(), znode).run(this); - if (result) { - LOG.info("'{}' znode exists", znode); - } else { - LOG.info("'{}' znode does not exist", znode); - } - return result; - } - - public void setupKerberosPlugin() throws Exception { - LOG.info("Setup kerberos plugin in security.json"); - new EnableKerberosPluginSolrZkCommand(getRetryTimes(), getInterval()).run(this); - LOG.info("KerberosPlugin is set in security.json"); - } - - /** - * Secure solr znode - */ - public void secureSolrZnode() throws Exception { - new SecureSolrZNodeZkCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Secure znode - */ - public void secureZnode() throws Exception { - new SecureZNodeZkCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Unsecure znode - */ - public void unsecureZnode() throws Exception { - LOG.info("Disable security for znode - ", this.getZnode()); - new UnsecureZNodeZkCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Upload config set to zookeeper - */ - public String uploadConfiguration() throws Exception { - String configSet = new UploadConfigZkCommand(getRetryTimes(), getInterval()).run(this); - LOG.info("'{}' is uploaded to zookeeper.", configSet); - return configSet; - } - - /** - * Download config set from zookeeper - */ - public String downloadConfiguration() throws Exception { - String configDir = new DownloadConfigZkCommand(getRetryTimes(), getInterval()).run(this); - LOG.info("Config set is download from zookeeper. ({})", configDir); - return configDir; - } - - /** - * Get configuration if exists in zookeeper - */ - public boolean configurationExists() throws Exception { - boolean configExits = new CheckConfigZkCommand(getRetryTimes(), getInterval()).run(this); - if (configExits) { - LOG.info("Config {} exits", configSet); - } else { - LOG.info("Configuration '{}' does not exist", configSet); - } - return configExits; - } - - /** - * Create shard in collection - create a new one if shard name specified, if - * not create based on the number of shards logic (with shard_# suffix) - * - * @param shard - * name of the created shard - */ - public Collection createShard(String shard) throws Exception { - Collection existingShards = getShardNames(); - if (shard != null) { - new CreateShardCommand(shard, getRetryTimes(), getInterval()).run(this); - existingShards.add(shard); - } else { - List shardList = ShardUtils.generateShardList(getMaxShardsPerNode()); - for (String shardName : shardList) { - if (!existingShards.contains(shardName)) { - new CreateShardCommand(shardName, getRetryTimes(), getInterval()).run(this); - LOG.info("Adding new shard to collection request sent ('{}': {})", getCollection(), shardName); - existingShards.add(shardName); - } - } - } - return existingShards; - } - - /** - * Get shard names - */ - public Collection getShardNames() throws Exception { - Collection slices = new GetShardsCommand(getRetryTimes(), getInterval()).run(this); - return ShardUtils.getShardNamesFromSlices(slices, this.getCollection()); - } - - /** - * Get Solr Hosts - */ - public Collection getSolrHosts() throws Exception { - return new GetSolrHostsCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Remove solr.admin.AdminHandlers requestHandler from solrconfi.xml - 
*/ - public boolean removeAdminHandlerFromCollectionConfig() throws Exception { - return new RemoveAdminHandlersCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Transfer znode data (cannot be both scr and dest local) - */ - public boolean transferZnode() throws Exception { - return new TransferZnodeZkCommand(getRetryTimes(), getInterval()).run(this); - } - - /** - * Delete znode path (and all sub nodes) - */ - public boolean deleteZnode() throws Exception { - return new DeleteZnodeZkCommand(getRetryTimes(), getInterval()).run(this); - } - - public String getZkConnectString() { - return zkConnectString; - } - - public String getCollection() { - return collection; - } - - public String getConfigSet() { - return configSet; - } - - public String getConfigDir() { - return configDir; - } - - public int getShards() { - return shards; - } - - public int getReplication() { - return replication; - } - - public int getRetryTimes() { - return retryTimes; - } - - public int getInterval() { - return interval; - } - - public CloudSolrClient getSolrCloudClient() { - return solrCloudClient; - } - - public SolrZkClient getSolrZkClient() { - return solrZkClient; - } - - public int getMaxShardsPerNode() { - return maxShardsPerNode; - } - - public String getRouterName() { - return routerName; - } - - public String getRouterField() { - return routerField; - } - - public boolean isImplicitRouting() { - return implicitRouting; - } - - public String getJaasFile() { - return jaasFile; - } - - public String getSaslUsers() { - return saslUsers; - } - - public String getZnode() { - return znode; - } - - public String getPropName() { - return propName; - } - - public String getPropValue() { - return propValue; - } - - public boolean isSecure() { - return secure; - } - - public String getSecurityJsonLocation() { - return securityJsonLocation; - } - - public String getTransferMode() { - return transferMode; - } - - public String getCopySrc() { - return copySrc; - } - - public String getCopyDest() { - return copyDest; - } - - public String getOutput() { - return output; - } - - public boolean isIncludeDocNumber() { - return includeDocNumber; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientBuilder.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientBuilder.java deleted file mode 100644 index db4396b5819..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientBuilder.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.infra.solr; - -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder; -import org.apache.solr.common.cloud.SolrZkClient; - -public class AmbariSolrCloudClientBuilder { - private static final String KEYSTORE_LOCATION_ARG = "javax.net.ssl.keyStore"; - private static final String KEYSTORE_PASSWORD_ARG = "javax.net.ssl.keyStorePassword"; - private static final String KEYSTORE_TYPE_ARG = "javax.net.ssl.keyStoreType"; - private static final String TRUSTSTORE_LOCATION_ARG = "javax.net.ssl.trustStore"; - private static final String TRUSTSTORE_PASSWORD_ARG = "javax.net.ssl.trustStorePassword"; - private static final String TRUSTSTORE_TYPE_ARG = "javax.net.ssl.trustStoreType"; - private static final String JAVA_SECURITY_AUTH_LOGIN_CONFIG = "java.security.auth.login.config"; - private static final String SOLR_HTTPCLIENT_BUILDER_FACTORY = "solr.httpclient.builder.factory"; - - String zkConnectString; - String collection; - String configSet; - String configDir; - int shards = 1; - int replication = 1; - int retryTimes = 10; - int interval = 5; - int maxShardsPerNode = replication * shards; - String routerName = "implicit"; - String routerField = "_router_field_"; - CloudSolrClient solrCloudClient; - SolrZkClient solrZkClient; - boolean implicitRouting; - String jaasFile; - String znode; - String saslUsers; - String propName; - String propValue; - String securityJsonLocation; - boolean secure; - String transferMode; - String copySrc; - String copyDest; - String output; - public boolean includeDocNumber; - - public AmbariSolrCloudClient build() { - return new AmbariSolrCloudClient(this); - } - - public AmbariSolrCloudClientBuilder withZkConnectString(String zkConnectString) { - this.zkConnectString = zkConnectString; - return this; - } - - public AmbariSolrCloudClientBuilder withCollection(String collection) { - this.collection = collection; - return this; - } - - public AmbariSolrCloudClientBuilder withConfigSet(String configSet) { - this.configSet = configSet; - return this; - } - - public AmbariSolrCloudClientBuilder withConfigDir(String configDir) { - this.configDir = configDir; - return this; - } - - public AmbariSolrCloudClientBuilder withShards(int shards) { - this.shards = shards; - return this; - } - - public AmbariSolrCloudClientBuilder withReplication(int replication) { - this.replication = replication; - return this; - } - - public AmbariSolrCloudClientBuilder withRetry(int retryTimes) { - this.retryTimes = retryTimes; - return this; - } - - public AmbariSolrCloudClientBuilder withInterval(int interval) { - this.interval = interval; - return this; - } - - public AmbariSolrCloudClientBuilder withMaxShardsPerNode(int maxShardsPerNode) { - this.maxShardsPerNode = maxShardsPerNode; - return this; - } - - public AmbariSolrCloudClientBuilder withRouterName(String routerName) { - this.routerName = routerName; - return this; - } - - public AmbariSolrCloudClientBuilder withRouterField(String routerField) { - this.routerField = routerField; - return this; - } - - public AmbariSolrCloudClientBuilder isImplicitRouting(boolean implicitRouting) { - this.implicitRouting = implicitRouting; - return this; - } - - public AmbariSolrCloudClientBuilder withJaasFile(String jaasFile) { - this.jaasFile = jaasFile; - setupSecurity(jaasFile); - return this; - } - - public AmbariSolrCloudClientBuilder withSolrCloudClient() { - this.solrCloudClient = new 
CloudSolrClient.Builder().withZkHost(this.zkConnectString).build(); - return this; - } - - public AmbariSolrCloudClientBuilder withSolrZkClient(int zkClientTimeout, int zkClientConnectTimeout) { - this.solrZkClient = new SolrZkClient(this.zkConnectString, zkClientTimeout, zkClientConnectTimeout); - return this; - } - - public AmbariSolrCloudClientBuilder withKeyStoreLocation(String keyStoreLocation) { - if (keyStoreLocation != null) { - System.setProperty(KEYSTORE_LOCATION_ARG, keyStoreLocation); - } - return this; - } - - public AmbariSolrCloudClientBuilder withKeyStorePassword(String keyStorePassword) { - if (keyStorePassword != null) { - System.setProperty(KEYSTORE_PASSWORD_ARG, keyStorePassword); - } - return this; - } - - public AmbariSolrCloudClientBuilder withKeyStoreType(String keyStoreType) { - if (keyStoreType != null) { - System.setProperty(KEYSTORE_TYPE_ARG, keyStoreType); - } - return this; - } - - public AmbariSolrCloudClientBuilder withTrustStoreLocation(String trustStoreLocation) { - if (trustStoreLocation != null) { - System.setProperty(TRUSTSTORE_LOCATION_ARG, trustStoreLocation); - } - return this; - } - - public AmbariSolrCloudClientBuilder withTrustStorePassword(String trustStorePassword) { - if (trustStorePassword != null) { - System.setProperty(TRUSTSTORE_PASSWORD_ARG, trustStorePassword); - } - return this; - } - - public AmbariSolrCloudClientBuilder withTrustStoreType(String trustStoreType) { - if (trustStoreType != null) { - System.setProperty(TRUSTSTORE_TYPE_ARG, trustStoreType); - } - return this; - } - - public AmbariSolrCloudClientBuilder withSaslUsers(String saslUsers) { - this.saslUsers = saslUsers; - return this; - } - - public AmbariSolrCloudClientBuilder withZnode(String znode) { - this.znode = znode; - return this; - } - - public AmbariSolrCloudClientBuilder withClusterPropName(String clusterPropName) { - this.propName = clusterPropName; - return this; - } - - public AmbariSolrCloudClientBuilder withClusterPropValue(String clusterPropValue) { - this.propValue = clusterPropValue; - return this; - } - - public AmbariSolrCloudClientBuilder withTransferMode(String transferMode) { - this.transferMode = transferMode; - return this; - } - - public AmbariSolrCloudClientBuilder withCopySrc(String copySrc) { - this.copySrc = copySrc; - return this; - } - - public AmbariSolrCloudClientBuilder withCopyDest(String copyDest) { - this.copyDest = copyDest; - return this; - } - - public AmbariSolrCloudClientBuilder withOutput(String output) { - this.output = output; - return this; - } - - public AmbariSolrCloudClientBuilder withIncludeDocNumber(boolean includeDocNumber) { - this.includeDocNumber = includeDocNumber; - return this; - } - - public AmbariSolrCloudClientBuilder withSecurityJsonLocation(String securityJson) { - this.securityJsonLocation = securityJson; - return this; - } - - public AmbariSolrCloudClientBuilder withSecure(boolean isSecure) { - this.secure = isSecure; - return this; - } - - private void setupSecurity(String jaasFile) { - if (jaasFile != null) { - System.setProperty(JAVA_SECURITY_AUTH_LOGIN_CONFIG, jaasFile); - System.setProperty(SOLR_HTTPCLIENT_BUILDER_FACTORY, Krb5HttpClientBuilder.class.getCanonicalName()); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientException.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientException.java deleted file mode 100644 index d339a77b431..00000000000 --- 
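For illustration, a minimal sketch of how the builder and client above are typically wired together, mirroring the call sequence in AmbariSolrCloudCLI; the ZooKeeper connect string, collection name, and config set name below are placeholders only:

import org.apache.ambari.infra.solr.AmbariSolrCloudClient;
import org.apache.ambari.infra.solr.AmbariSolrCloudClientBuilder;

public class AmbariSolrCloudClientUsageSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder quorum and znode; replace with a real ZooKeeper connect string.
    AmbariSolrCloudClient client = new AmbariSolrCloudClientBuilder()
      .withZkConnectString("host1:2181,host2:2181/ambari-solr")
      .withCollection("example_collection")   // placeholder collection name
      .withConfigSet("example_config_set")    // placeholder config set already uploaded to ZooKeeper
      .withShards(1)
      .withReplication(1)
      .withSolrCloudClient()                  // builds the CloudSolrClient used by collection commands
      .build();
    client.createCollection();                // logs and skips creation if the collection already exists
  }
}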
a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientException.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr; - -public class AmbariSolrCloudClientException extends Exception{ - public AmbariSolrCloudClientException(String message) { - super(message); - } - public AmbariSolrCloudClientException(String message, Throwable throwable) { - super(message, throwable); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/S3Uploader.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/S3Uploader.java deleted file mode 100644 index 60b4e0af940..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/S3Uploader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.infra.solr; - -import java.io.File; - -import org.apache.commons.io.FileUtils; - -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.services.s3.AmazonS3Client; - -/** - * Uploads a file to S3, meant to be used by solrDataManager.py - */ -public class S3Uploader { - public static void main(String[] args) { - try { - String keyFilePath = args[0]; - String bucketName = args[1]; - String keyPrefix = args[2]; - String filePath = args[3]; - - String keyFileContent = FileUtils.readFileToString(new File(keyFilePath)).trim(); - String[] keys = keyFileContent.split(","); - String accessKey = keys[0]; - String secretKey = keys[1]; - - BasicAWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey); - AmazonS3Client client = new AmazonS3Client(credentials); - - File file = new File(filePath); - String key = keyPrefix + file.getName(); - - if (client.doesObjectExist(bucketName, key)) { - System.out.println("Object '" + key + "' already exists"); - System.exit(0); - } - - client.putObject(bucketName, key, file); - } catch (Exception e) { - e.printStackTrace(System.err); - System.exit(1); - } - - System.exit(0); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java deleted file mode 100644 index 5e87859590f..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractRetryCommand.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
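For illustration, a minimal invocation sketch for S3Uploader, with placeholder paths, bucket, and prefix; the key file is expected to hold a single accessKey,secretKey pair, and the four positional arguments are the key file, the bucket, the key prefix, and the file to upload:

public class S3UploaderUsageSketch {
  public static void main(String[] args) {
    // Placeholder arguments only; S3Uploader.main calls System.exit when it finishes.
    org.apache.ambari.infra.solr.S3Uploader.main(new String[] {
        "/etc/example/s3_keys.csv",  // key file containing "accessKey,secretKey"
        "example-bucket",            // destination S3 bucket
        "solr/backups/",             // key prefix prepended to the uploaded file name
        "/tmp/example_core.tgz"      // local file to upload
    });
  }
}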
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.AmbariSolrCloudClientException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class AbstractRetryCommand { - private static final Logger LOG = LoggerFactory.getLogger(AbstractRetryCommand.class); - - private final int interval; - private final int maxRetries; - - public AbstractRetryCommand(int maxRetries, int interval) { - this.maxRetries = maxRetries; - this.interval = interval; - } - - public abstract RESPONSE createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception; - - public RESPONSE run(AmbariSolrCloudClient solrCloudClient) throws Exception { - return retry(0, solrCloudClient); - } - - private RESPONSE retry(int tries, AmbariSolrCloudClient solrCloudClient) throws Exception { - try { - return createAndProcessRequest(solrCloudClient); - } catch (Exception ex) { - LOG.error(ex.getMessage(), ex); - tries++; - LOG.info("Command failed, tries again (tries: {})", tries); - if (maxRetries == tries) { - throw new AmbariSolrCloudClientException(String.format("Maximum retries exceeded: %d", tries), ex); - } else { - Thread.sleep(interval * 1000); - return retry(tries, solrCloudClient); - } - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractSolrRetryCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractSolrRetryCommand.java deleted file mode 100644 index fdf26a728b6..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractSolrRetryCommand.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
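For illustration, a minimal sketch of a concrete command built on the retry base class above, assuming AbstractRetryCommand is generic in its RESPONSE type as the method signatures indicate; the command itself is hypothetical:

package org.apache.ambari.infra.solr.commands;

import org.apache.ambari.infra.solr.AmbariSolrCloudClient;

// Hypothetical command used only to demonstrate the retry contract.
public class ExampleZnodeNameCommand extends AbstractRetryCommand<String> {

  public ExampleZnodeNameCommand(int maxRetries, int interval) {
    super(maxRetries, interval); // run() re-invokes createAndProcessRequest until it succeeds or maxRetries is reached
  }

  @Override
  public String createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception {
    // Real commands call Solr or ZooKeeper here; any exception thrown triggers another retry attempt.
    return solrCloudClient.getZnode();
  }
}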
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.AmbariSolrCloudClientException; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.apache.solr.client.solrj.response.SolrResponseBase; - -public abstract class AbstractSolrRetryCommand - extends AbstractRetryCommand { - - public AbstractSolrRetryCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - public abstract RESPONSE handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception; - - public abstract REQUEST createRequest(AmbariSolrCloudClient client); - - public abstract String errorMessage(AmbariSolrCloudClient client); - - @Override - public RESPONSE createAndProcessRequest(AmbariSolrCloudClient client) throws Exception { - REQUEST request = createRequest(client); - CollectionAdminResponse response = (CollectionAdminResponse) request.process(client.getSolrCloudClient()); - handleErrorIfExists(response, errorMessage(client)); - return handleResponse(response, client); - } - - private void handleErrorIfExists(SolrResponseBase response, String message) throws AmbariSolrCloudClientException { - if (response.getStatus() != 0) { - throw new AmbariSolrCloudClientException(message); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractStateFileZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractStateFileZkCommand.java deleted file mode 100644 index b4872e21777..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractStateFileZkCommand.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.domain.AmbariSolrState; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.map.ObjectMapper; - -public abstract class AbstractStateFileZkCommand extends AbstractZookeeperRetryCommand{ - - public static final String STATE_FILE = "ambari-solr-state.json"; - public static final String STATE_FIELD = "ambari_solr_security_state"; - - public AbstractStateFileZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - public AmbariSolrState getStateFromJson(AmbariSolrCloudClient client, String fileName) throws Exception { - byte[] data = client.getSolrZkClient().getData(fileName, null, null, true); - String input = new String(data); - ObjectMapper mapper = new ObjectMapper(); - JsonNode rootNode = mapper.readValue(input.getBytes(), JsonNode.class); - return AmbariSolrState.valueOf(rootNode.get(STATE_FIELD).asText()); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperConfigCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperConfigCommand.java deleted file mode 100644 index dec34f1dc95..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperConfigCommand.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.solr.common.cloud.ZkConfigManager; - -public abstract class AbstractZookeeperConfigCommand extends AbstractZookeeperRetryCommand { - - public AbstractZookeeperConfigCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - protected abstract RESPONSE executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client) - throws Exception; - - @Override - protected RESPONSE executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - ZkConfigManager zkConfigManager = createZkConfigManager(zkClient); - return executeZkConfigCommand(zkConfigManager, client); - } - - protected ZkConfigManager createZkConfigManager(SolrZkClient zkClient) { - return new ZkConfigManager(zkClient); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperRetryCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperRetryCommand.java deleted file mode 100644 index e37088db3f0..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/AbstractZookeeperRetryCommand.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; - -public abstract class AbstractZookeeperRetryCommand extends AbstractRetryCommand { - - public AbstractZookeeperRetryCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - protected abstract RESPONSE executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) - throws Exception; - - @Override - public RESPONSE createAndProcessRequest(AmbariSolrCloudClient client) throws Exception { - SolrZkClient zkClient = client.getSolrZkClient(); - SolrZooKeeper solrZooKeeper = zkClient.getSolrZooKeeper(); - return executeZkCommand(client, zkClient, solrZooKeeper); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckConfigZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckConfigZkCommand.java deleted file mode 100644 index 0a03a65425a..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckConfigZkCommand.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.ZkConfigManager; - -public class CheckConfigZkCommand extends AbstractZookeeperConfigCommand { - - public CheckConfigZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client) throws Exception { - return zkConfigManager.configExists(client.getConfigSet()); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckZnodeZkCommand.java deleted file mode 100644 index 93eb478d2e3..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CheckZnodeZkCommand.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.AmbariSolrCloudClientException; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.KeeperException; - -public class CheckZnodeZkCommand extends AbstractZookeeperRetryCommand { - - private String znode; - - public CheckZnodeZkCommand(int maxRetries, int interval, String znode) { - super(maxRetries, interval); - this.znode = znode; - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - try { - return zkClient.exists(this.znode, false); - } catch (KeeperException e) { - throw new AmbariSolrCloudClientException("Exception during checking znode, " + - "Check zookeeper servers are running (n+1/2) or zookeeper quorum has established or not.", e); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateCollectionCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateCollectionCommand.java deleted file mode 100644 index 5d296ae839f..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateCollectionCommand.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.util.ShardUtils; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; - -public class CreateCollectionCommand extends AbstractSolrRetryCommand { - - public CreateCollectionCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - public String handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception { - return client.getCollection(); - } - - @Override - public CollectionAdminRequest.Create createRequest(AmbariSolrCloudClient client) { - CollectionAdminRequest.Create request = - CollectionAdminRequest.createCollection(client.getCollection(), client.getConfigSet(), client.getShards(), client.getReplication()); - request.setMaxShardsPerNode(client.getMaxShardsPerNode()); - if (client.isImplicitRouting()) { - request.setRouterName(client.getRouterName()); - request.setRouterField(client.getRouterField()); - request.setShards(ShardUtils.generateShardListStr(client.getMaxShardsPerNode())); - } - return request; - } - - @Override - public String errorMessage(AmbariSolrCloudClient client) { - return String.format("Cannot create collection: '%s'", client.getCollection()); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateShardCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateShardCommand.java deleted file mode 100644 index 549296678e1..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateShardCommand.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; - -public class CreateShardCommand extends AbstractSolrRetryCommand { - - private final String shardName; - - public CreateShardCommand(String shardName, int maxRetries, int interval) { - super(maxRetries, interval); - this.shardName = shardName; - } - - @Override - public String handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception { - return shardName; - } - - @Override - public CollectionAdminRequest.CreateShard createRequest(AmbariSolrCloudClient client) { - return CollectionAdminRequest.createShard(client.getCollection(), shardName); - } - - @Override - public String errorMessage(AmbariSolrCloudClient client) { - return String.format("Cannot add shard to collection '%s'", client.getCollection()); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateSolrZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateSolrZnodeZkCommand.java deleted file mode 100644 index 1460a8468b1..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/CreateSolrZnodeZkCommand.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
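The CreateShardCommand removed above is only meaningful for collections created with the implicit router. A sketch of the single admin call it wraps, assuming the caller already holds a connected CloudSolrClient and placeholder collection and shard names:

import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

public class CreateShardSketch {
  // Collection and shard names are placeholders supplied by the caller.
  static void addShard(CloudSolrClient solr, String collection, String shardName) throws Exception {
    CollectionAdminRequest.createShard(collection, shardName).process(solr);
  }
}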
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.AmbariSolrCloudClientException; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.KeeperException; - -public class CreateSolrZnodeZkCommand extends AbstractZookeeperRetryCommand { - - public CreateSolrZnodeZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - try { - zkClient.makePath(client.getZnode(), true); - return client.getZnode(); - } catch (KeeperException e) { - throw new AmbariSolrCloudClientException("Cannot create ZNode, check zookeeper servers are running (n+1/2), or zookeeper quorum has established or not.",e); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DeleteZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DeleteZnodeZkCommand.java deleted file mode 100644 index 1c9d4fb6187..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DeleteZnodeZkCommand.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; - -public class DeleteZnodeZkCommand extends AbstractZookeeperRetryCommand { - - public DeleteZnodeZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - zkClient.clean(client.getZnode()); - return true; - } - -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DownloadConfigZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DownloadConfigZkCommand.java deleted file mode 100644 index 990c3c31275..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DownloadConfigZkCommand.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
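The CreateSolrZnodeZkCommand and DeleteZnodeZkCommand removed above come down to SolrZkClient.makePath() and SolrZkClient.clean(). A sketch of both operations, assuming a placeholder ZooKeeper ensemble:

import org.apache.solr.common.cloud.SolrZkClient;

public class SolrZnodeSketch {
  public static void main(String[] args) throws Exception {
    SolrZkClient zkClient = new SolrZkClient("zk1:2181", 15000); // placeholder ensemble
    try {
      zkClient.makePath("/infra-solr", true);   // create the chroot (and any parents) if missing
      // To tear it down again, the delete command calls:
      // zkClient.clean("/infra-solr");          // recursive delete of the znode subtree
    } finally {
      zkClient.close();
    }
  }
}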
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.AmbariSolrCloudClientException; -import org.apache.solr.common.cloud.ZkConfigManager; - -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; - -public class DownloadConfigZkCommand extends AbstractZookeeperConfigCommand { - - public DownloadConfigZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected String executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client) throws Exception { - Path configDir = Paths.get(client.getConfigDir()); - String configSet = client.getConfigSet(); - try { - zkConfigManager.downloadConfigDir(configSet, configDir); - return configDir.toString(); - } catch (IOException e){ - throw new AmbariSolrCloudClientException("Error downloading configuration set, check Solr Znode has started or not " + - "(starting Solr (for Log Search) is responsible to create the Znode)" ,e); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DumpCollectionsCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DumpCollectionsCommand.java deleted file mode 100644 index 708ecac3139..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/DumpCollectionsCommand.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
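The DownloadConfigZkCommand removed above is a thin wrapper around ZkConfigManager.downloadConfigDir(). A sketch with placeholder config set name and local target directory:

import java.nio.file.Paths;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkConfigManager;

public class DownloadConfigSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connect string including the Solr chroot.
    SolrZkClient zkClient = new SolrZkClient("zk1:2181/infra-solr", 15000);
    try {
      new ZkConfigManager(zkClient)
          .downloadConfigDir("hadoop_logs_conf", Paths.get("/tmp/hadoop_logs_conf")); // placeholders
    } finally {
      zkClient.close();
    }
  }
}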
- */ -package org.apache.ambari.infra.solr.commands; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.domain.json.SolrCollection; -import org.apache.ambari.infra.solr.domain.json.SolrCoreData; -import org.apache.ambari.infra.solr.domain.json.SolrShard; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.solr.common.cloud.ZkStateReader; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class DumpCollectionsCommand extends AbstractZookeeperRetryCommand { - - private static final Logger logger = LoggerFactory.getLogger(DumpCollectionsCommand.class); - - private final List collections; - - public DumpCollectionsCommand(int maxRetries, int interval, List collections) { - super(maxRetries, interval); - this.collections = collections; - } - - @Override - protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - Map collectionMap = new HashMap<>(); - if (!this.collections.isEmpty()) { - for (String collection : this.collections) { - SolrCollection solrCollection = new SolrCollection(); - CloudSolrClient solrClient = client.getSolrCloudClient(); - if (client.isIncludeDocNumber()) { - long numberOfDocs = getNumberOfDocs(solrClient, collection); - solrCollection.setNumberOfDocs(numberOfDocs); - } - Collection slices = getSlices(solrClient, collection); - Integer numShards = slices.size(); - Map solrShardMap = new HashMap<>(); - Map> leaderHostCoreMap = new HashMap<>(); - Map leaderCoreDataMap = new HashMap<>(); - Map> leaderShardCoreMap = new HashMap<>(); - Map leaderCoreHostMap = new HashMap<>(); - for (Slice slice : slices) { - SolrShard solrShard = new SolrShard(); - solrShard.setName(slice.getName()); - solrShard.setState(slice.getState()); - Collection replicas = slice.getReplicas(); - Map replicaMap = new HashMap<>(); - leaderShardCoreMap.put(slice.getName(), new ArrayList<>()); - for (Replica replica : replicas) { - replicaMap.put(replica.getName(), replica); - Replica.State state = replica.getState(); - if (Replica.State.ACTIVE.equals(state) - && replica.getProperties().get("leader") != null && "true".equals(replica.getProperties().get("leader"))) { - String coreName = replica.getCoreName(); - String hostName = getHostFromNodeName(replica.getNodeName()); - if (leaderHostCoreMap.containsKey(hostName)) { - List coresList = leaderHostCoreMap.get(hostName); - coresList.add(coreName); - } else { - List coreList = new ArrayList<>(); - coreList.add(coreName); - leaderHostCoreMap.put(hostName, coreList); - } - Map properties = new HashMap<>(); - properties.put("name", coreName); - properties.put("coreNodeName", replica.getName()); - properties.put("shard", slice.getName()); - properties.put("collection", collection); - properties.put("numShards", numShards.toString()); - properties.put("replicaType", replica.getType().name()); - SolrCoreData solrCoreData = new 
SolrCoreData(replica.getName(), hostName, properties); - leaderCoreDataMap.put(coreName, solrCoreData); - leaderShardCoreMap.get(slice.getName()).add(coreName); - leaderCoreHostMap.put(coreName, hostName); - } - } - solrShard.setReplicas(replicaMap); - solrShardMap.put(slice.getName(), solrShard); - } - solrCollection.setShards(solrShardMap); - solrCollection.setLeaderHostCoreMap(leaderHostCoreMap); - solrCollection.setLeaderSolrCoreDataMap(leaderCoreDataMap); - solrCollection.setLeaderShardsMap(leaderShardCoreMap); - solrCollection.setLeaderCoreHostMap(leaderCoreHostMap); - solrCollection.setName(collection); - collectionMap.put(collection, solrCollection); - } - } - ObjectMapper objectMapper = new ObjectMapper(); - final ObjectWriter objectWriter = objectMapper - .writerWithDefaultPrettyPrinter(); - File file = new File(client.getOutput()); - if (!file.exists()) { - file.createNewFile(); - } - objectWriter.writeValue(file, collectionMap); - return objectWriter.writeValueAsString(collectionMap); - } - - private String getHostFromNodeName(String nodeName) { - String[] splitted = nodeName.split(":"); - if (splitted.length > 0) { - return splitted[0]; - } else { - if (nodeName.endsWith("_solr")) { - String[] splitted_ = nodeName.split("_"); - return splitted_[0]; - } - return nodeName; - } - } - - private Collection getSlices(CloudSolrClient solrClient, String collection) { - ZkStateReader reader = solrClient.getZkStateReader(); - DocCollection docCollection = reader.getClusterState().getCollection(collection); - return docCollection.getSlices(); - } - - private long getNumberOfDocs(CloudSolrClient solrClient, String collection) throws Exception { - solrClient.setDefaultCollection(collection); - SolrQuery q = new SolrQuery("*:*"); - q.setRows(0); - return solrClient.query(q).getResults().getNumFound(); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/EnableKerberosPluginSolrZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/EnableKerberosPluginSolrZkCommand.java deleted file mode 100644 index 793addd9e18..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/EnableKerberosPluginSolrZkCommand.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
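The heart of the DumpCollectionsCommand removed above is reading the cluster state through ZkStateReader and serializing it with Jackson. A trimmed sketch that collects shard leaders into a map and pretty-prints it; unlike the original, which scans replica properties for the "leader" flag, this sketch uses Slice.getLeader() for brevity, and it assumes the caller supplies an already connected CloudSolrClient and a placeholder collection name:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;

import java.util.HashMap;
import java.util.Map;

public class DumpStateSketch {
  // Assumes solr.connect() has already been called so the cluster state is available.
  static String dumpLeaders(CloudSolrClient solr, String collection) throws Exception {
    DocCollection coll = solr.getZkStateReader().getClusterState().getCollection(collection);
    Map<String, String> leaderCoreToNode = new HashMap<>();
    for (Slice slice : coll.getSlices()) {
      Replica leader = slice.getLeader(); // may be null while a shard is still electing a leader
      if (leader != null) {
        leaderCoreToNode.put(leader.getCoreName(), leader.getNodeName());
      }
    }
    return new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(leaderCoreToNode);
  }
}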
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.CreateMode; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -public class EnableKerberosPluginSolrZkCommand extends AbstractZookeeperRetryCommand { - - private static final String SECURITY_JSON = "/security.json"; - private static final String UNSECURE_CONTENT = "{}"; - - public EnableKerberosPluginSolrZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - String result = ""; - String filePath = client.getZnode() + SECURITY_JSON; - String fileContent = getFileContentFromZnode(zkClient, filePath); - String securityContent = getFileContent(client.getSecurityJsonLocation()); - if (client.isSecure()) { - if (!fileContent.equals(securityContent)) { - putFileContent(zkClient, filePath, securityContent); - } - result = securityContent; - } else { - if (!fileContent.equals(UNSECURE_CONTENT)) { - putFileContent(zkClient, filePath, UNSECURE_CONTENT); - } - result = UNSECURE_CONTENT; - } - return result; - } - - private void putFileContent(SolrZkClient zkClient, String fileName, String content) throws Exception { - if (zkClient.exists(fileName, true)) { - zkClient.setData(fileName, content.getBytes(StandardCharsets.UTF_8), true); - } else { - zkClient.create(fileName, content.getBytes(StandardCharsets.UTF_8), CreateMode.PERSISTENT, true); - } - } - - private String getFileContentFromZnode(SolrZkClient zkClient, String fileName) throws Exception { - String result; - if (zkClient.exists(fileName, true)) { - byte[] data = zkClient.getData(fileName, null, null, true); - result = new String(data, StandardCharsets.UTF_8); - } else { - result = UNSECURE_CONTENT; - } - return result; - } - - private String getFileContent(String fileLocation) throws IOException { - File securityJson = new File(fileLocation); - if (StringUtils.isNotEmpty(fileLocation) && securityJson.exists()) { - return FileUtils.readFileToString(securityJson); - } else { - return UNSECURE_CONTENT; - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetShardsCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetShardsCommand.java deleted file mode 100644 index 3683a1b9b45..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetShardsCommand.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
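The EnableKerberosPluginSolrZkCommand removed above amounts to a create-or-update of security.json under the Solr znode. A sketch of that write path, assuming a connected SolrZkClient and that the caller supplies the JSON payload (the original reads it from the security.json location passed on the command line):

import java.nio.charset.StandardCharsets;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.zookeeper.CreateMode;

public class SecurityJsonSketch {
  // znode is e.g. "/infra-solr"; content is the security.json payload (both placeholders).
  static void writeSecurityJson(SolrZkClient zkClient, String znode, String content) throws Exception {
    String path = znode + "/security.json";
    byte[] bytes = content.getBytes(StandardCharsets.UTF_8);
    if (zkClient.exists(path, true)) {
      zkClient.setData(path, bytes, true);
    } else {
      zkClient.create(path, bytes, CreateMode.PERSISTENT, true);
    }
  }
}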
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; - -import java.util.Collection; - -public class GetShardsCommand extends AbstractRetryCommand> { - - public GetShardsCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - public Collection createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception { - ZkStateReader zkReader = new ZkStateReader(solrCloudClient.getSolrZkClient()); - zkReader.createClusterStateWatchersAndUpdate(); - return zkReader.getClusterState().getCollection(solrCloudClient.getCollection()).getSlices(); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetSolrHostsCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetSolrHostsCommand.java deleted file mode 100644 index 5a14a448174..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetSolrHostsCommand.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.zookeeper.ZooKeeper; - -import java.net.InetAddress; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -public class GetSolrHostsCommand extends AbstractRetryCommand> { - - public GetSolrHostsCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - public Collection createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception { - List solrHosts = new ArrayList<>(); - - ZooKeeper zk = new ZooKeeper(solrCloudClient.getZkConnectString(), 10000, null); - List ids = zk.getChildren("/live_nodes", false); - for (String id : ids) { - if (id.endsWith("_solr")) { - String hostAndPort = id.substring(0, id.length() - 5); - String[] tokens = hostAndPort.split(":"); - String host = InetAddress.getByName(tokens[0]).getHostName(); - - solrHosts.add(host); - } - } - - return solrHosts; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetStateFileZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetStateFileZkCommand.java deleted file mode 100644 index 10a8daae316..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/GetStateFileZkCommand.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
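The GetSolrHostsCommand removed above derives host names from the /live_nodes children that SolrCloud registers. A sketch with the plain ZooKeeper client, assuming a placeholder connect string whose chroot matches the Solr znode:

import java.util.List;
import org.apache.zookeeper.ZooKeeper;

public class LiveNodesSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connect string; a null watcher matches the original's usage.
    ZooKeeper zk = new ZooKeeper("zk1:2181/infra-solr", 10000, null);
    try {
      List<String> liveNodes = zk.getChildren("/live_nodes", false);
      for (String node : liveNodes) {
        // Entries look like "host:port_solr"; strip the "_solr" suffix to get host:port.
        System.out.println(node.endsWith("_solr") ? node.substring(0, node.length() - 5) : node);
      }
    } finally {
      zk.close();
    }
  }
}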
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.domain.AmbariSolrState; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; - -public class GetStateFileZkCommand extends AbstractStateFileZkCommand { - private String unsecureZnode; - - public GetStateFileZkCommand(int maxRetries, int interval, String unsecureZnode) { - super(maxRetries, interval); - this.unsecureZnode = unsecureZnode; - } - - @Override - protected AmbariSolrState executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - AmbariSolrState result = AmbariSolrState.UNSECURE; - String stateFile = String.format("%s/%s", unsecureZnode, AbstractStateFileZkCommand.STATE_FILE); - if (zkClient.exists(stateFile, true)) { - result = getStateFromJson(client, stateFile); - } - return result; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/ListCollectionCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/ListCollectionCommand.java deleted file mode 100644 index 41094c72047..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/ListCollectionCommand.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; - -import java.util.List; - -public class ListCollectionCommand extends AbstractSolrRetryCommand> { - - public ListCollectionCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - public List handleResponse(CollectionAdminResponse response, AmbariSolrCloudClient client) throws Exception { - List allCollectionList = (List) response - .getResponse().get("collections"); - return allCollectionList; - } - - @Override - public CollectionAdminRequest.List createRequest(AmbariSolrCloudClient client) { - return new CollectionAdminRequest.List(); - } - - @Override - public String errorMessage(AmbariSolrCloudClient client) { - return "Cannot get collections."; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/RemoveAdminHandlersCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/RemoveAdminHandlersCommand.java deleted file mode 100644 index 32fae7b141b..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/RemoveAdminHandlersCommand.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
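The ListCollectionCommand removed above wraps the Collections API LIST call. A sketch, assuming the caller supplies a connected CloudSolrClient:

import java.util.List;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;

public class ListCollectionsSketch {
  @SuppressWarnings("unchecked")
  static List<String> listCollections(CloudSolrClient solr) throws Exception {
    CollectionAdminResponse rsp = new CollectionAdminRequest.List().process(solr);
    // The response carries the collection names under the "collections" key.
    return (List<String>) rsp.getResponse().get("collections");
  }
}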
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.data.Stat; - -public class RemoveAdminHandlersCommand extends AbstractZookeeperRetryCommand { - - public RemoveAdminHandlersCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - String solrConfigXmlPath = String.format("/configs/%s/solrconfig.xml", client.getCollection()); - if (zkClient.exists(solrConfigXmlPath, true)) { - Stat stat = new Stat(); - byte[] solrConfigXmlBytes = zkClient.getData(solrConfigXmlPath, null, stat, true); - String solrConfigStr = new String(solrConfigXmlBytes); - if (solrConfigStr.contains("class=\"solr.admin.AdminHandlers\"")) { - byte[] newSolrConfigXmlBytes = new String(solrConfigXmlBytes).replaceAll("(?s)", "").getBytes(); - zkClient.setData(solrConfigXmlPath, newSolrConfigXmlBytes, stat.getVersion() + 1, true); - } - } - return true; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java deleted file mode 100644 index 695862394d0..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureSolrZNodeZkCommand.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
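The RemoveAdminHandlersCommand removed above is a read-modify-write of the collection's solrconfig.xml stored in ZooKeeper; it strips the legacy solr.admin.AdminHandlers request handler that newer Solr versions reject. The exact replacement pattern is garbled in this diff (the angle-bracketed content was lost), so the regex in the sketch below is an assumption that only targets the self-closing form of that handler element:

import java.nio.charset.StandardCharsets;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.zookeeper.data.Stat;

public class EditZkFileSketch {
  // configName is a placeholder; assumes a connected SolrZkClient rooted at the Solr znode.
  static void stripAdminHandlers(SolrZkClient zkClient, String configName) throws Exception {
    String path = "/configs/" + configName + "/solrconfig.xml";
    if (zkClient.exists(path, true)) {
      Stat stat = new Stat();
      String xml = new String(zkClient.getData(path, null, stat, true), StandardCharsets.UTF_8);
      // Assumed pattern: remove a self-closing <requestHandler ... class="solr.admin.AdminHandlers" />.
      String cleaned = xml.replaceAll("<requestHandler[^>]*\"solr\\.admin\\.AdminHandlers\"[^>]*/>", "");
      zkClient.setData(path, cleaned.getBytes(StandardCharsets.UTF_8), true);
    }
  }
}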
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.util.AclUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; -import org.apache.zookeeper.data.Stat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class SecureSolrZNodeZkCommand extends AbstractZookeeperRetryCommand { - - private static final Logger LOG = LoggerFactory.getLogger(SecureSolrZNodeZkCommand.class); - - public SecureSolrZNodeZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - String zNode = client.getZnode(); - List newAclList = new ArrayList<>(); - List saslUserList = AclUtils.createAclListFromSaslUsers(client.getSaslUsers().split(",")); - newAclList.addAll(saslUserList); - newAclList.add(new ACL(ZooDefs.Perms.READ, new Id("world", "anyone"))); - - String configsPath = String.format("%s/%s", zNode, "configs"); - String collectionsPath = String.format("%s/%s", zNode, "collections"); - String aliasesPath = String.format("%s/%s", zNode, "aliases.json"); // TODO: protect this later somehow - List excludePaths = Arrays.asList(configsPath, collectionsPath, aliasesPath); - - createZnodeIfNeeded(configsPath, client.getSolrZkClient()); - createZnodeIfNeeded(collectionsPath, client.getSolrZkClient()); - - AclUtils.setRecursivelyOn(client.getSolrZkClient().getSolrZooKeeper(), zNode, newAclList, excludePaths); - - List commonConfigAcls = new ArrayList<>(); - commonConfigAcls.addAll(saslUserList); - commonConfigAcls.add(new ACL(ZooDefs.Perms.READ | ZooDefs.Perms.CREATE, new Id("world", "anyone"))); - - LOG.info("Set sasl users for znode '{}' : {}", client.getZnode(), StringUtils.join(saslUserList, ",")); - LOG.info("Skip {}/configs and {}/collections", client.getZnode(), client.getZnode()); - solrZooKeeper.setACL(configsPath, AclUtils.mergeAcls(solrZooKeeper.getACL(configsPath, new Stat()), commonConfigAcls), -1); - solrZooKeeper.setACL(collectionsPath, AclUtils.mergeAcls(solrZooKeeper.getACL(collectionsPath, new Stat()), commonConfigAcls), -1); - - LOG.info("Set world:anyone to 'cr' on {}/configs and {}/collections", client.getZnode(), client.getZnode()); - AclUtils.setRecursivelyOn(solrZooKeeper, configsPath, saslUserList); - AclUtils.setRecursivelyOn(solrZooKeeper, collectionsPath, saslUserList); - - return true; - } - - private void createZnodeIfNeeded(String configsPath, SolrZkClient zkClient) throws KeeperException, InterruptedException { - if (!zkClient.exists(configsPath, true)) { - LOG.info("'{}' does not exist. 
Creating it ...", configsPath); - zkClient.makePath(configsPath, true); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureZNodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureZNodeZkCommand.java deleted file mode 100644 index a96dc5d5bd6..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SecureZNodeZkCommand.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.util.AclUtils; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; - -import java.util.ArrayList; -import java.util.List; - -public class SecureZNodeZkCommand extends AbstractZookeeperRetryCommand { - - public SecureZNodeZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - String zNode = client.getZnode(); - List newAclList = new ArrayList<>(); - List saslUserList = AclUtils.createAclListFromSaslUsers(client.getSaslUsers().split(",")); - newAclList.addAll(saslUserList); - newAclList.add(new ACL(ZooDefs.Perms.READ, new Id("world", "anyone"))); - AclUtils.setRecursivelyOn(client.getSolrZkClient().getSolrZooKeeper(), zNode, newAclList); - return true; - } - -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SetClusterPropertyZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SetClusterPropertyZkCommand.java deleted file mode 100644 index e79773e5077..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/SetClusterPropertyZkCommand.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
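Both secure-znode commands removed above build the same kind of ACL list: full permissions for the configured SASL principals plus read access for world:anyone. A sketch with the plain ZooKeeper data classes, using placeholder principal names:

import java.util.ArrayList;
import java.util.List;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;

public class SaslAclSketch {
  static List<ACL> saslAllPlusWorldRead(String... saslUsers) {
    List<ACL> acls = new ArrayList<>();
    for (String user : saslUsers) {
      acls.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", user))); // full access for each SASL user
    }
    acls.add(new ACL(ZooDefs.Perms.READ, new Id("world", "anyone"))); // read-only for everyone else
    return acls;
  }

  public static void main(String[] args) {
    // "infra-solr" and "logsearch" are placeholder SASL principals.
    System.out.println(saslAllPlusWorldRead("infra-solr", "logsearch"));
  }
}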
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.ClusterProperties; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; - -public class SetClusterPropertyZkCommand extends AbstractZookeeperRetryCommand{ - - public SetClusterPropertyZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - String propertyName = client.getPropName(); - String propertyValue = client.getPropValue(); - ClusterProperties clusterProperties = new ClusterProperties(zkClient); - clusterProperties.setClusterProperty(propertyName, propertyValue); - return propertyValue; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/TransferZnodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/TransferZnodeZkCommand.java deleted file mode 100644 index fa09869d370..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/TransferZnodeZkCommand.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
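The SetClusterPropertyZkCommand removed above maps directly onto ClusterProperties.setClusterProperty(). A sketch, assuming a connected SolrZkClient; the urlScheme property is used here only as an illustrative value:

import org.apache.solr.common.cloud.ClusterProperties;
import org.apache.solr.common.cloud.SolrZkClient;

public class ClusterPropertySketch {
  static void setUrlScheme(SolrZkClient zkClient) throws Exception {
    // Illustrative property/value pair; the command takes both from its CLI arguments.
    new ClusterProperties(zkClient).setClusterProperty("urlScheme", "https");
  }
}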
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; - -public class TransferZnodeZkCommand extends AbstractZookeeperRetryCommand { - - public TransferZnodeZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - boolean isSrcZk = true; - boolean isDestZk = true; - if ("copyToLocal".equals(client.getTransferMode())) { - isDestZk = false; - } else if ("copyFromLocal".equals(client.getTransferMode())) { - isSrcZk = false; - } - zkClient.zkTransfer(client.getCopySrc(), isSrcZk, client.getCopyDest(), isDestZk, true); - return true; - } - - -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UnsecureZNodeZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UnsecureZNodeZkCommand.java deleted file mode 100644 index ad61270d4d0..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UnsecureZNodeZkCommand.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
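The TransferZnodeZkCommand removed above delegates to SolrZkClient.zkTransfer(), which copies between ZooKeeper and the local filesystem in either direction. A sketch of the copyToLocal case with placeholder source and destination paths:

import org.apache.solr.common.cloud.SolrZkClient;

public class ZnodeCopySketch {
  static void copyToLocal(SolrZkClient zkClient) throws Exception {
    // src is a znode, dst is a local path (both placeholders); the final flag copies recursively.
    zkClient.zkTransfer("/infra-solr/configs", true, "/tmp/infra-solr-configs", false, true);
  }
}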
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.util.AclUtils; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; - -import java.util.ArrayList; -import java.util.List; - -public class UnsecureZNodeZkCommand extends AbstractZookeeperRetryCommand { - - public UnsecureZNodeZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected Boolean executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - String zNode = client.getZnode(); - AclUtils.setRecursivelyOn(client.getSolrZkClient().getSolrZooKeeper(), zNode, ZooDefs.Ids.OPEN_ACL_UNSAFE); - return true; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java deleted file mode 100644 index 2b360fbd8cc..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UpdateStateFileZkCommand.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.ambari.infra.solr.domain.AmbariSolrState; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.CreateMode; -import org.codehaus.jackson.map.ObjectMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; - -public class UpdateStateFileZkCommand extends AbstractStateFileZkCommand { - - private static final Logger LOG = LoggerFactory.getLogger(UpdateStateFileZkCommand.class); - - private String unsecureZnode; - - public UpdateStateFileZkCommand(int maxRetries, int interval, String unsecureZnode) { - super(maxRetries, interval); - this.unsecureZnode = unsecureZnode; - } - - @Override - protected AmbariSolrState executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception { - boolean secure = client.isSecure(); - String stateFile = String.format("%s/%s", unsecureZnode, AbstractStateFileZkCommand.STATE_FILE); - AmbariSolrState result = null; - if (secure) { - LOG.info("Update state file in secure mode."); - updateStateFile(client, zkClient, AmbariSolrState.SECURE, stateFile); - result = AmbariSolrState.SECURE; - } else { - LOG.info("Update state file in unsecure mode."); - updateStateFile(client, zkClient, AmbariSolrState.UNSECURE, stateFile); - result = AmbariSolrState.UNSECURE; - } - return result; - } - - private void updateStateFile(AmbariSolrCloudClient client, SolrZkClient zkClient, AmbariSolrState stateToUpdate, - String stateFile) throws Exception { - if (!zkClient.exists(stateFile, true)) { - LOG.info("State file does not exits. Initializing it as '{}'", stateToUpdate); - zkClient.create(stateFile, createStateJson(stateToUpdate).getBytes(StandardCharsets.UTF_8), - CreateMode.PERSISTENT, true); - } else { - AmbariSolrState stateOnSecure = getStateFromJson(client, stateFile); - if (stateToUpdate.equals(stateOnSecure)) { - LOG.info("State file is in '{}' mode. No update.", stateOnSecure); - } else { - LOG.info("State file is in '{}' mode. Updating it to '{}'", stateOnSecure, stateToUpdate); - zkClient.setData(stateFile, createStateJson(stateToUpdate).getBytes(StandardCharsets.UTF_8), true); - } - } - } - - private String createStateJson(AmbariSolrState state) throws Exception { - Map secStateMap = new HashMap<>(); - secStateMap.put(AbstractStateFileZkCommand.STATE_FIELD, state.toString()); - return new ObjectMapper().writeValueAsString(secStateMap); - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UploadConfigZkCommand.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UploadConfigZkCommand.java deleted file mode 100644 index fc7482d8294..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/commands/UploadConfigZkCommand.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.commands; - -import org.apache.ambari.infra.solr.AmbariSolrCloudClient; -import org.apache.solr.common.cloud.ZkConfigManager; - -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; - -public class UploadConfigZkCommand extends AbstractZookeeperConfigCommand { - - public UploadConfigZkCommand(int maxRetries, int interval) { - super(maxRetries, interval); - } - - @Override - protected String executeZkConfigCommand(ZkConfigManager zkConfigManager, AmbariSolrCloudClient client) throws Exception { - Path configDir = Paths.get(client.getConfigDir()); - String configSet = client.getConfigSet(); - zkConfigManager.uploadConfigDir(configDir, configSet); - return configSet; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/AmbariSolrState.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/AmbariSolrState.java deleted file mode 100644 index 489d3f1e38a..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/AmbariSolrState.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.domain; - -/** - * Enum state values for storing security status in unsecure znode - */ -public enum AmbariSolrState { - SECURE, UNSECURE -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrCollection.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrCollection.java deleted file mode 100644 index 21820ece6c4..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrCollection.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
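The UploadConfigZkCommand removed above is the counterpart of the download command earlier in this patch. A sketch, assuming a placeholder local config directory and config set name:

import java.nio.file.Paths;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkConfigManager;

public class UploadConfigSketch {
  public static void main(String[] args) throws Exception {
    SolrZkClient zkClient = new SolrZkClient("zk1:2181/infra-solr", 15000); // placeholder
    try {
      new ZkConfigManager(zkClient)
          .uploadConfigDir(Paths.get("/tmp/hadoop_logs_conf"), "hadoop_logs_conf"); // placeholders
    } finally {
      zkClient.close();
    }
  }
}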
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.domain.json; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class SolrCollection { - private String name; - private long numberOfDocs = -1; - private Map shards = new HashMap<>(); - private Map> leaderHostCoreMap = new HashMap<>(); - private Map leaderSolrCoreDataMap = new HashMap<>(); - private Map> leaderShardsMap = new HashMap<>(); - private Map leaderCoreHostMap = new HashMap<>(); - - public long getNumberOfDocs() { - return numberOfDocs; - } - - public void setNumberOfDocs(long numberOfDocs) { - this.numberOfDocs = numberOfDocs; - } - - public Map getShards() { - return shards; - } - - public void setShards(Map shards) { - this.shards = shards; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Map> getLeaderHostCoreMap() { - return leaderHostCoreMap; - } - - public void setLeaderHostCoreMap(Map> leaderHostCoreMap) { - this.leaderHostCoreMap = leaderHostCoreMap; - } - - public Map getLeaderSolrCoreDataMap() { - return leaderSolrCoreDataMap; - } - - public void setLeaderSolrCoreDataMap(Map leaderSolrCoreDataMap) { - this.leaderSolrCoreDataMap = leaderSolrCoreDataMap; - } - - public Map> getLeaderShardsMap() { - return leaderShardsMap; - } - - public void setLeaderShardsMap(Map> leaderShardsMap) { - this.leaderShardsMap = leaderShardsMap; - } - - public Map getLeaderCoreHostMap() { - return leaderCoreHostMap; - } - - public void setLeaderCoreHostMap(Map leaderCoreHostMap) { - this.leaderCoreHostMap = leaderCoreHostMap; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrCoreData.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrCoreData.java deleted file mode 100644 index 5724a517ddc..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrCoreData.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.domain.json; - -import java.util.Map; - -public class SolrCoreData { - private String coreNodeName; - private String hostName; - private Map properties; - - public SolrCoreData(String coreNodeName, String hostName, Map properties) { - this.coreNodeName = coreNodeName; - this.hostName = hostName; - this.properties = properties; - } - - public String getCoreNodeName() { - return coreNodeName; - } - - public void setCoreNodeName(String coreNodeName) { - this.coreNodeName = coreNodeName; - } - - public String getHostName() { - return hostName; - } - - public void setHostName(String hostName) { - this.hostName = hostName; - } - - public Map getProperties() { - return properties; - } - - public void setProperties(Map properties) { - this.properties = properties; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrShard.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrShard.java deleted file mode 100644 index f121663f8bb..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/domain/json/SolrShard.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.domain.json; - -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice.State; - -import java.util.Map; - -public class SolrShard { - - private String name; - private State state; - private Map replicas; - - public Map getReplicas() { - return replicas; - } - - public void setReplicas(Map replicas) { - this.replicas = replicas; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public State getState() { - return state; - } - - public void setState(State state) { - this.state = state; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/AclUtils.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/AclUtils.java deleted file mode 100644 index dd5d6c85588..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/AclUtils.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.util; - -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; -import org.apache.zookeeper.data.Stat; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class AclUtils { - - public static List<ACL> mergeAcls(List<ACL> originalAcls, List<ACL> updateAcls) { - Map<String, ACL> aclMap = new HashMap<>(); - List<ACL> acls = new ArrayList<>(); - if (originalAcls != null) { - for (ACL acl : originalAcls) { - aclMap.put(acl.getId().getId(), acl); - } - } - - if (updateAcls != null) { - for (ACL acl : updateAcls) { - aclMap.put(acl.getId().getId(), acl); - } - } - - for (Map.Entry<String, ACL> aclEntry : aclMap.entrySet()) { - acls.add(aclEntry.getValue()); - } - return acls; - } - - public static List<ACL> createAclListFromSaslUsers(String[] saslUsers) { - List<ACL> saslUserList = new ArrayList<>(); - for (String saslUser : saslUsers) { - ACL acl = new ACL(); - acl.setId(new Id("sasl", saslUser)); - acl.setPerms(ZooDefs.Perms.ALL); - saslUserList.add(acl); - } - return saslUserList; - } - - public static void setRecursivelyOn(SolrZooKeeper solrZooKeeper, String node, List<ACL> acls) throws KeeperException, InterruptedException { - setRecursivelyOn(solrZooKeeper, node, acls, new ArrayList<String>()); - } - - public static void setRecursivelyOn(SolrZooKeeper solrZooKeeper, String node, List<ACL> acls, List<String> excludePaths) - throws KeeperException, InterruptedException { - if (!excludePaths.contains(node)) { - List<ACL> newAcls = AclUtils.mergeAcls(solrZooKeeper.getACL(node, new Stat()), acls); - solrZooKeeper.setACL(node, newAcls, -1); - for (String child : solrZooKeeper.getChildren(node, null)) { - setRecursivelyOn(solrZooKeeper, path(node, child), acls, excludePaths); - } - } - } - - private static String path(String node, String child) { - return node.endsWith("/") ? node + child : node + "/" + child; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java b/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java deleted file mode 100644 index f46565b41e4..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/java/org/apache/ambari/infra/solr/util/ShardUtils.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.
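For context on the deleted AclUtils class above: createAclListFromSaslUsers builds sasl ACLs with full permissions, mergeAcls de-duplicates them against whatever is already set on a znode, and setRecursivelyOn walks the znode tree and applies the merged list. A minimal sketch of that call pattern follows; the protectZnode helper name and the world-readable policy are assumptions for illustration only.

  import java.util.List;
  import org.apache.ambari.infra.solr.util.AclUtils;
  import org.apache.solr.common.cloud.SolrZooKeeper;
  import org.apache.zookeeper.KeeperException;
  import org.apache.zookeeper.ZooDefs;
  import org.apache.zookeeper.data.ACL;

  public class AclUtilsSketch {
    // Grants ALL to the given SASL users, keeps world read access (assumed policy),
    // and applies the merged ACLs to every node under the given znode.
    public static void protectZnode(SolrZooKeeper zk, String znode, String... saslUsers)
        throws KeeperException, InterruptedException {
      List<ACL> acls = AclUtils.createAclListFromSaslUsers(saslUsers);
      acls.addAll(ZooDefs.Ids.READ_ACL_UNSAFE);
      AclUtils.setRecursivelyOn(zk, znode, acls);
    }
  }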
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.util; - -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; - -public class ShardUtils { - - private static final Logger LOG = LoggerFactory.getLogger(ShardUtils.class); - - public static String generateShardListStr(int maxShardsPerNode) { - String shardsListStr = ""; - for (int i = 0; i < maxShardsPerNode; i++) { - if (i != 0) { - shardsListStr += ","; - } - String shard = "shard" + i; - shardsListStr += shard; - } - return shardsListStr; - } - - public static List<String> generateShardList(int maxShardsPerNode) { - List<String> shardsList = new ArrayList<>(); - for (int i = 0; i < maxShardsPerNode; i++) { - shardsList.add("shard" + i); - } - return shardsList; - } - - public static Collection<String> getShardNamesFromSlices(Collection<Slice> slices, String collection) { - Collection<String> result = new HashSet<>(); - Iterator<Slice> iter = slices.iterator(); - while (iter.hasNext()) { - Slice slice = iter.next(); - for (Replica replica : slice.getReplicas()) { - LOG.info("collectionName=" + collection + ", slice.name=" - + slice.getName() + ", slice.state=" + slice.getState() - + ", replica.core=" + replica.getStr("core") - + ", replica.state=" + replica.getStr("state")); - result.add(slice.getName()); - } - } - return result; - } -} diff --git a/ambari-infra/ambari-infra-solr-client/src/main/python/migrationConfigGenerator.py b/ambari-infra/ambari-infra-solr-client/src/main/python/migrationConfigGenerator.py deleted file mode 100755 index 125b59d370b..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/python/migrationConfigGenerator.py +++ /dev/null @@ -1,518 +0,0 @@ -#!/usr/bin/python - -''' -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License.
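For context on the deleted ShardUtils class above: both generator helpers simply enumerate shard names from shard0 up to maxShardsPerNode - 1, one as a comma-separated string and one as a list, as this small sketch shows.

  import java.util.List;
  import org.apache.ambari.infra.solr.util.ShardUtils;

  public class ShardUtilsSketch {
    public static void main(String[] args) {
      String shardsStr = ShardUtils.generateShardListStr(3); // "shard0,shard1,shard2"
      List<String> shards = ShardUtils.generateShardList(3); // [shard0, shard1, shard2]
      System.out.println(shardsStr + " / " + shards);
    }
  }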
-''' - -import os -import socket -import signal -import sys -import time -import traceback -import urllib2, ssl -import logging -import json -import base64 -import optparse -import ConfigParser -from subprocess import Popen, PIPE -from random import randrange - -SOLR_SERVICE_NAME = 'AMBARI_INFRA_SOLR' -SOLR_COMPONENT_NAME ='INFRA_SOLR' - -ATLAS_SERVICE_NAME = 'ATLAS' - -RANGER_SERVICE_NAME = 'RANGER' -RANGER_COMPONENT_NAME = 'RANGER_ADMIN' - -ZOOKEEPER_SERVICE_NAME = 'ZOOKEEPER' -ZOOKEEPER_COMPONENT_NAME ='ZOOKEEPER_SERVER' - -CLUSTERS_URL = '/api/v1/clusters/{0}' -BLUEPRINT_CONFIG_URL = '?format=blueprint' -GET_SERVICES_URL = '/services/{0}' -GET_HOSTS_COMPONENTS_URL = '/services/{0}/components/{1}?fields=host_components' - -GET_STATE_JSON_URL = '{0}/admin/zookeeper?wt=json&detail=true&path=%2Fclusterstate.json&view=graph' - -logger = logging.getLogger() -handler = logging.StreamHandler() -formatter = logging.Formatter("%(asctime)s - %(message)s") -handler.setFormatter(formatter) -logger.addHandler(handler) - -class colors: - OKGREEN = '\033[92m' - WARNING = '\033[38;5;214m' - FAIL = '\033[91m' - ENDC = '\033[0m' - -def api_accessor(host, username, password, protocol, port): - def do_request(api_url, request_type, request_body=''): - try: - url = '{0}://{1}:{2}{3}'.format(protocol, host, port, api_url) - logger.debug('Execute {0} {1}'.format(request_type, url)) - if request_body: - logger.debug('Request body: {0}'.format(request_body)) - admin_auth = base64.encodestring('%s:%s' % (username, password)).replace('\n', '') - request = urllib2.Request(url) - request.add_header('Authorization', 'Basic %s' % admin_auth) - request.add_header('X-Requested-By', 'ambari') - request.add_data(request_body) - request.get_method = lambda: request_type - response = None - if protocol == 'https': - ctx = ssl.create_default_context() - ctx.check_hostname = False - ctx.verify_mode = ssl.CERT_NONE - response = urllib2.urlopen(request, context=ctx) - else: - response = urllib2.urlopen(request) - response_body = response.read() - except Exception as exc: - raise Exception('Problem with accessing api. 
Reason: {0}'.format(exc)) - return response_body - return do_request - -def create_solr_api_request_command(request_url, user='infra-solr', kerberos_enabled='false', keytab=None, principal=None, output=None): - use_infra_solr_user="sudo -u {0}".format(user) - curl_prefix = "curl -k" - if output is not None: - curl_prefix+=" -o {0}".format(output) - api_cmd = '{0} kinit -kt {1} {2} && {3} {4} --negotiate -u : "{5}"'.format(use_infra_solr_user, keytab, principal, use_infra_solr_user, curl_prefix, request_url) \ - if kerberos_enabled == 'true' else '{0} {1} "{2}"'.format(use_infra_solr_user, curl_prefix, request_url) - return api_cmd - -def get_random_solr_url(solr_urls): - splitted_solr_urls = solr_urls.split(',') - random_index = randrange(0, len(splitted_solr_urls)) - result = splitted_solr_urls[random_index] - logger.debug("Use {0} for request.".format(result)) - return result - -def retry(func, *args, **kwargs): - retry_count = kwargs.pop("count", 10) - delay = kwargs.pop("delay", 5) - context = kwargs.pop("context", "") - for r in range(retry_count): - try: - result = func(*args, **kwargs) - if result is not None: return result - except Exception as e: - logger.debug("Error occurred during {0} operation: {1}".format(context, str(traceback.format_exc()))) - logger.info("{0}: waiting for {1} seconds before retyring again (retry count: {2})".format(context, delay, r+1)) - time.sleep(delay) - -def get_shard_numbers_per_collections(state_json_data): - collection_shard_map={} - for key,val in state_json_data.iteritems(): - if 'shards' in val: - shard_count=len(val['shards']) - collection_shard_map[key]=shard_count - return collection_shard_map - -def get_max_shards_for_collections(state_json_data): - collection_max_shard_map={} - for key,val in state_json_data.iteritems(): - if 'maxShardsPerNode' in val: - collection_max_shard_map[key]=val['maxShardsPerNode'] - return collection_max_shard_map - -def get_state_json_map(solr_urls, user='infra-solr', kerberos_enabled='false', keytab=None, principal=None): - state_json_data={} - request = GET_STATE_JSON_URL.format(get_random_solr_url(solr_urls)) - get_state_json_cmd=create_solr_api_request_command(request, user, kerberos_enabled, keytab, principal) - process = Popen(get_state_json_cmd, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - logger.error(str(err)) - response=json.loads(str(out)) - if 'znode' in response: - if 'data' in response['znode']: - state_json_data=json.loads(response['znode']['data']) - return state_json_data - -def read_json(json_file): - with open(json_file) as data_file: - data = json.load(data_file) - return data - -def get_json(accessor, url): - response = accessor(url, 'GET') - logger.debug('GET ' + url + ' response: ') - logger.debug('----------------------------') - logger.debug(response) - json_resp = json.loads(response) - return json_resp - -def post_json(accessor, url, request_body): - response = accessor(url, 'POST', json.dumps(request_body)) - logger.debug('POST ' + url + ' response: ') - logger.debug('----------------------------') - logger.debug(response) - json_resp = json.loads(response) - return json_resp - -def get_component_hosts(host_components_json): - hosts = [] - if "host_components" in host_components_json and len(host_components_json['host_components']) > 0: - for host_component in host_components_json['host_components']: - if 'HostRoles' in host_component: - hosts.append(host_component['HostRoles']['host_name']) - return hosts - -def 
get_solr_hosts(options, accessor): - host_components_json = get_json(accessor, CLUSTERS_URL.format(options.cluster) + GET_HOSTS_COMPONENTS_URL.format(SOLR_SERVICE_NAME, SOLR_COMPONENT_NAME)) - component_hosts = get_component_hosts(host_components_json) - return component_hosts - -def get_zookeeper_server_hosts(options, accessor): - host_components_json = get_json(accessor, CLUSTERS_URL.format(options.cluster) + GET_HOSTS_COMPONENTS_URL.format(ZOOKEEPER_SERVICE_NAME, ZOOKEEPER_COMPONENT_NAME)) - component_hosts = get_component_hosts(host_components_json) - return component_hosts - -def get_cluster_configs(blueprint): - result = [] - if 'configurations' in blueprint: - result = blueprint['configurations'] - return result - -def get_config_props(cluster_config, config_type): - props={} - for config in cluster_config: - if config_type in config and 'properties' in config[config_type]: - props=config[config_type]['properties'] - return props - -def is_security_enabled(cluster_config): - result = 'false' - cluster_env_props=get_config_props(cluster_config, 'cluster-env') - if cluster_env_props and 'security_enabled' in cluster_env_props and cluster_env_props['security_enabled'] == 'true': - result = 'true' - return result - -def set_log_level(verbose): - if verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - -def get_solr_env_props(cluster_config): - return get_config_props(cluster_config, 'infra-solr-env') - -def get_solr_urls(cluster_config, solr_hosts, solr_protocol): - infra_solr_env_props = get_solr_env_props(cluster_config) - - solr_port = infra_solr_env_props['infra_solr_port'] if 'infra_solr_port' in infra_solr_env_props else '8886' - solr_addr_list = [] - for solr_host in solr_hosts: - solr_addr = "{0}://{1}:{2}/solr".format(solr_protocol, solr_host, solr_port) - solr_addr_list.append(solr_addr) - - return ','.join(solr_addr_list) - -def get_solr_protocol(cluster_config): - infra_solr_env_props = get_solr_env_props(cluster_config) - return 'https' if 'infra_solr_ssl_enabled' in infra_solr_env_props and infra_solr_env_props['infra_solr_ssl_enabled'] == 'true' else 'http' - -def get_zookeeper_connection_string(cluster_config, zookeeper_hosts): - client_port = "2181" - zoo_cfg_props=get_config_props(cluster_config, 'zoo.cfg') - if zoo_cfg_props and 'clientPort' in zoo_cfg_props: - client_port = zoo_cfg_props['clientPort'] - - zookeeper_addr_list = [] - for zookeeper_host in zookeeper_hosts: - zookeeper_addr = zookeeper_host + ":" + client_port - zookeeper_addr_list.append(zookeeper_addr) - - return ','.join(zookeeper_addr_list) - -def get_solr_znode(cluster_config): - infra_solr_env_props = get_solr_env_props(cluster_config) - return infra_solr_env_props['infra_solr_znode'] if 'infra_solr_znode' in infra_solr_env_props else '/infra-solr' - -def get_installed_components(blueprint): - components = [] - if 'host_groups' in blueprint: - for host_group in blueprint['host_groups']: - if 'components' in host_group: - for component in host_group['components']: - if 'name' in component: - if component['name'] not in components: - components.append(component['name']) - return components - -def generate_ambari_solr_migration_ini_file(options, accessor, protocol): - - print "Start generating config file: {0} ...".format(options.ini_file) - - config = ConfigParser.RawConfigParser() - - config.add_section('ambari_server') - config.set('ambari_server', 'host', options.host) - config.set('ambari_server', 'port', options.port) - config.set('ambari_server', 'cluster', 
options.cluster) - config.set('ambari_server', 'protocol', protocol) - config.set('ambari_server', 'username', options.username) - config.set('ambari_server', 'password', options.password) - - print "Get Ambari cluster details ..." - blueprint = get_json(accessor, CLUSTERS_URL.format(options.cluster) + BLUEPRINT_CONFIG_URL) - installed_components = get_installed_components(blueprint) - - print "Set JAVA_HOME: {0}".format(options.java_home) - host = socket.getfqdn() - - cluster_config = get_cluster_configs(blueprint) - solr_hosts = get_solr_hosts(options, accessor) - - if solr_hosts and host not in solr_hosts: - print "{0}WARNING{1}: Host '{2}' is not found in Infra Solr hosts ({3}). Migration commands won't work from here."\ - .format(colors.WARNING, colors.ENDC, host, ','.join(solr_hosts)) - - zookeeper_hosts = get_zookeeper_server_hosts(options, accessor) - - security_enabled = is_security_enabled(cluster_config) - zk_connect_string = get_zookeeper_connection_string(cluster_config, zookeeper_hosts) - if zk_connect_string: - print "Service detected: " + colors.OKGREEN + "ZOOKEEPER" + colors.ENDC - print "Zookeeper connection string: {0}".format(str(zk_connect_string)) - solr_protocol = get_solr_protocol(cluster_config) - solr_urls = get_solr_urls(cluster_config, solr_hosts, solr_protocol) - if solr_urls: - print "Service detected: " + colors.OKGREEN + "AMBARI_INFRA_SOLR" + colors.ENDC - solr_znode = get_solr_znode(cluster_config) - if solr_znode: - print "Infra Solr znode: {0}".format(solr_znode) - infra_solr_env_props = get_config_props(cluster_config, 'infra-solr-env') - - infra_solr_user = infra_solr_env_props['infra_solr_user'] if 'infra_solr_user' in infra_solr_env_props else 'infra-solr' - infra_solr_kerberos_keytab = infra_solr_env_props['infra_solr_kerberos_keytab'] if 'infra_solr_kerberos_keytab' in infra_solr_env_props else '/etc/security/keytabs/ambari-infra-solr.service.keytab' - infra_solr_kerberos_principal_config = infra_solr_env_props['infra_solr_kerberos_principal'] if 'infra_solr_kerberos_principal' in infra_solr_env_props else 'infra-solr' - infra_solr_kerberos_principal = "infra-solr/" + host - if '/' in infra_solr_kerberos_principal_config: - infra_solr_kerberos_principal = infra_solr_kerberos_principal_config.replace('_HOST',host) - else: - infra_solr_kerberos_principal = infra_solr_kerberos_principal_config + "/" + host - infra_solr_port = infra_solr_env_props['infra_solr_port'] if 'infra_solr_port' in infra_solr_env_props else '8886' - - config.add_section('local') - config.set('local', 'java_home', options.java_home) - config.set('local', 'hostname', host) - if options.shared_drive: - config.set('local', 'shared_drive', 'true') - else: - config.set('local', 'shared_drive', 'false') - - config.add_section('cluster') - config.set('cluster', 'kerberos_enabled', security_enabled) - - config.add_section('infra_solr') - config.set('infra_solr', 'protocol', solr_protocol) - config.set('infra_solr', 'hosts', ','.join(solr_hosts)) - config.set('infra_solr', 'zk_connect_string', zk_connect_string) - config.set('infra_solr', 'znode', solr_znode) - config.set('infra_solr', 'user', infra_solr_user) - config.set('infra_solr', 'port', infra_solr_port) - if security_enabled == 'true': - config.set('infra_solr', 'keytab', infra_solr_kerberos_keytab) - config.set('infra_solr', 'principal', infra_solr_kerberos_principal) - zookeeper_env_props = get_config_props(cluster_config, 'zookeeper-env') - zookeeper_principal_name = zookeeper_env_props['zookeeper_principal_name'] if 
'zookeeper_principal_name' in zookeeper_env_props else "zookeeper/_HOST@EXAMPLE.COM" - zk_principal_user = zookeeper_principal_name.split("/")[0] - default_zk_quorum = "{zookeeper_quorum}" - external_zk_connection_string = infra_solr_env_props['infra_solr_zookeeper_quorum'] if 'infra_solr_zookeeper_quorum' in infra_solr_env_props else default_zk_quorum - if default_zk_quorum != external_zk_connection_string: - print "Found external zk connection string: {0}".format(external_zk_connection_string) - config.set('infra_solr', 'external_zk_connect_string', external_zk_connection_string) - config.set('infra_solr', 'zk_principal_user', zk_principal_user) - - state_json_map = retry(get_state_json_map, solr_urls, infra_solr_user, security_enabled, infra_solr_kerberos_keytab, infra_solr_kerberos_principal, count=options.retry, delay=options.delay, context="Get clusterstate.json") - coll_shard_map=get_shard_numbers_per_collections(state_json_map) - max_shards_map=get_max_shards_for_collections(state_json_map) - - config.add_section('ranger_collection') - if "RANGER_ADMIN" in installed_components and not options.skip_ranger: - print "Service detected: " + colors.OKGREEN + "RANGER" + colors.ENDC - ranger_env_props = get_config_props(cluster_config, 'ranger-env') - if "is_solrCloud_enabled" in ranger_env_props and ranger_env_props['is_solrCloud_enabled'] == 'true': - if "is_external_solrCloud_enabled" in ranger_env_props and ranger_env_props['is_external_solrCloud_enabled'] == 'true' and not options.force_ranger: - config.set('ranger_collection', 'enabled', 'false') - else: - config.set('ranger_collection', 'enabled', 'true') - ranger_config_set = ranger_env_props['ranger_solr_config_set'] if ranger_env_props and 'ranger_solr_config_set' in ranger_env_props else 'ranger_audits' - ranger_collection_name = ranger_env_props['ranger_solr_collection_name'] if ranger_env_props and 'ranger_solr_collection_name' in ranger_env_props else 'ranger_audits' - config.set('ranger_collection', 'ranger_config_set_name', ranger_config_set) - config.set('ranger_collection', 'ranger_collection_name', ranger_collection_name) - if ranger_collection_name in coll_shard_map: - config.set('ranger_collection', 'ranger_collection_shards', coll_shard_map[ranger_collection_name]) - if ranger_collection_name in max_shards_map: - config.set('ranger_collection', 'ranger_collection_max_shards_per_node', max_shards_map[ranger_collection_name]) - config.set('ranger_collection', 'backup_ranger_config_set_name', 'old_ranger_audits') - config.set('ranger_collection', 'backup_ranger_collection_name', 'old_ranger_audits') - print 'Ranger Solr collection: ' + ranger_collection_name - ranger_backup_path = None - if options.backup_base_path: - ranger_backup_path = os.path.join(options.backup_base_path, "ranger") - elif options.backup_ranger_base_path: - ranger_backup_path = options.backup_ranger_base_path - if ranger_backup_path is not None: - config.set('ranger_collection', 'backup_path', ranger_backup_path) - print 'Ranger backup path: ' + ranger_backup_path - if options.ranger_hdfs_base_path: - config.set('ranger_collection', 'hdfs_base_path', options.ranger_hdfs_base_path) - elif options.hdfs_base_path: - config.set('ranger_collection', 'hdfs_base_path', options.hdfs_base_path) - else: - config.set('ranger_collection', 'enabled', 'false') - else: - config.set('ranger_collection', 'enabled', 'false') - - config.add_section('atlas_collections') - if "ATLAS_SERVER" in installed_components and not options.skip_atlas: - print "Service detected: " 
+ colors.OKGREEN + "ATLAS" + colors.ENDC - config.set('atlas_collections', 'enabled', 'true') - config.set('atlas_collections', 'config_set', 'atlas_configs') - config.set('atlas_collections', 'fulltext_index_name', 'fulltext_index') - config.set('atlas_collections', 'backup_fulltext_index_name', 'old_fulltext_index') - if 'fulltext_index' in coll_shard_map: - config.set('atlas_collections', 'fulltext_index_shards', coll_shard_map['fulltext_index']) - if 'fulltext_index' in max_shards_map: - config.set('atlas_collections', 'fulltext_index_max_shards_per_node', max_shards_map['fulltext_index']) - config.set('atlas_collections', 'edge_index_name', 'edge_index') - config.set('atlas_collections', 'backup_edge_index_name', 'old_edge_index') - if 'edge_index' in coll_shard_map: - config.set('atlas_collections', 'edge_index_shards', coll_shard_map['edge_index']) - if 'edge_index' in max_shards_map: - config.set('atlas_collections', 'edge_index_max_shards_per_node', max_shards_map['edge_index']) - config.set('atlas_collections', 'vertex_index_name', 'vertex_index') - config.set('atlas_collections', 'backup_vertex_index_name', 'old_vertex_index') - if 'vertex_index' in coll_shard_map: - config.set('atlas_collections', 'vertex_index_shards', coll_shard_map['vertex_index']) - if 'vertex_index' in max_shards_map: - config.set('atlas_collections', 'vertex_index_max_shards_per_node', max_shards_map['vertex_index']) - print 'Atlas Solr collections: fulltext_index, edge_index, vertex_index' - atlas_backup_path = None - if options.backup_base_path: - atlas_backup_path = os.path.join(options.backup_base_path, "atlas") - elif options.backup_ranger_base_path: - atlas_backup_path = options.backup_atlas_base_path - if atlas_backup_path is not None: - config.set('atlas_collections', 'backup_path', atlas_backup_path) - print 'Atlas backup path: ' + atlas_backup_path - if options.atlas_hdfs_base_path: - config.set('atlas_collections', 'hdfs_base_path', options.atlas_hdfs_base_path) - elif options.hdfs_base_path: - config.set('atlas_collections', 'hdfs_base_path', options.hdfs_base_path) - else: - config.set('atlas_collections', 'enabled', 'false') - - config.add_section('logsearch_collections') - if "LOGSEARCH_SERVER" in installed_components: - print "Service detected: " + colors.OKGREEN + "LOGSEARCH" + colors.ENDC - - logsearch_props = get_config_props(cluster_config, 'logsearch-properties') - - logsearch_hadoop_logs_coll_name = logsearch_props['logsearch.solr.collection.service.logs'] if logsearch_props and 'logsearch.solr.collection.service.logs' in logsearch_props else 'hadoop_logs' - logsearch_audit_logs_coll_name = logsearch_props['logsearch.solr.collection.audit.logs'] if logsearch_props and 'logsearch.solr.collection.audit.logs' in logsearch_props else 'audit_logs' - - config.set('logsearch_collections', 'enabled', 'true') - config.set('logsearch_collections', 'hadoop_logs_collection_name', logsearch_hadoop_logs_coll_name) - config.set('logsearch_collections', 'audit_logs_collection_name', logsearch_audit_logs_coll_name) - config.set('logsearch_collections', 'history_collection_name', 'history') - print 'Log Search Solr collections: {0}, {1}, history'.format(logsearch_hadoop_logs_coll_name, logsearch_audit_logs_coll_name) - else: - config.set('logsearch_collections', 'enabled', 'false') - - if security_enabled == 'true': - print "Kerberos: enabled" - else: - print "Kerberos: disabled" - - with open(options.ini_file, 'w') as f: - config.write(f) - - print "Config file generation has finished " + 
colors.OKGREEN + "successfully" + colors.ENDC - -def validate_inputs(options): - errors=[] - if not options.host: - errors.append("Option is empty or missing: host") - if not options.port: - errors.append("Option is empty or missing: port") - if not options.cluster: - errors.append("Option is empty or missing: cluster") - if not options.username: - errors.append("Option is empty or missing: username") - if not options.password: - errors.append("Option is empty or missing: password") - if not options.java_home: - errors.append("Option is empty or missing: java-home") - elif not os.path.isdir(options.java_home): - errors.append("java-home directory does not exist ({0})".format(options.java_home)) - return errors - -if __name__=="__main__": - try: - parser = optparse.OptionParser("usage: %prog [options]") - parser.add_option("-H", "--host", dest="host", type="string", help="hostname for ambari server") - parser.add_option("-P", "--port", dest="port", type="int", help="port number for ambari server") - parser.add_option("-c", "--cluster", dest="cluster", type="string", help="name cluster") - parser.add_option("-f", "--force-ranger", dest="force_ranger", default=False, action="store_true", help="force to get Ranger details - can be useful if Ranger is configured to use external Solr (but points to internal Sols)") - parser.add_option("-s", "--ssl", dest="ssl", action="store_true", help="use if ambari server using https") - parser.add_option("-v", "--verbose", dest="verbose", action="store_true", help="use for verbose logging") - parser.add_option("-u", "--username", dest="username", type="string", help="username for accessing ambari server") - parser.add_option("-p", "--password", dest="password", type="string", help="password for accessing ambari server") - parser.add_option("-j", "--java-home", dest="java_home", type="string", help="local java_home location") - parser.add_option("-i", "--ini-file", dest="ini_file", default="ambari_solr_migration.ini", type="string", help="Filename of the generated ini file for migration (default: ambari_solr_migration.ini)") - parser.add_option("--backup-base-path", dest="backup_base_path", default=None, type="string", help="base path for backup, e.g.: /tmp/backup, then /tmp/backup/ranger/ and /tmp/backup/atlas/ folders will be generated") - parser.add_option("--backup-ranger-base-path", dest="backup_ranger_base_path", default=None, type="string", help="base path for ranger backup (override backup-base-path for ranger), e.g.: /tmp/backup/ranger") - parser.add_option("--backup-atlas-base-path", dest="backup_atlas_base_path", default=None, type="string", help="base path for atlas backup (override backup-base-path for atlas), e.g.: /tmp/backup/atlas") - parser.add_option("--hdfs-base-path", dest="hdfs_base_path", default=None, type="string", help="hdfs base path where the collections are located (e.g.: /user/infrasolr). Use if both atlas and ranger collections are on hdfs.") - parser.add_option("--ranger-hdfs-base-path", dest="ranger_hdfs_base_path", default=None, type="string", help="hdfs base path where the ranger collection is located (e.g.: /user/infra-solr). Use if only ranger collection is on hdfs.") - parser.add_option("--atlas-hdfs-base-path", dest="atlas_hdfs_base_path", default=None, type="string", help="hdfs base path where the atlas collections are located (e.g.: /user/infra-solr). 
Use if only atlas collections are on hdfs.") - parser.add_option("--skip-atlas", dest="skip_atlas", action="store_true", default=False, help="skip to gather Atlas service details") - parser.add_option("--skip-ranger", dest="skip_ranger", action="store_true", default=False, help="skip to gather Ranger service details") - parser.add_option("--retry", dest="retry", type="int", default=10, help="number of retries during accessing random solr urls") - parser.add_option("--delay", dest="delay", type="int", default=5, help="delay (seconds) between retries during accessing random solr urls") - parser.add_option("--shared-drive", dest="shared_drive", default=False, action="store_true", help="Use if the backup location is shared between hosts.") - - (options, args) = parser.parse_args() - - set_log_level(options.verbose) - errors = validate_inputs(options) - - if errors: - print 'Errors' - for error in errors: - print '- {0}'.format(error) - print '' - parser.print_help() - else: - protocol = 'https' if options.ssl else 'http' - accessor = api_accessor(options.host, options.username, options.password, protocol, options.port) - try: - generate_ambari_solr_migration_ini_file(options, accessor, protocol) - except Exception as exc: - print traceback.format_exc() - print 'Config file generation ' + colors.FAIL + 'failed' + colors.ENDC - except KeyboardInterrupt: - print - sys.exit(128 + signal.SIGINT) \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/python/migrationHelper.py b/ambari-infra/ambari-infra-solr-client/src/main/python/migrationHelper.py deleted file mode 100755 index b2e835b4ca4..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/python/migrationHelper.py +++ /dev/null @@ -1,2079 +0,0 @@ -#!/usr/bin/python - -''' -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-''' - -import copy -import glob -import logging -import os -import sys -import urllib2, ssl -import json -import base64 -import optparse -import socket -import time -import traceback -import ConfigParser -import solrDataManager as solr_data_manager - -from datetime import datetime, timedelta -from random import randrange, randint -from subprocess import Popen, PIPE - -HTTP_PROTOCOL = 'http' -HTTPS_PROTOCOL = 'https' - -AMBARI_SUDO = "/var/lib/ambari-agent/ambari-sudo.sh" - -SOLR_SERVICE_NAME = 'AMBARI_INFRA_SOLR' - -SOLR_COMPONENT_NAME ='INFRA_SOLR' - -LOGSEARCH_SERVICE_NAME = 'LOGSEARCH' - -LOGSEARCH_SERVER_COMPONENT_NAME ='LOGSEARCH_SERVER' -LOGSEARCH_LOGFEEDER_COMPONENT_NAME ='LOGSEARCH_LOGFEEDER' - -RANGER_SERVICE_NAME = "RANGER" -RANGER_ADMIN_COMPONENT_NAME = "RANGER_ADMIN" - -ATLAS_SERVICE_NAME = "ATLAS" -ATLAS_SERVER_COMPONENT_NAME = "ATLAS_SERVER" - -CLUSTERS_URL = '/api/v1/clusters/{0}' - -GET_HOSTS_COMPONENTS_URL = '/services/{0}/components/{1}?fields=host_components' - -REQUESTS_API_URL = '/requests' -BATCH_REQUEST_API_URL = "/api/v1/clusters/{0}/request_schedules" -GET_ACTUAL_CONFIG_URL = '/configurations/service_config_versions?service_name={0}&is_current=true' -CREATE_CONFIGURATIONS_URL = '/configurations' - -LIST_SOLR_COLLECTION_URL = '{0}/admin/collections?action=LIST&wt=json' -CREATE_SOLR_COLLECTION_URL = '{0}/admin/collections?action=CREATE&name={1}&collection.configName={2}&numShards={3}&replicationFactor={4}&maxShardsPerNode={5}&wt=json' -DELETE_SOLR_COLLECTION_URL = '{0}/admin/collections?action=DELETE&name={1}&wt=json&async={2}' -RELOAD_SOLR_COLLECTION_URL = '{0}/admin/collections?action=RELOAD&name={1}&wt=json' -REQUEST_STATUS_SOLR_COLLECTION_URL = '{0}/admin/collections?action=REQUESTSTATUS&requestid={1}&wt=json' -CORE_DETAILS_URL = '{0}replication?command=details&wt=json' - -INFRA_SOLR_CLIENT_BASE_PATH = '/usr/lib/ambari-infra-solr-client/' -RANGER_NEW_SCHEMA = 'migrate/managed-schema' -SOLR_CLOUD_CLI_SCRIPT = 'solrCloudCli.sh' -COLLECTIONS_DATA_JSON_LOCATION = INFRA_SOLR_CLIENT_BASE_PATH + "migrate/data/{0}" - -logger = logging.getLogger() -handler = logging.StreamHandler() -formatter = logging.Formatter("%(asctime)s - %(message)s") -handler.setFormatter(formatter) -logger.addHandler(handler) - -class colors: - OKGREEN = '\033[92m' - WARNING = '\033[38;5;214m' - FAIL = '\033[91m' - ENDC = '\033[0m' - -def api_accessor(host, username, password, protocol, port): - def do_request(api_url, request_type, request_body=''): - try: - url = '{0}://{1}:{2}{3}'.format(protocol, host, port, api_url) - logger.debug('Execute {0} {1}'.format(request_type, url)) - if request_body: - logger.debug('Request body: {0}'.format(request_body)) - admin_auth = base64.encodestring('%s:%s' % (username, password)).replace('\n', '') - request = urllib2.Request(url) - request.add_header('Authorization', 'Basic %s' % admin_auth) - request.add_header('X-Requested-By', 'ambari') - request.add_data(request_body) - request.get_method = lambda: request_type - response = None - if protocol == 'https': - ctx = ssl.create_default_context() - ctx.check_hostname = False - ctx.verify_mode = ssl.CERT_NONE - response = urllib2.urlopen(request, context=ctx) - else: - response = urllib2.urlopen(request) - response_body = response.read() - except Exception as exc: - raise Exception('Problem with accessing api. 
Reason: {0}'.format(exc)) - return response_body - return do_request - -def set_log_level(verbose): - if verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - -def retry(func, *args, **kwargs): - retry_count = kwargs.pop("count", 10) - delay = kwargs.pop("delay", 5) - context = kwargs.pop("context", "") - for r in range(retry_count): - try: - result = func(*args, **kwargs) - if result is not None: return result - except Exception as e: - logger.error("Error occurred during {0} operation: {1}".format(context, str(traceback.format_exc()))) - logger.info("\n{0}: waiting for {1} seconds before retyring again (retry count: {2})".format(context, delay, r+1)) - time.sleep(delay) - print '{0} operation {1}FAILED{2}'.format(context, colors.FAIL, colors.ENDC) - sys.exit(1) - -def get_keytab_and_principal(config): - kerberos_enabled = 'false' - keytab=None - principal=None - if config.has_section('cluster') and config.has_option('cluster', 'kerberos_enabled'): - kerberos_enabled=config.get('cluster', 'kerberos_enabled') - - if config.has_section('infra_solr'): - if config.has_option('infra_solr', 'user'): - user=config.get('infra_solr', 'user') - if kerberos_enabled == 'true': - if config.has_option('infra_solr', 'keytab'): - keytab=config.get('infra_solr', 'keytab') - if config.has_option('infra_solr', 'principal'): - principal=config.get('infra_solr', 'principal') - return keytab, principal - -def create_solr_api_request_command(request_url, config, output=None): - user='infra-solr' - if config.has_section('infra_solr'): - if config.has_option('infra_solr', 'user'): - user=config.get('infra_solr', 'user') - kerberos_enabled='false' - if config.has_section('cluster') and config.has_option('cluster', 'kerberos_enabled'): - kerberos_enabled=config.get('cluster', 'kerberos_enabled') - keytab, principal=get_keytab_and_principal(config) - use_infra_solr_user="sudo -u {0}".format(user) - curl_prefix = "curl -k" - if output is not None: - curl_prefix+=" -o {0}".format(output) - api_cmd = '{0} kinit -kt {1} {2} && {3} {4} --negotiate -u : "{5}"'.format(use_infra_solr_user, keytab, principal, use_infra_solr_user, curl_prefix, request_url) \ - if kerberos_enabled == 'true' else '{0} {1} "{2}"'.format(use_infra_solr_user, curl_prefix, request_url) - logger.debug("Solr API command: {0}".format(api_cmd)) - return api_cmd - -def create_infra_solr_client_command(options, config, command, appendZnode=False): - user='infra-solr' - kerberos_enabled='false' - infra_solr_cli_opts = '' - java_home=None - jaasOption=None - zkConnectString=None - if config.has_section('cluster') and config.has_option('cluster', 'kerberos_enabled'): - kerberos_enabled=config.get('cluster', 'kerberos_enabled') - if config.has_section('infra_solr'): - if config.has_option('infra_solr', 'user'): - user=config.get('infra_solr', 'user') - if config.has_option('infra_solr', 'external_zk_connect_string'): - zkConnectString=config.get('infra_solr', 'external_zk_connect_string') - elif config.has_option('infra_solr', 'zk_connect_string'): - zkConnectString=config.get('infra_solr', 'zk_connect_string') - if kerberos_enabled == 'true': - zk_principal_user = config.get('infra_solr', 'zk_principal_user') if config.has_option('infra_solr', 'zk_principal_user') else 'zookeeper' - infra_solr_cli_opts= '-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username={0} -Dzookeeper.sasl.clientconfig=Client'.format(zk_principal_user) - jaasOption=" --jaas-file /etc/ambari-infra-solr/conf/infra_solr_jaas.conf" - 
command+=jaasOption - if config.has_section('local') and config.has_option('local', 'java_home'): - java_home=config.get('local', 'java_home') - if not java_home: - raise Exception("'local' section or 'java_home' is missing (or empty) from the configuration") - if not zkConnectString: - raise Exception("'zk_connect_string' section or 'external_zk_connect_string' is missing (or empty) from the configuration") - if appendZnode: - if config.has_option('infra_solr', 'znode'): - znode_to_append=config.get('infra_solr', 'znode') - zkConnectString+="{0}".format(znode_to_append) - else: - raise Exception("'znode' option is required for infra_solr section") - - set_java_home_= 'JAVA_HOME={0}'.format(java_home) - set_infra_solr_cli_opts = ' INFRA_SOLR_CLI_OPTS="{0}"'.format(infra_solr_cli_opts) if infra_solr_cli_opts != '' else '' - solr_cli_cmd = '{0} {1}{2} /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string {3} {4}'\ - .format(AMBARI_SUDO, set_java_home_, set_infra_solr_cli_opts, zkConnectString, command) - - return solr_cli_cmd - -def get_random_solr_url(solr_urls, options = None): - random_index = randrange(0, len(solr_urls)) - result = solr_urls[random_index] - logger.debug("Use {0} solr address for next request.".format(result)) - return result - -def format_json(dictionary, tab_level=0): - output = '' - tab = ' ' * 2 * tab_level - for key, value in dictionary.iteritems(): - output += ',\n{0}"{1}": '.format(tab, key) - if isinstance(value, dict): - output += '{\n' + format_json(value, tab_level + 1) + tab + '}' - else: - output += '"{0}"'.format(value) - output += '\n' - return output[2:] - -def read_json(json_file): - with open(json_file) as data_file: - data = json.load(data_file) - return data - -def get_json(accessor, url): - response = accessor(url, 'GET') - logger.debug('GET ' + url + ' response: ') - logger.debug('----------------------------') - logger.debug(str(response)) - json_resp = json.loads(response) - return json_resp - -def post_json(accessor, url, request_body): - response = accessor(url, 'POST', json.dumps(request_body)) - logger.debug('POST ' + url + ' response: ') - logger.debug( '----------------------------') - logger.debug(str(response)) - json_resp = json.loads(response) - return json_resp - -def get_component_hosts(host_components_json): - hosts = [] - if "host_components" in host_components_json and len(host_components_json['host_components']) > 0: - for host_component in host_components_json['host_components']: - if 'HostRoles' in host_component: - hosts.append(host_component['HostRoles']['host_name']) - return hosts - -def create_batch_command(command, hosts, cluster, service_name, component_name, interval_seconds, fault_tolerance, context): - request_schedules = [] - request_schedule = {} - batch = [] - requests = [] - order_id = 1 - all = len(hosts) - for host in hosts: - request = {} - request['order_id'] = order_id - request['type'] = 'POST' - request['uri'] = "/clusters/{0}/requests".format(cluster) - request_body_info = {} - request_info = {} - request_info["context"] = context + " ({0} of {1})".format(order_id, all) - request_info["command"] = command - - order_id = order_id + 1 - - resource_filter = {} - resource_filter["service_name"] = service_name - resource_filter["component_name"] = component_name - resource_filter["hosts"] = host - - resource_filters = [] - resource_filters.append(resource_filter) - request_body_info["Requests/resource_filters"] = resource_filters - request_body_info['RequestInfo'] = request_info - - 
request['RequestBodyInfo'] = request_body_info - requests.append(request) - batch_requests_item = {} - batch_requests_item['requests'] = requests - batch.append(batch_requests_item) - batch_settings_item = {} - batch_settings = {} - batch_settings['batch_separation_in_seconds'] = interval_seconds - batch_settings['task_failure_tolerance'] = fault_tolerance - batch_settings_item['batch_settings'] = batch_settings - batch.append(batch_settings_item) - request_schedule['batch'] = batch - - request_schedule_item = {} - request_schedule_item['RequestSchedule'] = request_schedule - request_schedules.append(request_schedule_item) - - return request_schedules - -def create_command_request(command, parameters, hosts, cluster, context, service=SOLR_SERVICE_NAME, component=SOLR_COMPONENT_NAME): - request = {} - request_info = {} - request_info["context"] = context - request_info["command"] = command - request_info["parameters"] = parameters - - operation_level = {} - operation_level["level"] = "HOST_COMPONENT" - operation_level["cluster_name"] = cluster - - request_info["operation_level"] = operation_level - request["RequestInfo"] = request_info - - resource_filter = {} - resource_filter["service_name"] = service - resource_filter["component_name"] = component - resource_filter["hosts"] = ','.join(hosts) - - resource_filters = [] - resource_filters.append(resource_filter) - request["Requests/resource_filters"] = resource_filters - return request - -def fill_params_for_backup(params, collection): - collections_data = get_collections_data(COLLECTIONS_DATA_JSON_LOCATION.format("backup_collections.json")) - if collection in collections_data and 'leaderHostCoreMap' in collections_data[collection]: - params["solr_backup_host_cores_map"] = json.dumps(collections_data[collection]['leaderHostCoreMap']) - if collection in collections_data and 'leaderCoreHostMap' in collections_data[collection]: - params["solr_backup_core_host_map"] = json.dumps(collections_data[collection]['leaderCoreHostMap']) - return params - -def fill_params_for_restore(params, original_collection, collection, config_set): - backup_collections_data = get_collections_data(COLLECTIONS_DATA_JSON_LOCATION.format("backup_collections.json")) - if original_collection in backup_collections_data and 'leaderHostCoreMap' in backup_collections_data[original_collection]: - params["solr_backup_host_cores_map"] = json.dumps(backup_collections_data[original_collection]['leaderHostCoreMap']) - if original_collection in backup_collections_data and 'leaderCoreHostMap' in backup_collections_data[original_collection]: - params["solr_backup_core_host_map"] = json.dumps(backup_collections_data[original_collection]['leaderCoreHostMap']) - - collections_data = get_collections_data(COLLECTIONS_DATA_JSON_LOCATION.format("restore_collections.json")) - if collection in collections_data and 'leaderHostCoreMap' in collections_data[collection]: - params["solr_restore_host_cores_map"] = json.dumps(collections_data[collection]['leaderHostCoreMap']) - if collection in collections_data and 'leaderCoreHostMap' in collections_data[collection]: - params["solr_restore_core_host_map"] = json.dumps(collections_data[collection]['leaderCoreHostMap']) - if collection in collections_data and 'leaderSolrCoreDataMap' in collections_data[collection]: - params["solr_restore_core_data"] = json.dumps(collections_data[collection]['leaderSolrCoreDataMap']) - if config_set: - params["solr_restore_config_set"] = config_set - - return params - -def fill_parameters(options, config, collection, 
index_location, hdfs_path=None, shards=None): - params = {} - if collection: - params['solr_collection'] = collection - params['solr_backup_name'] = collection - if index_location: - params['solr_index_location'] = index_location - if options.index_version: - params['solr_index_version'] = options.index_version - if options.force: - params['solr_index_upgrade_force'] = options.force - if options.async: - params['solr_request_async'] = options.request_async - if options.request_tries: - params['solr_request_tries'] = options.request_tries - if options.request_time_interval: - params['solr_request_time_interval'] = options.request_time_interval - if options.disable_solr_host_check: - params['solr_check_hosts'] = False - if options.core_filter: - params['solr_core_filter'] = options.core_filter - if options.core_filter: - params['solr_skip_cores'] = options.skip_cores - if shards: - params['solr_shards'] = shards - if options.shared_drive: - params['solr_shared_fs'] = True - elif config.has_section('local') and config.has_option('local', 'shared_drive') and config.get('local', 'shared_drive') == 'true': - params['solr_shared_fs'] = True - if hdfs_path: - params['solr_hdfs_path'] = hdfs_path - if options.keep_backup: - params['solr_keep_backup'] = True - return params - -def validte_common_options(options, parser, config): - if not options.index_location: - parser.print_help() - print 'index-location option is required' - sys.exit(1) - - if not options.collection: - parser.print_help() - print 'collection option is required' - sys.exit(1) - -def get_service_components(options, accessor, cluster, service, component): - host_components_json = get_json(accessor, CLUSTERS_URL.format(cluster) + GET_HOSTS_COMPONENTS_URL.format(service, component)) - component_hosts = get_component_hosts(host_components_json) - return component_hosts - -def get_solr_hosts(options, accessor, cluster): - component_hosts = get_service_components(options, accessor, cluster, SOLR_SERVICE_NAME, SOLR_COMPONENT_NAME) - - if options.include_solr_hosts: - new_component_hosts = [] - include_solr_hosts_list = options.include_solr_hosts.split(',') - for include_host in include_solr_hosts_list: - if include_host in component_hosts: - new_component_hosts.append(include_host) - component_hosts = new_component_hosts - if options.exclude_solr_hosts: - exclude_solr_hosts_list = options.exclude_solr_hosts.split(',') - for exclude_host in exclude_solr_hosts_list: - if exclude_host in component_hosts: - component_hosts.remove(exclude_host) - return component_hosts - -def restore(options, accessor, parser, config, original_collection, collection, config_set, index_location, hdfs_path, shards): - """ - Send restore solr collection custom command request to ambari-server - """ - cluster = config.get('ambari_server', 'cluster') - - component_hosts = get_solr_hosts(options, accessor, cluster) - parameters = fill_parameters(options, config, collection, index_location, hdfs_path, shards) - parameters = fill_params_for_restore(parameters, original_collection, collection, config_set) - - cmd_request = create_command_request("RESTORE", parameters, component_hosts, cluster, 'Restore Solr Collection: ' + collection) - return post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - -def migrate(options, accessor, parser, config, collection, index_location): - """ - Send migrate lucene index custom command request to ambari-server - """ - cluster = config.get('ambari_server', 'cluster') - - component_hosts = 
get_solr_hosts(options, accessor, cluster) - parameters = fill_parameters(options, config, collection, index_location) - - cmd_request = create_command_request("MIGRATE", parameters, component_hosts, cluster, 'Migrating Solr Collection: ' + collection) - return post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - -def backup(options, accessor, parser, config, collection, index_location): - """ - Send backup solr collection custom command request to ambari-server - """ - cluster = config.get('ambari_server', 'cluster') - - component_hosts = get_solr_hosts(options, accessor, cluster) - parameters = fill_parameters(options, config, collection, index_location) - - parameters = fill_params_for_backup(parameters, collection) - - cmd_request = create_command_request("BACKUP", parameters, component_hosts, cluster, 'Backup Solr Collection: ' + collection) - return post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - - -def upgrade_solr_instances(options, accessor, parser, config): - """ - Upgrade (remove & re-install) infra solr instances - """ - cluster = config.get('ambari_server', 'cluster') - solr_instance_hosts = get_service_components(options, accessor, cluster, "AMBARI_INFRA_SOLR", "INFRA_SOLR") - - context = "Upgrade Solr Instances" - sys.stdout.write("Sending upgrade request: [{0}] ".format(context)) - sys.stdout.flush() - - cmd_request = create_command_request("UPGRADE_SOLR_INSTANCE", {}, solr_instance_hosts, cluster, context) - response = post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Upgrade command request id: {0}'.format(request_id) - if options.async: - print "Upgrade request sent to Ambari server. Check Ambari UI about the results." - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, context) - print "{0}... {1} DONE{2}".format(context, colors.OKGREEN, colors.ENDC) - -def upgrade_solr_clients(options, accessor, parser, config): - """ - Upgrade (remove & re-install) infra solr clients - """ - cluster = config.get('ambari_server', 'cluster') - solr_client_hosts = get_service_components(options, accessor, cluster, "AMBARI_INFRA_SOLR", "INFRA_SOLR_CLIENT") - - fqdn = socket.getfqdn() - if fqdn in solr_client_hosts: - solr_client_hosts.remove(fqdn) - host = socket.gethostname() - if host in solr_client_hosts: - solr_client_hosts.remove(host) - context = "Upgrade Solr Clients" - sys.stdout.write("Sending upgrade request: [{0}] ".format(context)) - sys.stdout.flush() - - cmd_request = create_command_request("UPGRADE_SOLR_CLIENT", {}, solr_client_hosts, cluster, context, component="INFRA_SOLR_CLIENT") - response = post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Upgrade command request id: {0}'.format(request_id) - if options.async: - print "Upgrade request sent to Ambari server. Check Ambari UI about the results." 
- sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, context) - print "{0}... {1}DONE{2}".format(context, colors.OKGREEN, colors.ENDC) - -def upgrade_logfeeders(options, accessor, parser, config): - """ - Upgrade (remove & re-install) logfeeders - """ - cluster = config.get('ambari_server', 'cluster') - logfeeder_hosts = get_service_components(options, accessor, cluster, "LOGSEARCH", "LOGSEARCH_LOGFEEDER") - - context = "Upgrade Log Feeders" - sys.stdout.write("Sending upgrade request: [{0}] ".format(context)) - sys.stdout.flush() - - cmd_request = create_command_request("UPGRADE_LOGFEEDER", {}, logfeeder_hosts, cluster, context, service="LOGSEARCH", component="LOGSEARCH_LOGFEEDER") - response = post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Upgrade command request id: {0}'.format(request_id) - if options.async: - print "Upgrade request sent to Ambari server. Check Ambari UI about the results." - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, context) - print "{0}... {1} DONE{2}".format(context, colors.OKGREEN, colors.ENDC) - -def upgrade_logsearch_portal(options, accessor, parser, config): - """ - Upgrade (remove & re-install) logsearch server instances - """ - cluster = config.get('ambari_server', 'cluster') - logsearch_portal_hosts = get_service_components(options, accessor, cluster, "LOGSEARCH", "LOGSEARCH_SERVER") - - context = "Upgrade Log Search Portal" - sys.stdout.write("Sending upgrade request: [{0}] ".format(context)) - sys.stdout.flush() - - cmd_request = create_command_request("UPGRADE_LOGSEARCH_PORTAL", {}, logsearch_portal_hosts, cluster, context, service="LOGSEARCH", component="LOGSEARCH_SERVER") - response = post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Upgrade command request id: {0}'.format(request_id) - if options.async: - print "Upgrade request sent to Ambari server. Check Ambari UI about the results." - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, context) - print "{0}... 
{1} DONE{2}".format(context, colors.OKGREEN, colors.ENDC) - -def service_components_command(options, accessor, parser, config, service, component, command, command_str): - """ - Run command on service components - """ - cluster = config.get('ambari_server', 'cluster') - service_components = get_service_components(options, accessor, cluster, service, component) - - context = "{0} {1}".format(command_str, component) - sys.stdout.write("Sending '{0}' request: [{1}] ".format(command, context)) - sys.stdout.flush() - - cmd_request = create_command_request(command, {}, service_components, cluster, context, service=service, component=component) - response = post_json(accessor, CLUSTERS_URL.format(cluster) + REQUESTS_API_URL, cmd_request) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print '{0} command request id: {1}'.format(command_str, request_id) - if options.async: - print "{0} request sent to Ambari server. Check Ambari UI about the results.".format(command_str) - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, context) - print "{0}... {1} DONE{2}".format(context, colors.OKGREEN, colors.ENDC) - -def monitor_request(options, accessor, cluster, request_id, context): - while True: - request_response=get_json(accessor, "/api/v1/clusters/{0}{1}/{2}".format(cluster, REQUESTS_API_URL, request_id)) - if 'Requests' in request_response and 'request_status' in request_response['Requests']: - request_status = request_response['Requests']['request_status'] - logger.debug("\nMonitoring '{0}' request (id: '{1}') status is {2}".format(context, request_id, request_status)) - if request_status in ['FAILED', 'TIMEDOUT', 'ABORTED', 'COMPLETED', 'SKIPPED_FAILED']: - if request_status == 'COMPLETED': - print "\nRequest (id: {0}) {1}COMPLETED{2}".format(request_id, colors.OKGREEN, colors.ENDC) - time.sleep(4) - else: - print "\nRequest (id: {0}) {1}FAILED{2} (checkout Ambari UI about the failed tasks)\n".format(request_id, colors.FAIL, colors.ENDC) - sys.exit(1) - break - else: - if not options.verbose: - sys.stdout.write(".") - sys.stdout.flush() - logger.debug("Sleep 5 seconds ...") - time.sleep(5) - else: - print "'Requests' or 'request_status' cannot be found in JSON response: {0}".format(request_response) - sys.exit(1) - -def get_request_id(json_response): - if "Requests" in json_response: - if "id" in json_response['Requests']: - return json_response['Requests']['id'] - raise Exception("Cannot access request id from Ambari response: {0}".format(json_response)) - -def filter_collections(options, collections): - if options.collection is not None: - filtered_collections = [] - if options.collection in collections: - filtered_collections.append(options.collection) - return filtered_collections - else: - return collections - -def get_infra_solr_props(config, accessor): - cluster = config.get('ambari_server', 'cluster') - service_configs = get_json(accessor, CLUSTERS_URL.format(cluster) + GET_ACTUAL_CONFIG_URL.format(SOLR_SERVICE_NAME)) - infra_solr_props = {} - infra_solr_env_properties = {} - infra_solr_security_json_properties = {} - if 'items' in service_configs and len(service_configs['items']) > 0: - if 'configurations' in service_configs['items'][0]: - for config in service_configs['items'][0]['configurations']: - if 'type' in config and 
config['type'] == 'infra-solr-env': - infra_solr_env_properties = config['properties'] - if 'type' in config and config['type'] == 'infra-solr-security-json': - infra_solr_security_json_properties = config['properties'] - infra_solr_props['infra-solr-env'] = infra_solr_env_properties - infra_solr_props['infra-solr-security-json'] = infra_solr_security_json_properties - return infra_solr_props - -def insert_string_before(full_str, sub_str, insert_str): - idx = full_str.index(sub_str) - return full_str[:idx] + insert_str + full_str[idx:] - -def set_solr_security_management(infra_solr_props, accessor, enable = True): - security_props = infra_solr_props['infra-solr-security-json'] - check_value = "false" if enable else "true" - set_value = "true" if enable else "false" - turn_status = "on" if enable else "off" - if 'infra_solr_security_manually_managed' in security_props and security_props['infra_solr_security_manually_managed'] == check_value: - security_props['infra_solr_security_manually_managed'] = set_value - post_configuration = create_configs('infra-solr-security-json', security_props, 'Turn {0} security.json manaul management by migrationHelper.py'.format(turn_status)) - apply_configs(config, accessor, post_configuration) - else: - print "Configuration 'infra-solr-security-json/infra_solr_security_manually_managed' has already set to '{0}'".format(set_value) - -def set_solr_name_rules(infra_solr_props, accessor, add = False): - """ - Set name rules in infra-solr-env/content if not set in add mode, in non-add mode, remove it if exists - :param add: solr kerb name rules needs to be added (if false, it needs to be removed) - """ - infra_solr_env_props = infra_solr_props['infra-solr-env'] - name_rules_param = "SOLR_KERB_NAME_RULES=\"{{infra_solr_kerberos_name_rules}}\"\n" - - if 'content' in infra_solr_env_props and (name_rules_param not in infra_solr_env_props['content']) is add: - if add: - print "Adding 'SOLR_KERB_NAME_RULES' to 'infra-solr-env/content'" - new_content = insert_string_before(infra_solr_env_props['content'], "SOLR_KERB_KEYTAB", name_rules_param) - infra_solr_env_props['content'] = new_content - post_configuration = create_configs('infra-solr-env', infra_solr_env_props, 'Add "SOLR_KERB_NAME_RULES" by migrationHelper.py') - apply_configs(config, accessor, post_configuration) - else: - print "Removing 'SOLR_KERB_NAME_RULES' from 'infra-solr-env/content'" - new_content = infra_solr_env_props['content'].replace(name_rules_param, '') - infra_solr_env_props['content'] = new_content - post_configuration = create_configs('infra-solr-env', infra_solr_env_props, 'Remove "SOLR_KERB_NAME_RULES" by migrationHelper.py') - apply_configs(config, accessor, post_configuration) - else: - if add: - print "'SOLR_KERB_NAME_RULES' has already set in configuration 'infra-solr-env/content'" - else: - print "Configuration 'infra-solr-env/content' does not contain 'SOLR_KERB_NAME_RULES'" - -def apply_configs(config, accessor, post_configuration): - cluster = config.get('ambari_server', 'cluster') - desired_configs_post_body = {} - desired_configs_post_body["Clusters"] = {} - desired_configs_post_body["Clusters"]["desired_configs"] = post_configuration - accessor(CLUSTERS_URL.format(cluster), 'PUT', json.dumps(desired_configs_post_body)) - -def create_configs(config_type, properties, context): - configs_for_posts = {} - configuration = {} - configuration['type'] = config_type - configuration['tag'] = "version" + str(int(round(time.time() * 1000))) - configuration['properties'] = properties - 
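# Note: the tag is derived from the current time in milliseconds ("version<epoch-ms>"), so each call produces a distinct config version when the configuration is applied through the Ambari API.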
configuration['service_config_version_note'] = context - configs_for_posts[config_type] = configuration - return configs_for_posts - -def common_data(list1, list2): - common_data = [] - for x in list1: - for y in list2: - if x == y: - common_data.append(x) - return common_data - -def filter_solr_hosts_if_match_any(splitted_solr_hosts, collection, collections_json): - """ - Return common hosts if there is any match with the collection related hosts, if not then filter won't apply (e.g.: won't filter with IPs in host names) - """ - collection_related_hosts = [] - all_collection_data = get_collections_data(collections_json) - if collection in all_collection_data: - collection_data = all_collection_data[collection] - if 'shards' in collection_data: - for shard in collection_data['shards']: - if 'replicas' in collection_data['shards'][shard]: - for replica in collection_data['shards'][shard]['replicas']: - nodeName = collection_data['shards'][shard]['replicas'][replica]['nodeName'] - hostName = nodeName.split(":")[0] - if hostName not in collection_related_hosts: - collection_related_hosts.append(hostName) - common_list = common_data(splitted_solr_hosts, collection_related_hosts) - return common_list if common_list else splitted_solr_hosts - -def get_solr_urls(options, config, collection, collections_json): - solr_urls = [] - solr_hosts = None - solr_port = "8886" - solr_protocol = "http" - if config.has_section("infra_solr") and config.has_option("infra_solr", "port"): - solr_port = config.get('infra_solr', 'port') - if config.has_section("infra_solr") and config.has_option("infra_solr", "protocol"): - solr_protocol = config.get('infra_solr', 'protocol') - if config.has_section('infra_solr') and config.has_option('infra_solr', 'hosts'): - solr_hosts = config.get('infra_solr', 'hosts') - - splitted_solr_hosts = solr_hosts.split(',') - splitted_solr_hosts = filter_solr_hosts_if_match_any(splitted_solr_hosts, collection, collections_json) - if options.include_solr_hosts: - # keep only included ones, do not override any - include_solr_hosts_list = options.include_solr_hosts.split(',') - new_splitted_hosts = [] - for host in splitted_solr_hosts: - if any(inc_solr_host in host for inc_solr_host in include_solr_hosts_list): - new_splitted_hosts.append(host) - splitted_solr_hosts = new_splitted_hosts - - if options.exclude_solr_hosts: - exclude_solr_hosts_list = options.exclude_solr_hosts.split(',') - hosts_to_exclude = [] - for host in splitted_solr_hosts: - if any(exc_solr_host in host for exc_solr_host in exclude_solr_hosts_list): - hosts_to_exclude.append(host) - for excluded_url in hosts_to_exclude: - splitted_solr_hosts.remove(excluded_url) - - for solr_host in splitted_solr_hosts: - solr_addr = "{0}://{1}:{2}/solr".format(solr_protocol, solr_host, solr_port) - solr_urls.append(solr_addr) - - return solr_urls - -def get_input_output_solr_url(src_solr_urls, target_solr_urls): - """ - Choose random solr urls for the source and target collections, prefer localhost and common urls - """ - def intersect(a, b): - return list(set(a) & set(b)) - input_solr_urls = src_solr_urls - output_solr_urls = target_solr_urls - hostname = socket.getfqdn() - if any(hostname in s for s in input_solr_urls): - input_solr_urls = filter(lambda x: hostname in x, input_solr_urls) - if any(hostname in s for s in output_solr_urls): - output_solr_urls = filter(lambda x: hostname in x, output_solr_urls) - common_url_list = intersect(input_solr_urls, output_solr_urls) - if common_url_list: - input_solr_urls = common_url_list - 
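# At this point the source and target share at least one Solr URL, so a single node can act as both input and output for the transfer.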
output_solr_urls = common_url_list - - return get_random_solr_url(input_solr_urls), get_random_solr_url(output_solr_urls) - -def is_atlas_available(config, service_filter): - return 'ATLAS' in service_filter and config.has_section('atlas_collections') \ - and config.has_option('atlas_collections', 'enabled') and config.get('atlas_collections', 'enabled') == 'true' - -def is_ranger_available(config, service_filter): - return 'RANGER' in service_filter and config.has_section('ranger_collection') \ - and config.has_option('ranger_collection', 'enabled') and config.get('ranger_collection', 'enabled') == 'true' - -def is_logsearch_available(config, service_filter): - return 'LOGSEARCH' in service_filter and config.has_section('logsearch_collections') \ - and config.has_option('logsearch_collections', 'enabled') and config.get('logsearch_collections', 'enabled') == 'true' - -def monitor_solr_async_request(options, config, status_request, request_id): - request_status_json_cmd=create_solr_api_request_command(status_request, config) - logger.debug("Solr request: {0}".format(status_request)) - async_request_success_msg = "Async Solr request (id: {0}) {1}COMPLETED{2}".format(request_id, colors.OKGREEN, colors.ENDC) - async_request_timeout_msg = "Async Solr request (id: {0}) {1}FAILED{2}".format(request_id, colors.FAIL, colors.ENDC) - async_request_fail_msg = "\nAsync Solr request (id: {0}) {1}TIMED OUT{2} (increase --solr-async-request-tries if required, default is 400)".format(request_id, colors.FAIL, colors.ENDC) - max_tries = options.solr_async_request_tries if options.solr_async_request_tries else 400 - tries = 0 - sys.stdout.write("Start monitoring Solr request with id {0} ...".format(request_id)) - sys.stdout.flush() - async_request_finished = False - async_request_failed = False - async_request_timed_out = False - while not async_request_finished: - tries = tries + 1 - process = Popen(request_status_json_cmd, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - raise Exception("{0} command failed: {1}".format(request_status_json_cmd, str(err))) - else: - response=json.loads(str(out)) - logger.debug(response) - if 'status' in response: - async_state=response['status']['state'] - async_msg=response['status']['msg'] - if async_state == "completed": - async_request_finished = True - sys.stdout.write("\nSolr response message: {0}\n".format(async_msg)) - sys.stdout.flush() - elif async_state == "failed": - async_request_finished = True - async_request_failed = True - sys.stdout.write("\nSolr response message: {0}\n".format(async_msg)) - sys.stdout.flush() - else: - if not options.verbose: - sys.stdout.write(".") - sys.stdout.flush() - logger.debug(str(async_msg)) - logger.debug("Sleep 5 seconds ...") - time.sleep(5) - else: - raise Exception("The 'status' field is missing from the response: {0}".format(response)) - if tries == max_tries: - async_request_finished = True - async_request_timed_out = True - - if async_request_failed: - if async_request_timed_out: - print async_request_timeout_msg - sys.exit(1) - else: - print async_request_fail_msg - sys.exit(1) - else: - print async_request_success_msg - return request_id - - -def delete_collection(options, config, collection, solr_urls, response_data_map): - async_id = str(randint(1000,100000)) - solr_url = get_random_solr_url(solr_urls, options) - request = DELETE_SOLR_COLLECTION_URL.format(solr_url, collection, async_id) - logger.debug("Solr request: {0}".format(request)) - 
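# The delete is issued as an async Collections API call; the request id and its status URL are passed back in response_data_map so the caller can poll them via monitor_solr_async_request.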
delete_collection_json_cmd=create_solr_api_request_command(request, config) - process = Popen(delete_collection_json_cmd, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - raise Exception("{0} command failed: {1}".format(delete_collection_json_cmd, str(err))) - response=json.loads(str(out)) - if 'requestid' in response: - print 'Deleting collection {0} request sent. {1}DONE{2}'.format(collection, colors.OKGREEN, colors.ENDC) - response_data_map['request_id']=response['requestid'] - response_data_map['status_request']=REQUEST_STATUS_SOLR_COLLECTION_URL.format(solr_url, response['requestid']) - return collection - else: - raise Exception("DELETE collection ('{0}') failed. Response: {1}".format(collection, str(out))) - -def create_collection(options, config, solr_urls, collection, config_set, shards, replica, max_shards_per_node): - request = CREATE_SOLR_COLLECTION_URL.format(get_random_solr_url(solr_urls, options), collection, config_set, shards, replica, max_shards_per_node) - logger.debug("Solr request: {0}".format(request)) - create_collection_json_cmd=create_solr_api_request_command(request, config) - process = Popen(create_collection_json_cmd, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - raise Exception("{0} command failed: {1}".format(create_collection_json_cmd, str(err))) - response=json.loads(str(out)) - if 'success' in response: - print 'Creating collection {0} was {1}SUCCESSFUL{2}'.format(collection, colors.OKGREEN, colors.ENDC) - return collection - else: - raise Exception("CREATE collection ('{0}') failed. ({1}) Response: {1}".format(collection, str(out))) - -def reload_collection(options, config, solr_urls, collection): - request = RELOAD_SOLR_COLLECTION_URL.format(get_random_solr_url(solr_urls, options), collection) - logger.debug("Solr request: {0}".format(request)) - reload_collection_json_cmd=create_solr_api_request_command(request, config) - process = Popen(reload_collection_json_cmd, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - raise Exception("{0} command failed: {1}".format(reload_collection_json_cmd, str(err))) - response=json.loads(str(out)) - if 'success' in response: - print 'Reloading collection {0} was {1}SUCCESSFUL{2}'.format(collection, colors.OKGREEN, colors.ENDC) - return collection - else: - raise Exception("RELOAD collection ('{0}') failed. 
({1}) Response: {1}".format(collection, str(out))) - -def human_size(size_bytes): - if size_bytes == 1: - return "1 byte" - suffixes_table = [('bytes',0),('KB',2),('MB',2),('GB',2),('TB',2), ('PB',2)] - num = float(size_bytes) - for suffix, precision in suffixes_table: - if num < 1024.0: - break - num /= 1024.0 - if precision == 0: - formatted_size = "%d" % num - else: - formatted_size = str(round(num, ndigits=precision)) - return "%s %s" % (formatted_size, suffix) - -def parse_size(human_size): - units = {"bytes": 1, "KB": 1024, "MB": 1024**2, "GB": 1024**3, "TB": 1024**4 } - number, unit = [string.strip() for string in human_size.split()] - return int(float(number)*units[unit]) - -def get_replica_index_size(config, core_url, replica): - request = CORE_DETAILS_URL.format(core_url) - logger.debug("Solr request: {0}".format(request)) - get_core_detaul_json_cmd=create_solr_api_request_command(request, config) - process = Popen(get_core_detaul_json_cmd, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - raise Exception("{0} command failed: {1}".format(get_core_detaul_json_cmd, str(err))) - response=json.loads(str(out)) - if 'details' in response: - if 'indexSize' in response['details']: - return response['details']['indexSize'] - else: - raise Exception("Not found 'indexSize' in core details ('{0}'). Response: {1}".format(replica, str(out))) - else: - raise Exception("GET core details ('{0}') failed. Response: {1}".format(replica, str(out))) - -def delete_znode(options, config, znode): - solr_cli_command=create_infra_solr_client_command(options, config, '--delete-znode --znode {0}'.format(znode)) - logger.debug("Solr cli command: {0}".format(solr_cli_command)) - sys.stdout.write('Deleting znode {0} ... '.format(znode)) - sys.stdout.flush() - process = Popen(solr_cli_command, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - sys.stdout.write(colors.FAIL + 'FAILED\n' + colors.ENDC) - sys.stdout.flush() - raise Exception("{0} command failed: {1}".format(solr_cli_command, str(err))) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - logger.debug(str(out)) - -def copy_znode(options, config, copy_src, copy_dest, copy_from_local=False, copy_to_local=False): - solr_cli_command=create_infra_solr_client_command(options, config, '--transfer-znode --copy-src {0} --copy-dest {1}'.format(copy_src, copy_dest)) - if copy_from_local: - solr_cli_command+=" --transfer-mode copyFromLocal" - elif copy_to_local: - solr_cli_command+=" --transfer-mode copyToLocal" - logger.debug("Solr cli command: {0}".format(solr_cli_command)) - sys.stdout.write('Transferring data from {0} to {1} ... 
'.format(copy_src, copy_dest)) - sys.stdout.flush() - process = Popen(solr_cli_command, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - sys.stdout.write(colors.FAIL + 'FAILED\n' + colors.ENDC) - sys.stdout.flush() - raise Exception("{0} command failed: {1}".format(solr_cli_command, str(err))) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - logger.debug(str(out)) - -def list_collections(options, config, output_file, include_number_of_docs=False): - dump_json_files_list=[] - skip_dump=False - if options.skip_json_dump_files: - dump_json_files_list=options.skip_json_dump_files.split(',') - if dump_json_files_list: - for dump_json_file in dump_json_files_list: - if output_file.endswith(dump_json_file): - skip_dump=True - if skip_dump: - print 'Skipping collection dump file generation: {0}'.format(output_file) - if not os.path.exists(output_file): - print "{0}FAIL{1}: Collection dump file '{2}' does not exist.".format(colors.FAIL, colors.ENDC, output_file) - sys.exit(1) - else: - command_suffix = '--dump-collections --output {0}'.format(output_file) - if include_number_of_docs: - command_suffix+=' --include-doc-number' - solr_cli_command=create_infra_solr_client_command(options, config, command_suffix, appendZnode=True) - logger.debug("Solr cli command: {0}".format(solr_cli_command)) - sys.stdout.write('Dumping collections data to {0} ... '.format(output_file)) - sys.stdout.flush() - process = Popen(solr_cli_command, stdout=PIPE, stderr=PIPE, shell=True) - out, err = process.communicate() - if process.returncode != 0: - sys.stdout.write(colors.FAIL + 'FAILED\n' + colors.ENDC) - sys.stdout.flush() - raise Exception("{0} command failed: {1}".format(solr_cli_command, str(err))) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - logger.debug(str(out)) - collections_data = get_collections_data(output_file) - return collections_data.keys() if collections_data is not None else [] - -def get_collections_data(output_file): - return read_json(output_file) - -def get_collection_data(collections_data, collection): - return collections_data[collection] if collection in collections_data else None - -def delete_logsearch_collections(options, config, collections_json_location, collections): - service_logs_collection = config.get('logsearch_collections', 'hadoop_logs_collection_name') - audit_logs_collection = config.get('logsearch_collections', 'audit_logs_collection_name') - history_collection = config.get('logsearch_collections', 'history_collection_name') - if service_logs_collection in collections: - solr_urls = get_solr_urls(options, config, service_logs_collection, collections_json_location) - response_map={} - retry(delete_collection, options, config, service_logs_collection, solr_urls, response_map, context='[Delete {0} collection]'.format(service_logs_collection)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. 
Skipping delete operation.'.format(service_logs_collection) - if audit_logs_collection in collections: - solr_urls = get_solr_urls(options, config, audit_logs_collection, collections_json_location) - response_map={} - retry(delete_collection, options, config, audit_logs_collection, solr_urls, response_map, context='[Delete {0} collection]'.format(audit_logs_collection)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. Skipping delete operation.'.format(audit_logs_collection) - if history_collection in collections: - solr_urls = get_solr_urls(options, config, history_collection, collections_json_location) - response_map={} - retry(delete_collection, options, config, history_collection, solr_urls, response_map, context='[Delete {0} collection]'.format(history_collection)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. Skipping delete operation.'.format(history_collection) - -def delete_atlas_collections(options, config, collections_json_location, collections): - fulltext_collection = config.get('atlas_collections', 'fulltext_index_name') - edge_index_collection = config.get('atlas_collections', 'edge_index_name') - vertex_index_collection = config.get('atlas_collections', 'vertex_index_name') - if fulltext_collection in collections: - solr_urls = get_solr_urls(options, config, fulltext_collection, collections_json_location) - response_map={} - retry(delete_collection, options, config, fulltext_collection, solr_urls, response_map, context='[Delete {0} collection]'.format(fulltext_collection)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. Skipping delete operation.'.format(fulltext_collection) - if edge_index_collection in collections: - solr_urls = get_solr_urls(options, config, edge_index_collection, collections_json_location) - response_map={} - retry(delete_collection, options, config, edge_index_collection, solr_urls, response_map, context='[Delete {0} collection]'.format(edge_index_collection)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. Skipping delete operation.'.format(edge_index_collection) - if vertex_index_collection in collections: - solr_urls = get_solr_urls(options, config, vertex_index_collection, collections_json_location) - response_map={} - retry(delete_collection, options, config, vertex_index_collection, solr_urls, response_map, context='[Delete {0} collection]'.format(vertex_index_collection)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. 
Skipping delete operation.'.format(vertex_index_collection) - -def delete_ranger_collection(options, config, collections_json_location, collections): - ranger_collection_name = config.get('ranger_collection', 'ranger_collection_name') - if ranger_collection_name in collections: - solr_urls = get_solr_urls(options, config, ranger_collection_name, collections_json_location) - response_map={} - retry(delete_collection, options, config, ranger_collection_name, solr_urls, response_map, context='[Delete {0} collection]'.format(ranger_collection_name)) - retry(monitor_solr_async_request, options, config, response_map['status_request'], response_map['request_id'], - context="[Monitor Solr async request, id: {0}]".format(response_map['request_id'])) - else: - print 'Collection {0} does not exist or filtered out. Skipping delete operation'.format(ranger_collection_name) - -def delete_collections(options, config, service_filter): - collections_json_location = COLLECTIONS_DATA_JSON_LOCATION.format("delete_collections.json") - collections=list_collections(options, config, collections_json_location) - collections=filter_collections(options, collections) - if is_ranger_available(config, service_filter): - delete_ranger_collection(options, config, collections_json_location, collections) - if is_atlas_available(config, service_filter): - delete_atlas_collections(options, config, collections_json_location, collections) - if is_logsearch_available(config, service_filter): - delete_logsearch_collections(options, config, collections_json_location, collections) - -def upgrade_ranger_schema(options, config, service_filter): - solr_znode='/infra-solr' - if is_ranger_available(config, service_filter): - if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'): - solr_znode=config.get('infra_solr', 'znode') - ranger_config_set_name = config.get('ranger_collection', 'ranger_config_set_name') - copy_znode(options, config, "{0}{1}".format(INFRA_SOLR_CLIENT_BASE_PATH, RANGER_NEW_SCHEMA), - "{0}/configs/{1}/managed-schema".format(solr_znode, ranger_config_set_name), copy_from_local=True) - -def backup_ranger_configs(options, config, service_filter): - solr_znode='/infra-solr' - if is_ranger_available(config, service_filter): - if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'): - solr_znode=config.get('infra_solr', 'znode') - ranger_config_set_name = config.get('ranger_collection', 'ranger_config_set_name') - backup_ranger_config_set_name = config.get('ranger_collection', 'backup_ranger_config_set_name') - copy_znode(options, config, "{0}/configs/{1}".format(solr_znode, ranger_config_set_name), - "{0}/configs/{1}".format(solr_znode, backup_ranger_config_set_name)) - -def upgrade_ranger_solrconfig_xml(options, config, service_filter): - solr_znode='/infra-solr' - if is_ranger_available(config, service_filter): - if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'): - solr_znode=config.get('infra_solr', 'znode') - ranger_config_set_name = config.get('ranger_collection', 'ranger_config_set_name') - backup_ranger_config_set_name = config.get('ranger_collection', 'backup_ranger_config_set_name') - copy_znode(options, config, "{0}/configs/{1}/solrconfig.xml".format(solr_znode, ranger_config_set_name), - "{0}/configs/{1}/solrconfig.xml".format(solr_znode, backup_ranger_config_set_name)) - -def evaluate_check_shard_result(collection, result, skip_index_size = False): - evaluate_result = {} - active_shards = result['active_shards'] - all_shards = 
result['all_shards'] - warnings = 0 - print 30 * "-" - print "Number of shards: {0}".format(str(len(all_shards))) - for shard in all_shards: - if shard in active_shards: - print "{0}OK{1}: Found active leader replica for {2}" \ - .format(colors.OKGREEN, colors.ENDC, shard) - else: - warnings=warnings+1 - print "{0}WARNING{1}: Not found any active leader replicas for {2}, migration will probably fail, fix or delete the shard if it is possible." \ - .format(colors.WARNING, colors.ENDC, shard) - - if not skip_index_size: - index_size_map = result['index_size_map'] - host_index_size_map = result['host_index_size_map'] - if index_size_map: - print "Index size per shard for {0}:".format(collection) - for shard in index_size_map: - print " - {0}: {1}".format(shard, human_size(index_size_map[shard])) - if host_index_size_map: - print "Index size per host for {0} (consider this for backup): ".format(collection) - for host in host_index_size_map: - print " - {0}: {1}".format(host, human_size(host_index_size_map[host])) - evaluate_result['host_index_size_map'] = host_index_size_map - print 30 * "-" - evaluate_result['warnings'] = warnings - return evaluate_result - -def check_shard_for_collection(config, collection, skip_index_size = False): - result = {} - active_shards = [] - all_shards = [] - index_size_map = {} - host_index_size_map = {} - collections_data = get_collections_data(COLLECTIONS_DATA_JSON_LOCATION.format("check_collections.json")) - print "Checking available shards for '{0}' collection...".format(collection) - if collection in collections_data: - collection_details = collections_data[collection] - if 'shards' in collection_details: - for shard in collection_details['shards']: - all_shards.append(shard) - if 'replicas' in collection_details['shards'][shard]: - for replica in collection_details['shards'][shard]['replicas']: - if 'state' in collection_details['shards'][shard]['replicas'][replica] \ - and collection_details['shards'][shard]['replicas'][replica]['state'].lower() == 'active' \ - and 'leader' in collection_details['shards'][shard]['replicas'][replica]['properties'] \ - and collection_details['shards'][shard]['replicas'][replica]['properties']['leader'] == 'true' : - logger.debug("Found active shard for {0} (collection: {1})".format(shard, collection)) - active_shards.append(shard) - if not skip_index_size: - core_url = collection_details['shards'][shard]['replicas'][replica]['coreUrl'] - core_name = collection_details['shards'][shard]['replicas'][replica]['coreName'] - node_name = collection_details['shards'][shard]['replicas'][replica]['nodeName'] - hostname = node_name.split(":")[0] - index_size = get_replica_index_size(config, core_url, core_name) - index_bytes = parse_size(index_size) - if hostname in host_index_size_map: - last_value = host_index_size_map[hostname] - host_index_size_map[hostname] = last_value + index_bytes - else: - host_index_size_map[hostname] = index_bytes - index_size_map[shard] = index_bytes - result['active_shards'] = active_shards - result['all_shards'] = all_shards - if not skip_index_size: - result['index_size_map'] = index_size_map - result['host_index_size_map'] = host_index_size_map - - return result - -def generate_core_pairs(original_collection, collection, config, options): - core_pairs_data={} - - original_cores={} - original_collections_data = get_collections_data(COLLECTIONS_DATA_JSON_LOCATION.format("backup_collections.json")) - if original_collection in original_collections_data and 'leaderCoreHostMap' in 
original_collections_data[original_collection]: - original_cores = original_collections_data[original_collection]['leaderCoreHostMap'] - - sorted_original_cores=[] - for key in sorted(original_cores): - sorted_original_cores.append((key, original_cores[key])) - - new_cores={} - collections_data = get_collections_data(COLLECTIONS_DATA_JSON_LOCATION.format("restore_collections.json")) - if collection in collections_data and 'leaderCoreHostMap' in collections_data[collection]: - new_cores = collections_data[collection]['leaderCoreHostMap'] - - sorted_new_cores=[] - for key in sorted(new_cores): - sorted_new_cores.append((key, new_cores[key])) - - if len(new_cores) < len(original_cores): - raise Exception("Old collection core size is: " + str(len(new_cores)) + - ". You will need at least: " + str(len(original_cores))) - else: - for index, original_core_data in enumerate(sorted_original_cores): - core_pairs_data[sorted_new_cores[index][0]]=original_core_data[0] - with open(COLLECTIONS_DATA_JSON_LOCATION.format(collection + "/restore_core_pairs.json"), 'w') as outfile: - json.dump(core_pairs_data, outfile) - return core_pairs_data - -def get_number_of_docs_map(collection_dump_file): - collections_data = get_collections_data(collection_dump_file) - doc_num_map={} - for collection in collections_data: - number_of_docs=collections_data[collection]['numberOfDocs'] - doc_num_map[collection]=number_of_docs - return doc_num_map - -def is_collection_empty(docs_map, collection): - result = False - if collection in docs_map: - num_docs=docs_map[collection] - if num_docs == -1: - print "Number of documents: -1. That means the number of docs was not provided in the collection dump." - elif num_docs == 0: - result = True - return result - -def update_state_json(original_collection, collection, config, options): - solr_znode='/infra-solr' - if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'): - solr_znode=config.get('infra_solr', 'znode') - coll_data_dir = "{0}migrate/data/{1}".format(INFRA_SOLR_CLIENT_BASE_PATH, collection) - if not os.path.exists(coll_data_dir): - os.makedirs(coll_data_dir) - - copy_znode(options, config, "{0}/collections/{1}/state.json".format(solr_znode, collection), "{0}/state.json".format(coll_data_dir), copy_to_local=True) - copy_znode(options, config, "{0}/restore_metadata/{1}".format(solr_znode, collection), "{0}".format(coll_data_dir), copy_to_local=True) - - json_file_list=glob.glob("{0}/*.json".format(coll_data_dir)) - logger.debug("Downloaded json files list: {0}".format(str(json_file_list))) - - cores_data_json_list = [k for k in json_file_list if 'state.json' not in k and 'new_state.json' not in k and 'restore_core_pairs.json' not in k] - state_json_list = [k for k in json_file_list if '/state.json' in k] - - if not cores_data_json_list: - raise Exception('Cannot find any downloaded restore core metadata for {0}'.format(collection)) - if not state_json_list: - raise Exception('Cannot find any downloaded restore collection state metadata for {0}'.format(collection)) - - core_pairs = generate_core_pairs(original_collection, collection, config, options) - cores_to_skip = [] - logger.debug("Generated core pairs: {0}".format(str(core_pairs))) - if options.skip_cores: - cores_to_skip = options.skip_cores.split(',') - logger.debug("Cores to skip: {0}".format(str(cores_to_skip))) - - state_json_file=state_json_list[0] - state_data = read_json(state_json_file) - core_json_data=[] - - for core_data_json_file in cores_data_json_list: - 
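# Each downloaded core JSON maps an original core_node/host to its new core_node/host; read them all so state.json can be rewritten with the new locations below.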
core_json_data.append(read_json(core_data_json_file)) - - logger.debug("collection data content: {0}".format(str(state_data))) - core_details={} - for core in core_json_data: - core_details[core['core_node']]=core - logger.debug("core data contents: {0}".format(str(core_details))) - - collection_data = state_data[collection] - shards = collection_data['shards'] - new_state_json_data=copy.deepcopy(state_data) - - for shard in shards: - replicas = shards[shard]['replicas'] - for replica in replicas: - core_data = replicas[replica] - core = core_data['core'] - base_url = core_data['base_url'] - node_name = core_data['node_name'] - data_dir = core_data['dataDir'] if 'dataDir' in core_data else None - ulog_dir = core_data['ulogDir'] if 'ulogDir' in core_data else None - - if cores_to_skip and (core in cores_to_skip or (core in core_pairs and core_pairs[core] in cores_to_skip)): - print "Skipping core '{0}' as it is in skip-cores list (or its original pair: '{1}')".format(core, core_pairs[core]) - elif replica in core_details: - old_core_node=core_details[replica]['core_node'] - new_core_node=core_details[replica]['new_core_node'] - - new_state_core = copy.deepcopy(state_data[collection]['shards'][shard]['replicas'][replica]) - new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]=new_state_core - if old_core_node != new_core_node: - if old_core_node in new_state_json_data[collection]['shards'][shard]['replicas']: - del new_state_json_data[collection]['shards'][shard]['replicas'][old_core_node] - if data_dir: - new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['dataDir']=data_dir.replace(old_core_node, new_core_node) - if ulog_dir: - new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['ulogDir']=ulog_dir.replace(old_core_node, new_core_node) - old_host=core_details[replica]['old_host'] - new_host=core_details[replica]['new_host'] - if old_host != new_host and old_core_node != new_core_node: - new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['base_url']=base_url.replace(old_host, new_host) - new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['node_name']=node_name.replace(old_host, new_host) - elif old_host != new_host: - new_state_json_data[collection]['shards'][shard]['replicas'][replica]['base_url']=base_url.replace(old_host, new_host) - new_state_json_data[collection]['shards'][shard]['replicas'][replica]['node_name']=node_name.replace(old_host, new_host) - - with open("{0}/new_state.json".format(coll_data_dir), 'w') as outfile: - json.dump(new_state_json_data, outfile) - - copy_znode(options, config, "{0}/new_state.json".format(coll_data_dir), "{0}/collections/{1}/state.json".format(solr_znode, collection), copy_from_local=True) - -def delete_znodes(options, config, service_filter): - solr_znode='/infra-solr' - if is_logsearch_available(config, service_filter): - if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'): - solr_znode=config.get('infra_solr', 'znode') - delete_znode(options, config, "{0}/configs/hadoop_logs".format(solr_znode)) - delete_znode(options, config, "{0}/configs/audit_logs".format(solr_znode)) - delete_znode(options, config, "{0}/configs/history".format(solr_znode)) - -def do_backup_request(options, accessor, parser, config, collection, index_location): - sys.stdout.write("Sending backup collection request ('{0}') to Ambari to process (backup destination: '{1}')..." 
- .format(collection, index_location)) - sys.stdout.flush() - response = backup(options, accessor, parser, config, collection, index_location) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Backup command request id: {0}'.format(request_id) - if options.async: - print "Backup {0} collection request sent to Ambari server. Check Ambari UI about the results.".format(collection) - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, 'Backup Solr collection: ' + collection) - print "Backup collection '{0}'... {1}DONE{2}".format(collection, colors.OKGREEN, colors.ENDC) - -def do_migrate_request(options, accessor, parser, config, collection, index_location): - sys.stdout.write("Sending migrate collection request ('{0}') to Ambari to process (migrate folder: '{1}')..." - .format(collection, index_location)) - sys.stdout.flush() - response = migrate(options, accessor, parser, config, collection, index_location) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Migrate command request id: {0}'.format(request_id) - if options.async: - print "Migrate {0} collection index request sent to Ambari server. Check Ambari UI about the results.".format(collection) - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, 'Migrate Solr collection index: ' + collection) - print "Migrate index '{0}'... {1}DONE{2}".format(collection, colors.OKGREEN, colors.ENDC) - -def do_restore_request(options, accessor, parser, config, original_collection, collection, config_set, index_location, shards, hdfs_path): - sys.stdout.write("Sending restore collection request ('{0}') to Ambari to process (backup location: '{1}')..." - .format(collection, index_location)) - sys.stdout.flush() - response = restore(options, accessor, parser, config, original_collection, collection, config_set, index_location, hdfs_path, shards) - request_id = get_request_id(response) - sys.stdout.write(colors.OKGREEN + 'DONE\n' + colors.ENDC) - sys.stdout.flush() - print 'Restore command request id: {0}'.format(request_id) - if options.async: - print "Restore {0} collection request sent to Ambari server. Check Ambari UI about the results.".format(collection) - sys.exit(0) - else: - sys.stdout.write("Start monitoring Ambari request with id {0} ...".format(request_id)) - sys.stdout.flush() - cluster = config.get('ambari_server', 'cluster') - monitor_request(options, accessor, cluster, request_id, 'Restore Solr collection: ' + collection) - print "Restoring collection '{0}'... 
{1}DONE{2}".format(collection, colors.OKGREEN, colors.ENDC) - -def get_ranger_index_location(collection, config, options): - ranger_index_location = None - if options.index_location: - ranger_index_location = os.path.join(options.index_location, "ranger") - elif options.ranger_index_location: - ranger_index_location = options.ranger_index_location - elif config.has_option('ranger_collection', 'backup_path'): - ranger_index_location = config.get('ranger_collection', 'backup_path') - else: - print "'backup_path'is missing from config file and --index-location or --ranger-index-location options are missing as well. Backup collection {0} {1}FAILED{2}." \ - .format(collection, colors.FAIL, colors.ENDC) - sys.exit(1) - return ranger_index_location - -def get_atlas_index_location(collection, config, options): - atlas_index_location = None - if options.index_location: - atlas_index_location = os.path.join(options.index_location, "atlas", collection) - elif options.ranger_index_location: - atlas_index_location = os.path.join(options.atlas_index_location, collection) - elif config.has_option('atlas_collections', 'backup_path'): - atlas_index_location = os.path.join(config.get('atlas_collections', 'backup_path'), collection) - else: - print "'backup_path'is missing from config file and --index-location or --atlas-index-location options are missing as well. Backup collection {0} {1}FAILED{2}." \ - .format(collection, colors.FAIL, colors.ENDC) - sys.exit(1) - return atlas_index_location - -def backup_collections(options, accessor, parser, config, service_filter): - collections=list_collections(options, config, COLLECTIONS_DATA_JSON_LOCATION.format("backup_collections.json"), include_number_of_docs=True) - collections=filter_collections(options, collections) - num_docs_map = get_number_of_docs_map(COLLECTIONS_DATA_JSON_LOCATION.format("backup_collections.json")) - if is_ranger_available(config, service_filter): - collection_name = config.get('ranger_collection', 'ranger_collection_name') - if collection_name in collections: - if is_collection_empty(num_docs_map, collection_name): - print "Collection '{0}' is empty. Backup is not required.".format(collection_name) - else: - ranger_index_location=get_ranger_index_location(collection_name, config, options) - do_backup_request(options, accessor, parser, config, collection_name, ranger_index_location) - else: - print 'Collection {0} does not exist or filtered out. Skipping backup operation.'.format(collection_name) - if is_atlas_available(config, service_filter): - fulltext_index_collection = config.get('atlas_collections', 'fulltext_index_name') - if fulltext_index_collection in collections: - if is_collection_empty(num_docs_map, fulltext_index_collection): - print "Collection '{0}' is empty. Backup is not required.".format(fulltext_index_collection) - else: - fulltext_index_location = get_atlas_index_location(fulltext_index_collection, config, options) - do_backup_request(options, accessor, parser, config, fulltext_index_collection, fulltext_index_location) - else: - print 'Collection {0} does not exist or filtered out. Skipping backup operation.'.format(fulltext_index_collection) - vertex_index_collection = config.get('atlas_collections', 'vertex_index_name') - if vertex_index_collection in collections: - if is_collection_empty(num_docs_map, vertex_index_collection): - print "Collection '{0}' is empty. 
Backup is not required.".format(vertex_index_collection) - else: - vertex_index_location = get_atlas_index_location(vertex_index_collection, config, options) - do_backup_request(options, accessor, parser, config, vertex_index_collection, vertex_index_location) - else: - print 'Collection {0} does not exist or filtered out. Skipping backup operation.'.format(vertex_index_collection) - edge_index_collection = config.get('atlas_collections', 'edge_index_name') - if edge_index_collection in collections: - if is_collection_empty(num_docs_map, edge_index_collection): - print "Collection '{0}' is empty. Backup is not required.".format(edge_index_collection) - else: - edge_index_location = get_atlas_index_location(edge_index_collection, config, options) - do_backup_request(options, accessor, parser, config, edge_index_collection, edge_index_location) - else: - print 'Collection {0} does not exist or filtered out. Skipping backup operation.'.format(edge_index_collection) - -def migrate_snapshots(options, accessor, parser, config, service_filter): - if is_ranger_available(config, service_filter): - collection_name = config.get('ranger_collection', 'ranger_collection_name') - if options.collection is None or options.collection == collection_name: - ranger_index_location=get_ranger_index_location(collection_name, config, options) - do_migrate_request(options, accessor, parser, config, collection_name, ranger_index_location) - else: - print "Collection '{0}' backup index has filtered out. Skipping migrate operation.".format(collection_name) - if is_atlas_available(config, service_filter): - fulltext_index_collection = config.get('atlas_collections', 'fulltext_index_name') - if options.collection is None or options.collection == fulltext_index_collection: - fulltext_index_location=get_atlas_index_location(fulltext_index_collection, config, options) - do_migrate_request(options, accessor, parser, config, fulltext_index_collection, fulltext_index_location) - else: - print "Collection '{0}' backup index has filtered out. Skipping migrate operation.".format(fulltext_index_collection) - vertex_index_collection = config.get('atlas_collections', 'vertex_index_name') - if options.collection is None or options.collection == vertex_index_collection: - vertex_index_location=get_atlas_index_location(vertex_index_collection, config, options) - do_migrate_request(options, accessor, parser, config, vertex_index_collection, vertex_index_location) - else: - print "Collection '{0}' backup index has filtered out. Skipping migrate operation.".format(vertex_index_collection) - edge_index_collection = config.get('atlas_collections', 'edge_index_name') - if options.collection is None or options.collection == edge_index_collection: - edge_index_location=get_atlas_index_location(edge_index_collection, config, options) - do_migrate_request(options, accessor, parser, config, edge_index_collection, edge_index_location) - else: - print "Collection '{0}' backup index has filtered out. 
Skipping migrate operation.".format(edge_index_collection) - -def create_backup_collections(options, accessor, parser, config, service_filter): - collections_json_location = COLLECTIONS_DATA_JSON_LOCATION.format("before_restore_collections.json") - num_docs_map = get_number_of_docs_map(COLLECTIONS_DATA_JSON_LOCATION.format("backup_collections.json")) - collections=list_collections(options, config, collections_json_location) - replica_number = "1" # hard coded - if is_ranger_available(config, service_filter): - original_ranger_collection = config.get('ranger_collection', 'ranger_collection_name') - backup_ranger_collection = config.get('ranger_collection', 'backup_ranger_collection_name') - if original_ranger_collection in collections: - if is_collection_empty(num_docs_map, original_ranger_collection): - print "Collection '{0}' was empty during backup. It won't need a backup collection.".format(original_ranger_collection) - else: - if backup_ranger_collection not in collections: - if options.collection is not None and options.collection != backup_ranger_collection: - print "Collection {0} has filtered out. Skipping create operation.".format(backup_ranger_collection) - else: - solr_urls = get_solr_urls(options, config, backup_ranger_collection, collections_json_location) - backup_ranger_config_set = config.get('ranger_collection', 'backup_ranger_config_set_name') - backup_ranger_shards = config.get('ranger_collection', 'ranger_collection_shards') - backup_ranger_max_shards = config.get('ranger_collection', 'ranger_collection_max_shards_per_node') - retry(create_collection, options, config, solr_urls, backup_ranger_collection, backup_ranger_config_set, - backup_ranger_shards, replica_number, backup_ranger_max_shards, context="[Create Solr Collections]") - else: - print "Collection {0} has already exist. Skipping create operation.".format(backup_ranger_collection) - if is_atlas_available(config, service_filter): - backup_atlas_config_set = config.get('atlas_collections', 'config_set') - backup_fulltext_index_name = config.get('atlas_collections', 'backup_fulltext_index_name') - original_fulltext_index_name = config.get('atlas_collections', 'fulltext_index_name') - if original_fulltext_index_name in collections: - if is_collection_empty(num_docs_map, original_fulltext_index_name): - print "Collection '{0}' was empty during backup. It won't need a backup collection.".format(original_fulltext_index_name) - else: - if backup_fulltext_index_name not in collections: - if options.collection is not None and options.collection != backup_fulltext_index_name: - print "Collection {0} has filtered out. Skipping create operation.".format(backup_fulltext_index_name) - else: - solr_urls = get_solr_urls(options, config, backup_fulltext_index_name, collections_json_location) - backup_fulltext_index_shards = config.get('atlas_collections', 'fulltext_index_shards') - backup_fulltext_index_max_shards = config.get('atlas_collections', 'fulltext_index_max_shards_per_node') - retry(create_collection, options, config, solr_urls, backup_fulltext_index_name, backup_atlas_config_set, - backup_fulltext_index_shards, replica_number, backup_fulltext_index_max_shards, context="[Create Solr Collections]") - else: - print "Collection {0} has already exist. 
Skipping create operation.".format(backup_fulltext_index_name) - - backup_edge_index_name = config.get('atlas_collections', 'backup_edge_index_name') - original_edge_index_name = config.get('atlas_collections', 'edge_index_name') - if original_edge_index_name in collections: - if is_collection_empty(num_docs_map, original_edge_index_name): - print "Collection '{0}' was empty during backup. It won't need a backup collection.".format(original_edge_index_name) - else: - if backup_edge_index_name not in collections: - if options.collection is not None and options.collection != backup_edge_index_name: - print "Collection {0} has filtered out. Skipping create operation.".format(backup_edge_index_name) - else: - solr_urls = get_solr_urls(options, config, backup_edge_index_name, collections_json_location) - backup_edge_index_shards = config.get('atlas_collections', 'edge_index_shards') - backup_edge_index_max_shards = config.get('atlas_collections', 'edge_index_max_shards_per_node') - retry(create_collection, options, config, solr_urls, backup_edge_index_name, backup_atlas_config_set, - backup_edge_index_shards, replica_number, backup_edge_index_max_shards, context="[Create Solr Collections]") - else: - print "Collection {0} has already exist. Skipping create operation.".format(backup_edge_index_name) - - backup_vertex_index_name = config.get('atlas_collections', 'backup_vertex_index_name') - original_vertex_index_name = config.get('atlas_collections', 'vertex_index_name') - if original_vertex_index_name in collections: - if is_collection_empty(num_docs_map, original_vertex_index_name): - print "Collection '{0}' was empty during backup. It won't need a backup collection.".format(original_vertex_index_name) - else: - if backup_vertex_index_name not in collections: - if options.collection is not None and options.collection != backup_vertex_index_name: - print "Collection {0} has filtered out. Skipping create operation.".format(backup_vertex_index_name) - else: - solr_urls = get_solr_urls(options, config, backup_vertex_index_name, collections_json_location) - backup_vertex_index_shards = config.get('atlas_collections', 'vertex_index_shards') - backup_vertex_index_max_shards = config.get('atlas_collections', 'vertex_index_max_shards_per_node') - retry(create_collection, options, config, solr_urls, backup_vertex_index_name, backup_atlas_config_set, - backup_vertex_index_shards, replica_number, backup_vertex_index_max_shards, context="[Create Solr Collections]") - else: - print "Collection {0} has already exist. 
Skipping create operation.".format(backup_fulltext_index_name) - -def restore_collections(options, accessor, parser, config, service_filter): - collections=list_collections(options, config, COLLECTIONS_DATA_JSON_LOCATION.format("restore_collections.json")) - collections=filter_collections(options, collections) - if 'RANGER' in service_filter and config.has_section('ranger_collection') and config.has_option('ranger_collection', 'enabled') \ - and config.get('ranger_collection', 'enabled') == 'true': - collection_name = config.get('ranger_collection', 'ranger_collection_name') - backup_ranger_collection = config.get('ranger_collection', 'backup_ranger_collection_name') - backup_ranger_config_set_name = config.get('ranger_collection', 'backup_ranger_config_set_name') - - hdfs_base_path = None - if options.ranger_hdfs_base_path: - hdfs_base_path = options.ranger_hdfs_base_path - elif options.hdfs_base_path: - hdfs_base_path = options.hdfs_base_path - elif config.has_option('ranger_collection', 'hdfs_base_path'): - hdfs_base_path = config.get('ranger_collection', 'hdfs_base_path') - if backup_ranger_collection in collections: - backup_ranger_shards = config.get('ranger_collection', 'ranger_collection_shards') - ranger_index_location=get_ranger_index_location(collection_name, config, options) - do_restore_request(options, accessor, parser, config, collection_name, backup_ranger_collection, backup_ranger_config_set_name, ranger_index_location, backup_ranger_shards, hdfs_base_path) - else: - print "Collection '{0}' does not exist or filtered out. Skipping restore operation.".format(backup_ranger_collection) - - if is_atlas_available(config, service_filter): - hdfs_base_path = None - if options.ranger_hdfs_base_path: - hdfs_base_path = options.atlas_hdfs_base_path - elif options.hdfs_base_path: - hdfs_base_path = options.hdfs_base_path - elif config.has_option('atlas_collections', 'hdfs_base_path'): - hdfs_base_path = config.get('atlas_collections', 'hdfs_base_path') - atlas_config_set = config.get('atlas_collections', 'config_set') - - fulltext_index_collection = config.get('atlas_collections', 'fulltext_index_name') - backup_fulltext_index_name = config.get('atlas_collections', 'backup_fulltext_index_name') - if backup_fulltext_index_name in collections: - backup_fulltext_index_shards = config.get('atlas_collections', 'fulltext_index_shards') - fulltext_index_location=get_atlas_index_location(fulltext_index_collection, config, options) - do_restore_request(options, accessor, parser, config, fulltext_index_collection, backup_fulltext_index_name, atlas_config_set, fulltext_index_location, backup_fulltext_index_shards, hdfs_base_path) - else: - print "Collection '{0}' does not exist or filtered out. Skipping restore operation.".format(fulltext_index_collection) - - edge_index_collection = config.get('atlas_collections', 'edge_index_name') - backup_edge_index_name = config.get('atlas_collections', 'backup_edge_index_name') - if backup_edge_index_name in collections: - backup_edge_index_shards = config.get('atlas_collections', 'edge_index_shards') - edge_index_location=get_atlas_index_location(edge_index_collection, config, options) - do_restore_request(options, accessor, parser, config, edge_index_collection, backup_edge_index_name, atlas_config_set, edge_index_location, backup_edge_index_shards, hdfs_base_path) - else: - print "Collection '{0}' does not exist or filtered out. 
Skipping restore operation.".format(edge_index_collection) - - vertex_index_collection = config.get('atlas_collections', 'vertex_index_name') - backup_vertex_index_name = config.get('atlas_collections', 'backup_vertex_index_name') - if backup_vertex_index_name in collections: - backup_vertex_index_shards = config.get('atlas_collections', 'vertex_index_shards') - vertex_index_location=get_atlas_index_location(vertex_index_collection, config, options) - do_restore_request(options, accessor, parser, config, vertex_index_collection, backup_vertex_index_name, atlas_config_set, vertex_index_location, backup_vertex_index_shards, hdfs_base_path) - else: - print "Collection '{0}' does not exist or filtered out. Skipping restore operation.".format(vertex_index_collection) - -def reload_collections(options, accessor, parser, config, service_filter): - collections_json_location = COLLECTIONS_DATA_JSON_LOCATION.format("reload_collections.json") - collections=list_collections(options, config, collections_json_location) - collections=filter_collections(options, collections) - if is_ranger_available(config, service_filter): - backup_ranger_collection = config.get('ranger_collection', 'backup_ranger_collection_name') - if backup_ranger_collection in collections: - solr_urls = get_solr_urls(options, config, backup_ranger_collection, collections_json_location) - retry(reload_collection, options, config, solr_urls, backup_ranger_collection, context="[Reload Solr Collections]") - else: - print "Collection '{0}' does not exist or filtered out. Skipping reload operation.".format(backup_ranger_collection) - if is_atlas_available(config, service_filter): - backup_fulltext_index_name = config.get('atlas_collections', 'backup_fulltext_index_name') - if backup_fulltext_index_name in collections: - solr_urls = get_solr_urls(options, config, backup_fulltext_index_name, collections_json_location) - retry(reload_collection, options, config, solr_urls, backup_fulltext_index_name, context="[Reload Solr Collections]") - else: - print "Collection '{0}' does not exist or filtered out. Skipping reload operation.".format(backup_fulltext_index_name) - backup_edge_index_name = config.get('atlas_collections', 'backup_edge_index_name') - if backup_edge_index_name in collections: - solr_urls = get_solr_urls(options, config, backup_edge_index_name, collections_json_location) - retry(reload_collection, options, config, solr_urls, backup_edge_index_name, context="[Reload Solr Collections]") - else: - print "Collection '{0}' does not exist or filtered out. Skipping reload operation.".format(backup_edge_index_name) - backup_vertex_index_name = config.get('atlas_collections', 'backup_vertex_index_name') - if backup_vertex_index_name in collections: - solr_urls = get_solr_urls(options, config, backup_vertex_index_name, collections_json_location) - retry(reload_collection, options, config, solr_urls, backup_vertex_index_name, context="[Reload Solr Collections]") - else: - print "Collection '{0}' does not exist or filtered out. 
Skipping reload operation.".format(backup_fulltext_index_name) - -def validate_ini_file(options, parser): - if options.ini_file is None: - parser.print_help() - print 'ini-file option is missing' - sys.exit(1) - elif not os.path.isfile(options.ini_file): - parser.print_help() - print 'ini file ({0}) does not exist'.format(options.ini_file) - sys.exit(1) - -def rolling_restart(options, accessor, parser, config, service_name, component_name, context): - cluster = config.get('ambari_server', 'cluster') - component_hosts = get_solr_hosts(options, accessor, cluster) - interval_secs = options.batch_interval - fault_tolerance = options.batch_fault_tolerance - request_body = create_batch_command("RESTART", component_hosts, cluster, service_name, component_name, interval_secs, fault_tolerance, "Rolling restart Infra Solr Instances") - post_json(accessor, BATCH_REQUEST_API_URL.format(cluster), request_body) - print "{0} request sent. (check Ambari UI about the requests)".format(context) - -def update_state_jsons(options, accessor, parser, config, service_filter): - collections=list_collections(options, config, COLLECTIONS_DATA_JSON_LOCATION.format("collections.json")) - collections=filter_collections(options, collections) - if is_ranger_available(config, service_filter): - original_ranger_collection = config.get('ranger_collection', 'ranger_collection_name') - backup_ranger_collection = config.get('ranger_collection', 'backup_ranger_collection_name') - if backup_ranger_collection in collections: - update_state_json(original_ranger_collection, backup_ranger_collection, config, options) - else: - print "Collection '{0}' does not exist or filtered out. Skipping update collection state operation.".format(backup_ranger_collection) - if is_atlas_available(config, service_filter): - original_fulltext_index_name = config.get('atlas_collections', 'fulltext_index_name') - backup_fulltext_index_name = config.get('atlas_collections', 'backup_fulltext_index_name') - if backup_fulltext_index_name in collections: - update_state_json(original_fulltext_index_name, backup_fulltext_index_name, config, options) - else: - print "Collection '{0}' does not exist or filtered out. Skipping update collection state operation.".format(backup_fulltext_index_name) - original_edge_index_name = config.get('atlas_collections', 'edge_index_name') - backup_edge_index_name = config.get('atlas_collections', 'backup_edge_index_name') - if backup_edge_index_name in collections: - update_state_json(original_edge_index_name, backup_edge_index_name, config, options) - else: - print "Collection '{0}' does not exist or filtered out. Skipping update collection state operation.".format(backup_edge_index_name) - original_vertex_index_name = config.get('atlas_collections', 'vertex_index_name') - backup_vertex_index_name = config.get('atlas_collections', 'backup_vertex_index_name') - if backup_vertex_index_name in collections: - update_state_json(original_vertex_index_name, backup_vertex_index_name, config, options) - else: - print "Collection '{0}' does not exist or filtered out. 
Skipping update collection state operation.".format(backup_fulltext_index_name) - -def set_solr_authorization(options, accessor, parser, config, enable_authorization, fix_kerberos_config = False): - solr_znode='/infra-solr' - if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'): - solr_znode=config.get('infra_solr', 'znode') - kerberos_enabled='false' - if config.has_section('cluster') and config.has_option('cluster', 'kerberos_enabled'): - kerberos_enabled=config.get('cluster', 'kerberos_enabled') - if kerberos_enabled == 'true': - infra_solr_props = get_infra_solr_props(config, accessor) - if enable_authorization: - print "Enable Solr security.json management by Ambari ... " - set_solr_security_management(infra_solr_props, accessor, enable = False) - if fix_kerberos_config: - set_solr_name_rules(infra_solr_props, accessor, False) - else: - print "Disable Solr authorization by uploading a new security.json and turn on security.json management by Ambari..." - set_solr_security_management(infra_solr_props, accessor, enable = True) - copy_znode(options, config, COLLECTIONS_DATA_JSON_LOCATION.format("security-without-authr.json"), - "{0}/security.json".format(solr_znode), copy_from_local=True) - if fix_kerberos_config: - set_solr_name_rules(infra_solr_props, accessor, True) - else: - if fix_kerberos_config: - print "Security is not enabled. Skipping enable/disable Solr authorization + fix infra-solr-env kerberos config operation." - else: - print "Security is not enabled. Skipping enable/disable Solr authorization operation." - -def summarize_shard_check_result(check_results, skip_warnings = False, skip_index_size = False): - warnings = 0 - index_size_per_host = {} - for collection in check_results: - warnings=warnings+check_results[collection]['warnings'] - if not skip_index_size and 'host_index_size_map' in check_results[collection]: - host_index_size_map = check_results[collection]['host_index_size_map'] - for host in host_index_size_map: - if host in index_size_per_host: - last_value=index_size_per_host[host] - index_size_per_host[host]=last_value+host_index_size_map[host] - else: - index_size_per_host[host]=host_index_size_map[host] - pass - if not skip_index_size and index_size_per_host: - print "Full index size per hosts: (consider this for backup)" - for host in index_size_per_host: - print " - {0}: {1}".format(host, human_size(index_size_per_host[host])) - - print "All warnings: {0}".format(warnings) - if warnings != 0 and not skip_warnings: - print "Check shards - {0}FAILED{1} (warnings: {2}, fix warnings or use --skip-warnings flag to PASS) ".format(colors.FAIL, colors.ENDC, warnings) - sys.exit(1) - else: - print "Check shards - {0}PASSED{1}".format(colors.OKGREEN, colors.ENDC) - -def check_shards(options, accessor, parser, config, backup_shards = False): - collections=list_collections(options, config, COLLECTIONS_DATA_JSON_LOCATION.format("check_collections.json")) - collections=filter_collections(options, collections) - check_results={} - if is_ranger_available(config, service_filter): - ranger_collection = config.get('ranger_collection', 'backup_ranger_collection_name') if backup_shards \ - else config.get('ranger_collection', 'ranger_collection_name') - if ranger_collection in collections: - ranger_collection_details = check_shard_for_collection(config, ranger_collection, options.skip_index_size) - check_results[ranger_collection]=evaluate_check_shard_result(ranger_collection, ranger_collection_details, options.skip_index_size) - else: - print 
"Collection '{0}' does not exist or filtered out. Skipping check collection operation.".format(ranger_collection) - if is_atlas_available(config, service_filter): - fulltext_index_name = config.get('atlas_collections', 'backup_fulltext_index_name') if backup_shards \ - else config.get('atlas_collections', 'fulltext_index_name') - if fulltext_index_name in collections: - fulltext_collection_details = check_shard_for_collection(config, fulltext_index_name, options.skip_index_size) - check_results[fulltext_index_name]=evaluate_check_shard_result(fulltext_index_name, fulltext_collection_details, options.skip_index_size) - else: - print "Collection '{0}' does not exist or filtered out. Skipping check collection operation.".format(fulltext_index_name) - edge_index_name = config.get('atlas_collections', 'backup_edge_index_name') if backup_shards \ - else config.get('atlas_collections', 'edge_index_name') - if edge_index_name in collections: - edge_collection_details = check_shard_for_collection(config, edge_index_name, options.skip_index_size) - check_results[edge_index_name]=evaluate_check_shard_result(edge_index_name, edge_collection_details, options.skip_index_size) - else: - print "Collection '{0}' does not exist or filtered out. Skipping check collection operation.".format(edge_index_name) - vertex_index_name = config.get('atlas_collections', 'backup_vertex_index_name') if backup_shards \ - else config.get('atlas_collections', 'vertex_index_name') - if vertex_index_name in collections: - vertex_collection_details = check_shard_for_collection(config, vertex_index_name, options.skip_index_size) - check_results[vertex_index_name]=evaluate_check_shard_result(vertex_index_name, vertex_collection_details, options.skip_index_size) - else: - print "Collection '{0}' does not exist or filtered out. Skipping check collection operation.".format(fulltext_index_name) - summarize_shard_check_result(check_results, options.skip_warnings, options.skip_index_size) - -def check_docs(options, accessor, parser, config): - collections=list_collections(options, config, COLLECTIONS_DATA_JSON_LOCATION.format("check_docs_collections.json"), include_number_of_docs=True) - if collections: - print "Get the number of documents per collections ..." - docs_map = get_number_of_docs_map(COLLECTIONS_DATA_JSON_LOCATION.format("check_docs_collections.json")) - for collection_docs_data in docs_map: - print "Collection: '{0}' - Number of docs: {1}".format(collection_docs_data, docs_map[collection_docs_data]) - else: - print "Check number of documents - Not found any collections." - -def run_solr_data_manager_on_collection(options, config, collections, src_collection, target_collection, - collections_json_location, num_docs, skip_date_usage = True): - if target_collection in collections and src_collection in collections: - source_solr_urls = get_solr_urls(options, config, src_collection, collections_json_location) - target_solr_urls = get_solr_urls(options, config, target_collection, collections_json_location) - if is_collection_empty(num_docs, src_collection): - print "Collection '{0}' is empty. 
Skipping transport data operation.".format(target_collection) - else: - src_solr_url, target_solr_url = get_input_output_solr_url(source_solr_urls, target_solr_urls) - keytab, principal = get_keytab_and_principal(config) - date_format = "%Y-%m-%dT%H:%M:%S.%fZ" - d = datetime.now() + timedelta(days=365) - end = d.strftime(date_format) - print "Running solrDataManager.py (solr input collection: {0}, solr output collection: {1})"\ - .format(src_collection, target_collection) - solr_data_manager.verbose = options.verbose - solr_data_manager.set_log_level(True) - solr_data_manager.save("archive", src_solr_url, src_collection, "evtTime", "id", end, - options.transport_read_block_size, options.transport_write_block_size, - False, None, None, keytab, principal, False, "none", None, None, None, - None, None, None, None, None, target_collection, - target_solr_url, "_version_", skip_date_usage) - else: - print "Collection '{0}' or {1} does not exist or filtered out. Skipping transport data operation.".format(target_collection, src_collection) - -def transfer_old_data(options, accessor, parser, config): - collections_json_location = COLLECTIONS_DATA_JSON_LOCATION.format("transport_collections.json") - collections=list_collections(options, config, collections_json_location, include_number_of_docs=True) - collections=filter_collections(options, collections) - docs_map = get_number_of_docs_map(collections_json_location) if collections else {} - if is_ranger_available(config, service_filter): - original_ranger_collection = config.get('ranger_collection', 'ranger_collection_name') - backup_ranger_collection = config.get('ranger_collection', 'backup_ranger_collection_name') - run_solr_data_manager_on_collection(options, config, collections, backup_ranger_collection, - original_ranger_collection, collections_json_location, docs_map, skip_date_usage=False) - if is_atlas_available(config, service_filter): - original_fulltext_index_name = config.get('atlas_collections', 'fulltext_index_name') - backup_fulltext_index_name = config.get('atlas_collections', 'backup_fulltext_index_name') - run_solr_data_manager_on_collection(options, config, collections, backup_fulltext_index_name, - original_fulltext_index_name, collections_json_location, docs_map) - - original_edge_index_name = config.get('atlas_collections', 'edge_index_name') - backup_edge_index_name = config.get('atlas_collections', 'backup_edge_index_name') - run_solr_data_manager_on_collection(options, config, collections, backup_edge_index_name, - original_edge_index_name, collections_json_location, docs_map) - - original_vertex_index_name = config.get('atlas_collections', 'vertex_index_name') - backup_vertex_index_name = config.get('atlas_collections', 'backup_vertex_index_name') - run_solr_data_manager_on_collection(options, config, collections, backup_vertex_index_name, - original_vertex_index_name, collections_json_location, docs_map) - - -if __name__=="__main__": - parser = optparse.OptionParser("usage: %prog [options]") - - parser.add_option("-a", "--action", dest="action", type="string", help="delete-collections | backup | cleanup-znodes | backup-and-cleanup | migrate | restore |' \ - ' rolling-restart-solr | rolling-restart-atlas | rolling-restart-ranger | check-shards | check-backup-shards | enable-solr-authorization | disable-solr-authorization |'\ - ' fix-solr5-kerberos-config | fix-solr7-kerberos-config | upgrade-solr-clients | upgrade-solr-instances | upgrade-logsearch-portal | upgrade-logfeeders | stop-logsearch |'\ - ' restart-solr 
|restart-logsearch | restart-ranger | restart-atlas | transport-old-data") - parser.add_option("-i", "--ini-file", dest="ini_file", type="string", help="Config ini file to parse (required)") - parser.add_option("-f", "--force", dest="force", default=False, action="store_true", help="force index upgrade even if it's the right version") - parser.add_option("-v", "--verbose", dest="verbose", action="store_true", help="use for verbose logging") - parser.add_option("-s", "--service-filter", dest="service_filter", default=None, type="string", help="run commands only selected services (comma separated: LOGSEARCH,ATLAS,RANGER)") - parser.add_option("-c", "--collection", dest="collection", default=None, type="string", help="selected collection to run an operation") - parser.add_option("--async", dest="async", action="store_true", default=False, help="async Ambari operations (backup | restore | migrate)") - parser.add_option("--index-location", dest="index_location", type="string", help="location of the index backups. add ranger/atlas prefix after the path. required only if no backup path in the ini file") - parser.add_option("--atlas-index-location", dest="atlas_index_location", type="string", help="location of the index backups (for atlas). required only if no backup path in the ini file") - parser.add_option("--ranger-index-location", dest="ranger_index_location", type="string", help="location of the index backups (for ranger). required only if no backup path in the ini file") - - parser.add_option("--version", dest="index_version", type="string", default="6.6.2", help="lucene index version for migration (6.6.2 or 7.4.0)") - parser.add_option("--solr-async-request-tries", dest="solr_async_request_tries", type="int", default=400, help="number of max tries for async Solr requests (e.g.: delete operation)") - parser.add_option("--request-tries", dest="request_tries", type="int", help="number of tries for BACKUP/RESTORE status api calls in the request") - parser.add_option("--request-time-interval", dest="request_time_interval", type="int", help="time interval between BACKUP/RESTORE status api calls in the request") - parser.add_option("--request-async", dest="request_async", action="store_true", default=False, help="skip BACKUP/RESTORE status api calls from the command") - parser.add_option("--transport-read-block-size", dest="transport_read_block_size", type="string", help="block size to use for reading from solr during transport",default=10000) - parser.add_option("--transport-write-block-size", dest="transport_write_block_size", type="string", help="number of records in the output files during transport", default=100000) - parser.add_option("--include-solr-hosts", dest="include_solr_hosts", type="string", help="comma separated list of included solr hosts") - parser.add_option("--exclude-solr-hosts", dest="exclude_solr_hosts", type="string", help="comma separated list of excluded solr hosts") - parser.add_option("--disable-solr-host-check", dest="disable_solr_host_check", action="store_true", default=False, help="Disable to check solr hosts are good for the collection backups") - parser.add_option("--core-filter", dest="core_filter", default=None, type="string", help="core filter for replica folders") - parser.add_option("--skip-cores", dest="skip_cores", default=None, type="string", help="specific cores to skip (comma separated)") - parser.add_option("--hdfs-base-path", dest="hdfs_base_path", default=None, type="string", help="hdfs base path where the collections are located (e.g.: 
/user/infrasolr). Use if both atlas and ranger collections are on hdfs.") - parser.add_option("--ranger-hdfs-base-path", dest="ranger_hdfs_base_path", default=None, type="string", help="hdfs base path where the ranger collection is located (e.g.: /user/infra-solr). Use if only ranger collection is on hdfs.") - parser.add_option("--atlas-hdfs-base-path", dest="atlas_hdfs_base_path", default=None, type="string", help="hdfs base path where the atlas collections are located (e.g.: /user/infra-solr). Use if only atlas collections are on hdfs.") - parser.add_option("--keep-backup", dest="keep_backup", default=False, action="store_true", help="If it is turned on, Snapshot Solr data will not be deleted from the filesystem during restore.") - parser.add_option("--batch-interval", dest="batch_interval", type="int", default=60 ,help="batch time interval (seconds) between requests (for restarting INFRA SOLR, default: 60)") - parser.add_option("--batch-fault-tolerance", dest="batch_fault_tolerance", type="int", default=0 ,help="fault tolerance of tasks for batch request (for restarting INFRA SOLR, default: 0)") - parser.add_option("--shared-drive", dest="shared_drive", default=False, action="store_true", help="Use if the backup location is shared between hosts. (override config from config ini file)") - parser.add_option("--skip-json-dump-files", dest="skip_json_dump_files", type="string", help="comma separated list of files that won't be download during collection dump (could be useful if it is required to change something in manually in the already downloaded file)") - parser.add_option("--skip-index-size", dest="skip_index_size", default=False, action="store_true", help="Skip index size check for check-shards or check-backup-shards") - parser.add_option("--skip-warnings", dest="skip_warnings", default=False, action="store_true", help="Pass check-shards or check-backup-shards even if there are warnings") - (options, args) = parser.parse_args() - - set_log_level(options.verbose) - - if options.verbose: - print "Run command with args: {0}".format(str(sys.argv)) - - validate_ini_file(options, parser) - - config = ConfigParser.RawConfigParser() - config.read(options.ini_file) - - command_start_time = time.time() - - service_filter=options.service_filter.upper().split(',') if options.service_filter is not None else ['LOGSEARCH', 'ATLAS', 'RANGER'] - - if options.action is None: - parser.print_help() - print 'action option is missing' - sys.exit(1) - else: - if config.has_section('ambari_server'): - host = config.get('ambari_server', 'host') - port = config.get('ambari_server', 'port') - protocol = config.get('ambari_server', 'protocol') - username = config.get('ambari_server', 'username') - password = config.get('ambari_server', 'password') - accessor = api_accessor(host, username, password, protocol, port) - - if config.has_section('infra_solr') and config.has_option('infra_solr', 'hosts'): - local_host = socket.getfqdn() - solr_hosts = config.get('infra_solr', 'hosts') - if solr_hosts and local_host not in solr_hosts.split(","): - print "{0}WARNING{1}: Host '{2}' is not found in Infra Solr hosts ({3}). Migration commands won't work from here." 
\ - .format(colors.WARNING, colors.ENDC, local_host, solr_hosts) - if options.action.lower() == 'backup': - backup_ranger_configs(options, config, service_filter) - backup_collections(options, accessor, parser, config, service_filter) - elif options.action.lower() == 'delete-collections': - delete_collections(options, config, service_filter) - delete_znodes(options, config, service_filter) - upgrade_ranger_schema(options, config, service_filter) - elif options.action.lower() == 'cleanup-znodes': - delete_znodes(options, config, service_filter) - upgrade_ranger_schema(options, config, service_filter) - elif options.action.lower() == 'backup-and-cleanup': - backup_ranger_configs(options, config, service_filter) - backup_collections(options, accessor, parser, config, service_filter) - delete_collections(options, config, service_filter) - delete_znodes(options, config, service_filter) - upgrade_ranger_schema(options, config, service_filter) - elif options.action.lower() == 'restore': - upgrade_ranger_solrconfig_xml(options, config, service_filter) - create_backup_collections(options, accessor, parser, config, service_filter) - restore_collections(options, accessor, parser, config, service_filter) - update_state_jsons(options, accessor, parser, config, service_filter) - elif options.action.lower() == 'update-collection-state': - update_state_jsons(options, accessor, parser, config, service_filter) - elif options.action.lower() == 'reload': - reload_collections(options, accessor, parser, config, service_filter) - elif options.action.lower() == 'migrate': - migrate_snapshots(options, accessor, parser, config, service_filter) - elif options.action.lower() == 'upgrade-solr-clients': - upgrade_solr_clients(options, accessor, parser, config) - elif options.action.lower() == 'upgrade-solr-instances': - upgrade_solr_instances(options, accessor, parser, config) - elif options.action.lower() == 'upgrade-logsearch-portal': - if is_logsearch_available(config, service_filter): - upgrade_logsearch_portal(options, accessor, parser, config) - else: - print "LOGSEARCH service has not found in the config or filtered out." - elif options.action.lower() == 'upgrade-logfeeders': - if is_logsearch_available(config, service_filter): - upgrade_logfeeders(options, accessor, parser, config) - else: - print "LOGSEARCH service has not found in the config or filtered out." - elif options.action.lower() == 'stop-logsearch': - if is_logsearch_available(config, service_filter): - service_components_command(options, accessor, parser, config, LOGSEARCH_SERVICE_NAME, LOGSEARCH_SERVER_COMPONENT_NAME, "STOP", "Stop") - service_components_command(options, accessor, parser, config, LOGSEARCH_SERVICE_NAME, LOGSEARCH_LOGFEEDER_COMPONENT_NAME, "STOP", "Stop") - else: - print "LOGSEARCH service has not found in the config or filtered out." - elif options.action.lower() == 'restart-solr': - service_components_command(options, accessor, parser, config, SOLR_SERVICE_NAME, SOLR_COMPONENT_NAME, "RESTART", "Restart") - elif options.action.lower() == 'restart-logsearch': - if is_logsearch_available(config, service_filter): - service_components_command(options, accessor, parser, config, LOGSEARCH_SERVICE_NAME, LOGSEARCH_SERVER_COMPONENT_NAME, "RESTART", "Restart") - service_components_command(options, accessor, parser, config, LOGSEARCH_SERVICE_NAME, LOGSEARCH_LOGFEEDER_COMPONENT_NAME, "RESTART", "Restart") - else: - print "LOGSEARCH service has not found in the config or filtered out." 
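The long if/elif chain above (continuing below) maps each --action value onto its handler calls. As an aside, the same mapping can be expressed as a dispatch table; the sketch below is only illustrative, with hypothetical placeholder handlers rather than the script's real ones, and is not part of the deleted file:

import sys

def backup(config):
    # hypothetical placeholder for the real backup handlers
    print("backup called")

def cleanup_znodes(config):
    # hypothetical placeholder for the znode cleanup + Ranger schema upgrade steps
    print("cleanup-znodes called")

ACTIONS = {
    "backup": backup,
    "cleanup-znodes": cleanup_znodes,
}

def dispatch(action, config):
    # Look up the handler for the requested action; unknown actions exit with an error.
    handler = ACTIONS.get(action.lower())
    if handler is None:
        print("action option is invalid (available actions: {0})".format(" | ".join(sorted(ACTIONS))))
        sys.exit(1)
    handler(config)

if __name__ == "__main__":
    dispatch("backup", config={})

The original chain keeps each action's argument list explicit inline, which is a reasonable trade-off given that the handlers take different argument combinations.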
- elif options.action.lower() == 'restart-atlas': - if is_atlas_available(config, service_filter): - service_components_command(options, accessor, parser, config, ATLAS_SERVICE_NAME, ATLAS_SERVER_COMPONENT_NAME, "RESTART", "Restart") - else: - print "ATLAS service has not found in the config or filtered out." - elif options.action.lower() == 'restart-ranger': - if is_ranger_available(config, service_filter): - service_components_command(options, accessor, parser, config, RANGER_SERVICE_NAME, RANGER_ADMIN_COMPONENT_NAME, "RESTART", "Restart") - else: - print "RANGER service has not found in the config or filtered out." - elif options.action.lower() == 'rolling-restart-ranger': - if is_ranger_available(config, service_filter): - rolling_restart(options, accessor, parser, config, RANGER_SERVICE_NAME, RANGER_ADMIN_COMPONENT_NAME, "Rolling Restart Ranger Admin Instances") - else: - print "RANGER service has not found in the config or filtered out." - elif options.action.lower() == 'rolling-restart-atlas': - if is_atlas_available(config, service_filter): - rolling_restart(options, accessor, parser, config, ATLAS_SERVICE_NAME, ATLAS_SERVER_COMPONENT_NAME, "Rolling Restart Atlas Server Instances") - else: - print "ATLAS service has not found in the config or filtered out." - elif options.action.lower() == 'rolling-restart-solr': - rolling_restart(options, accessor, parser, config, SOLR_SERVICE_NAME, SOLR_COMPONENT_NAME, "Rolling Restart Infra Solr Instances") - elif options.action.lower() == 'enable-solr-authorization': - set_solr_authorization(options, accessor, parser, config, True) - elif options.action.lower() == 'disable-solr-authorization': - set_solr_authorization(options, accessor, parser, config, False) - elif options.action.lower() == 'fix-solr5-kerberos-config': - set_solr_authorization(options, accessor, parser, config, False, True) - elif options.action.lower() == 'fix-solr7-kerberos-config': - set_solr_authorization(options, accessor, parser, config, True, True) - elif options.action.lower() == 'check-shards': - check_shards(options, accessor, parser, config) - elif options.action.lower() == 'check-backup-shards': - check_shards(options, accessor, parser, config, backup_shards=True) - elif options.action.lower() == 'check-docs': - check_docs(options, accessor, parser, config) - elif options.action.lower() == 'transport-old-data': - check_docs(options, accessor, parser, config) - transfer_old_data(options, accessor, parser, config) - check_docs(options, accessor, parser, config) - else: - parser.print_help() - print 'action option is invalid (available actions: delete-collections | backup | cleanup-znodes | backup-and-cleanup | migrate | restore |' \ - ' rolling-restart-solr | rolling-restart-ranger | rolling-restart-atlas | check-shards | check-backup-shards | check-docs | enable-solr-authorization |'\ - ' disable-solr-authorization | fix-solr5-kerberos-config | fix-solr7-kerberos-config | upgrade-solr-clients | upgrade-solr-instances | upgrade-logsearch-portal |' \ - ' upgrade-logfeeders | stop-logsearch | restart-solr |' \ - ' restart-logsearch | restart-ranger | restart-atlas | transport-old-data )' - sys.exit(1) - command_elapsed_time = time.time() - command_start_time - time_to_print = time.strftime("%H:%M:%S", time.gmtime(command_elapsed_time)) - print 30 * "-" - print "Command elapsed time: {0}".format(time_to_print) - print 30 * "-" - print "Migration helper command {0}FINISHED{1}".format(colors.OKGREEN, colors.ENDC) \ No newline at end of file diff --git 
a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py deleted file mode 100755 index e02c4919366..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py +++ /dev/null @@ -1,823 +0,0 @@ -#!/usr/bin/python - -''' -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -''' - -import gzip -import hashlib -import json -import logging -import optparse -import os -import shutil -import signal -import sys -import tarfile -import time -from datetime import datetime, timedelta -from subprocess import call, Popen, PIPE -from urllib import quote, unquote -from zipfile import ZipFile, ZIP_DEFLATED - -VERSION = "1.0" - -logger = logging.getLogger() -handler = logging.StreamHandler() -formatter = logging.Formatter("%(asctime)s - %(message)s") -handler.setFormatter(formatter) -logger.addHandler(handler) -verbose = False - -def parse_arguments(): - parser = optparse.OptionParser("usage: %prog [options]", version="Solr Data Manager {0}".format(VERSION)) - - parser.add_option("-m", "--mode", dest="mode", type="string", help="archive | delete | save") - parser.add_option("-s", "--solr-url", dest="solr_url", type="string", help="the url of the solr server including the port and protocol") - parser.add_option("-c", "--collection", dest="collection", type="string", help="the name of the solr collection") - parser.add_option("-f", "--filter-field", dest="filter_field", type="string", help="the name of the field to filter on") - parser.add_option("-r", "--read-block-size", dest="read_block_size", type="int", help="block size to use for reading from solr", - default=1000) - parser.add_option("-w", "--write-block-size", dest="write_block_size", type="int", help="number of records in the output files", - default=100000) - parser.add_option("-i", "--id-field", dest="id_field", type="string", help="the name of the id field", default="id") - - end_group = optparse.OptionGroup(parser, "specifying the end of the range") - end_group.add_option("-e", "--end", dest="end", type="string", help="end of the range") - end_group.add_option("-d", "--days", dest="days", type="int", help="number of days to keep") - parser.add_option_group(end_group) - - parser.add_option("-o", "--date-format", dest="date_format", type="string", help="the date format to use for --days", - default="%Y-%m-%dT%H:%M:%S.%fZ") - - parser.add_option("-q", "--additional-filter", dest="additional_filter", type="string", help="additional solr filter") - parser.add_option("-j", "--name", dest="name", type="string", help="name included in result files") - - parser.add_option("-g", "--ignore-unfinished-uploading", dest="ignore_unfinished_uploading", action="store_true", default=False) - - parser.add_option("--json-file", dest="json_file", 
help="create a json file instead of line delimited json", action="store_true", default=False) - parser.add_option("-z", "--compression", dest="compression", help="none | tar.gz | tar.bz2 | zip | gz", default="gz") - - parser.add_option("-k", "--solr-keytab", dest="solr_keytab", type="string", help="the keytab for a kerberized solr") - parser.add_option("-n", "--solr-principal", dest="solr_principal", type="string", help="the principal for a kerberized solr") - - parser.add_option("-a", "--hdfs-keytab", dest="hdfs_keytab", type="string", help="the keytab for a kerberized hdfs") - parser.add_option("-l", "--hdfs-principal", dest="hdfs_principal", type="string", help="the principal for a kerberized hdfs") - - parser.add_option("-u", "--hdfs-user", dest="hdfs_user", type="string", help="the user for accessing hdfs") - parser.add_option("-p", "--hdfs-path", dest="hdfs_path", type="string", help="the hdfs path to upload to") - - parser.add_option("-t", "--key-file-path", dest="key_file_path", type="string", help="the file that contains S3 ,") - parser.add_option("-b", "--bucket", dest="bucket", type="string", help="the bucket name for S3 upload") - parser.add_option("-y", "--key-prefix", dest="key_prefix", type="string", help="the key prefix for S3 upload") - - parser.add_option("-x", "--local-path", dest="local_path", type="string", help="the local path to save the files to") - - parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False) - - parser.add_option("--solr-output-collection", dest="solr_output_collection", help="target output solr collection for archive", type="string", default=None) - parser.add_option("--solr-output-url", dest="solr_output_url", default=None, type="string", help="the url of the output solr server including the port and protocol") - parser.add_option("--exclude-fields", dest="exclude_fields", help="Comma separated list of excluded fields from json response", type="string", default=None) - parser.add_option("--skip-date-usage", dest="skip_date_usage", action="store_true", default=False, help="datestamp field won't be used for queries (sort based on id field)") - - (options, args) = parser.parse_args() - - for r in ["mode", "solr_url", "collection"]: - if options.__dict__[r] is None: - print "argument '{0}' is mandatory".format(r) - parser.print_help() - sys.exit() - - if not options.skip_date_usage: - if options.filter_field is None: - print "argument 'filter_field' is mandatory" - parser.print_help() - sys.exit() - - mode_values = ["archive", "delete", "save"] - if options.mode not in mode_values: - print "mode must be one of {0}".format(" | ".join(mode_values)) - parser.print_help() - sys.exit() - - if options.mode == "delete": - for r in ["name", "hdfs_keytab", "hdfs_principal", "hdfs_user", "hdfs_path", "key_file_path", "bucket", "key_prefix", "local_path"]: - if options.__dict__[r] is not None: - print "argument '{0}' may not be specified in delete mode".format(r) - parser.print_help() - sys.exit() - - if not options.skip_date_usage and options.__dict__["end"] is None and options.__dict__["days"] is None or \ - options.__dict__["end"] is not None and options.__dict__["days"] is not None: - print "exactly one of 'end' or 'days' must be specfied" - parser.print_help() - sys.exit() - - is_any_solr_kerberos_property = options.__dict__["solr_keytab"] is not None or options.__dict__["solr_principal"] is not None - is_all_solr_kerberos_property = options.__dict__["solr_keytab"] is not None and options.__dict__["solr_principal"] is not None 
- if is_any_solr_kerberos_property and not is_all_solr_kerberos_property: - print "either both 'solr-keytab' and 'solr-principal' must be specfied, or neither of them" - parser.print_help() - sys.exit() - - compression_values = ["none", "tar.gz", "tar.bz2", "zip", "gz"] - if options.compression not in compression_values: - print "compression must be one of {0}".format(" | ".join(compression_values)) - parser.print_help() - sys.exit() - - is_any_solr_output_property = options.__dict__["solr_output_collection"] is not None - - is_any_hdfs_kerberos_property = options.__dict__["hdfs_keytab"] is not None or options.__dict__["hdfs_principal"] is not None - is_all_hdfs_kerberos_property = options.__dict__["hdfs_keytab"] is not None and options.__dict__["hdfs_principal"] is not None - if is_any_hdfs_kerberos_property and not is_all_hdfs_kerberos_property: - print "either both 'hdfs_keytab' and 'hdfs_principal' must be specfied, or neither of them" - parser.print_help() - sys.exit() - - is_any_hdfs_property = options.__dict__["hdfs_user"] is not None or options.__dict__["hdfs_path"] is not None - is_all_hdfs_property = options.__dict__["hdfs_user"] is not None and options.__dict__["hdfs_path"] is not None - if is_any_hdfs_property and not is_all_hdfs_property: - print "either both 'hdfs_user' and 'hdfs_path' must be specfied, or neither of them" - parser.print_help() - sys.exit() - - is_any_s3_property = options.__dict__["key_file_path"] is not None or options.__dict__["bucket"] is not None or \ - options.__dict__["key_prefix"] is not None - is_all_s3_property = options.__dict__["key_file_path"] is not None and options.__dict__["bucket"] is not None and \ - options.__dict__["key_prefix"] is not None - if is_any_s3_property and not is_all_s3_property: - print "either all the S3 arguments ('key_file_path', 'bucket', 'key_prefix') must be specfied, or none of them" - parser.print_help() - sys.exit() - - if options.mode in ["archive", "save"]: - count = (1 if is_any_solr_output_property else 0) + (1 if is_any_hdfs_property else 0) + \ - (1 if is_any_s3_property else 0) + (1 if options.__dict__["local_path"] is not None else 0) - if count != 1: - print "exactly one of the HDFS arguments ('hdfs_user', 'hdfs_path') or the S3 arguments ('key_file_path', 'bucket', 'key_prefix') or the solr arguments ('solr_output_collection') or the 'local_path' argument must be specified" - parser.print_help() - sys.exit() - - if options.__dict__["hdfs_keytab"] is not None and options.__dict__["hdfs_user"] is None: - print "HDFS kerberos keytab and principal may only be specified if the upload target is HDFS" - parser.print_help() - sys.exit() - - print("You are running Solr Data Manager {0} with arguments:".format(VERSION)) - print(" mode: " + options.mode) - print(" solr-url: " + options.solr_url) - print(" collection: " + options.collection) - if options.__dict__["filter_field"] is not None: - print(" filter-field: " + options.filter_field) - if options.mode in ["archive", "save"]: - print(" id-field: " + options.id_field) - if options.__dict__["exclude_fields"] is not None: - print(" exclude fields: " + options.exclude_fields) - if options.__dict__["end"] is not None: - print(" end: " + options.end) - else: - print(" days: " + str(options.days)) - print(" date-format: " + options.date_format) - if options.__dict__["additional_filter"] is not None: - print(" additional-filter: " + str(options.additional_filter)) - if options.__dict__["name"] is not None: - print(" name: " + str(options.name)) - if options.mode in 
["archive", "save"]: - print(" read-block-size: " + str(options.read_block_size)) - print(" write-block-size: " + str(options.write_block_size)) - print(" ignore-unfinished-uploading: " + str(options.ignore_unfinished_uploading)) - if (options.__dict__["solr_keytab"] is not None): - print(" solr-keytab: " + options.solr_keytab) - print(" solr-principal: " + options.solr_principal) - if options.mode in ["archive", "save"]: - print(" output: " + ("json" if options.json_file else "line-delimited-json")) - print(" compression: " + options.compression) - if options.__dict__["solr_output_collection"] is not None: - print(" solr output collection: " + options.solr_output_collection) - if options.__dict__["solr_output_url"] is not None: - print(" solr output url: " + options.solr_output_collection) - if (options.__dict__["hdfs_keytab"] is not None): - print(" hdfs-keytab: " + options.hdfs_keytab) - print(" hdfs-principal: " + options.hdfs_principal) - if (options.__dict__["hdfs_user"] is not None): - print(" hdfs-user: " + options.hdfs_user) - print(" hdfs-path: " + options.hdfs_path) - if (options.__dict__["key_file_path"] is not None): - print(" key-file-path: " + options.key_file_path) - print(" bucket: " + options.bucket) - print(" key-prefix: " + options.key_prefix) - if (options.__dict__["local_path"] is not None): - print(" local-path: " + options.local_path) - print (" skip-date-usage: " + str(options.skip_date_usage)) - print(" verbose: " + str(options.verbose)) - print - - if options.__dict__["additional_filter"] is not None and options.__dict__["name"] is None: - go = False - while not go: - sys.stdout.write("It is recommended to set --name in case of any additional filter is set.\n") - sys.stdout.write("Are you sure that you want to proceed without a name (yes/no)? 
") - choice = raw_input().lower() - if choice in ['yes', 'ye', 'y']: - go = True - elif choice in ['no', 'n']: - sys.exit() - - return options - -def set_log_level(disable=False): - if verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - if disable: - logger.removeHandler(handler) - -def get_end(options): - if options.end: - return options.end - else: - d = datetime.now() - timedelta(days=options.days) - end = d.strftime(options.date_format) - logger.info("The end date will be: %s", end) - return end - -def delete(solr_url, collection, filter_field, end, solr_keytab, solr_principal, skip_date_usage): - logger.info("Deleting data where %s <= %s", filter_field, end) - solr_kinit_command = None - if solr_keytab: - solr_kinit_command = "kinit -kt {0} {1}".format(solr_keytab, solr_principal) - curl_prefix = "curl -k --negotiate -u : " - else: - curl_prefix = "curl -k" - if skip_date_usage: - delete_query = "*:*" - else: - delete_query = "{0}:[* TO \"{1}\"]".format(filter_field, end) - delete_command = "{0}/{1}/update?commit=true&wt=json".format(solr_url, collection) - delete_data = "{0}".format(delete_query) - - query_solr(solr_kinit_command, delete_command, "{0} -H Content-Type:text/xml {1}".format(curl_prefix, delete_command), "Deleting", delete_data) - -def save(mode, solr_url, collection, filter_field, id_field, range_end, read_block_size, write_block_size, - ignore_unfinished_uploading, additional_filter, name, solr_keytab, solr_principal, json_file, - compression, hdfs_keytab, hdfs_principal, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path, - solr_output_collection, solr_output_url, exclude_fields, skip_date_usage): - solr_kinit_command = None - if solr_keytab: - solr_kinit_command = "kinit -kt {0} {1}".format(solr_keytab, solr_principal) - curl_prefix = "curl -k --negotiate -u : " - else: - curl_prefix = "curl -k" - - hdfs_kinit_command = None - if hdfs_keytab: - hdfs_kinit_command = "sudo -u {0} kinit -kt {1} {2}".format(hdfs_user, hdfs_keytab, hdfs_principal) - - if hdfs_path: - ensure_hdfs_path(hdfs_kinit_command, hdfs_user, hdfs_path) - - working_dir = get_working_dir(solr_url, collection) - if mode == "archive": - handle_unfinished_uploading(solr_kinit_command, hdfs_kinit_command, curl_prefix, working_dir, - ignore_unfinished_uploading, skip_date_usage) - - save_data(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field, - range_end, read_block_size, write_block_size, working_dir, additional_filter, name, json_file, compression, - hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path, solr_output_collection, solr_output_url, - exclude_fields, skip_date_usage) - -def ensure_hdfs_path(hdfs_kinit_command, hdfs_user, hdfs_path): - if hdfs_kinit_command: - run_kinit(hdfs_kinit_command, "HDFS") - - try: - hdfs_create_dir_command = "sudo -u {0} hadoop fs -mkdir -p {1}".format(hdfs_user, hdfs_path) - logger.debug("Ensuring that the HDFS path %s exists:\n%s", hdfs_path, hdfs_create_dir_command) - result = call(hdfs_create_dir_command.split()) - except Exception as e: - print - logger.warn("Could not execute hdfs ensure dir command:\n%s", hdfs_create_dir_command) - logger.warn(str(e)) - sys.exit() - - if result != 0: - print - logger.warn("Could not ensure HDFS dir command:\n%s", hdfs_create_dir_command) - logger.warn(str(err)) - sys.exit() - -def get_working_dir(solr_url, collection): - md5 = hashlib.md5() - md5.update(solr_url) - md5.update(collection) - hash = md5.hexdigest() 
- working_dir = "/tmp/solrDataManager/{0}".format(hash) - - if not(os.path.isdir(working_dir)): - os.makedirs(working_dir) - - logger.debug("Working directory is %s", working_dir) - return working_dir - -def handle_unfinished_uploading(solr_kinit_command, hdfs_kinit_command, curl_prefix, working_dir, ignore_unfinished_uploading, skip_date_usage): - command_json_path = "{0}/command.json".format(working_dir) - if os.path.isfile(command_json_path): - with open(command_json_path) as command_file: - command = json.load(command_file) - - if "upload" in command.keys() and ignore_unfinished_uploading: - logger.info("Ignoring unfinished uploading left by previous run") - os.remove(command_json_path) - return - - if "upload" in command.keys(): - logger.info("Previous run has left unfinished uploading") - logger.info("You may try to run the program with '-g' or '--ignore-unfinished-uploading' to ignore it if it keeps on failing") - - if command["upload"]["type"] == "solr": - upload_file_to_solr(solr_kinit_command, curl_prefix, command["upload"]["command"], command["upload"]["upload_file_path"], - command["upload"]["solr_output_collection"]) - elif command["upload"]["type"] == "hdfs": - upload_file_hdfs(hdfs_kinit_command, command["upload"]["command"], command["upload"]["upload_file_path"], - command["upload"]["hdfs_path"], command["upload"]["hdfs_user"]) - elif command["upload"]["type"] == "s3": - upload_file_s3(command["upload"]["command"], command["upload"]["upload_file_path"], command["upload"]["bucket"], - command["upload"]["key_prefix"]) - elif command["upload"]["type"] == "local": - upload_file_local(command["upload"]["command"], command["upload"]["upload_file_path"], command["upload"]["local_path"]) - else: - logger.warn("Unknown upload type: %s", command["upload"]["type"]) - sys.exit() - - if "delete" in command.keys(): - delete_data(solr_kinit_command, curl_prefix, command["delete"]["command"], command["delete"]["collection"], - command["delete"]["filter_field"], command["delete"]["id_field"], command["delete"]["prev_lot_end_value"], - command["delete"]["prev_lot_end_id"], skip_date_usage) - - os.remove(command_json_path) - -def save_data(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field, - range_end, read_block_size, write_block_size, working_dir, additional_filter, name, json_file, - compression, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path, solr_output_collection, - solr_output_url, exclude_fields, skip_date_usage): - logger.info("Starting to save data") - - tmp_file_path = "{0}/tmp.json".format(working_dir) - - prev_lot_end_value = None - prev_lot_end_id = None - - if skip_date_usage: - if additional_filter: - q = quote("*:*+AND+{0}".format(additional_filter), safe="/+\"*") - else: - q = quote("*:*", safe="/+\"*") - sort = quote("{0}+asc".format(id_field), safe="/+\"*") - else: - if additional_filter: - q = quote("{0}+AND+{1}:[*+TO+\"{2}\"]".format(additional_filter, filter_field, range_end), safe="/+\"*") - else: - q = quote("{0}:[*+TO+\"{1}\"]".format(filter_field, range_end), safe="/+\"*") - sort = quote("{0}+asc,{1}+asc".format(filter_field, id_field), safe="/+\"*") - - solr_query_url_prefix = "{0}/{1}/select?q={2}&sort={3}&rows={4}&wt=json".format(solr_url, collection, q, sort, read_block_size) - - exclude_field_list = exclude_fields.split(',') if exclude_fields else None - if solr_output_collection and not exclude_field_list: - exclude_field_list = ['_version_'] - - done = False - total_records = 0 - while 
not done: - results = create_block(tmp_file_path, solr_kinit_command, curl_prefix, solr_query_url_prefix, filter_field, - id_field, range_end, write_block_size, prev_lot_end_value, prev_lot_end_id, json_file, - exclude_field_list, skip_date_usage) - done = results[0] - records = results[1] - prev_lot_end_value = results[2] - prev_lot_end_id = results[3] - - if records > 0: - upload_block(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, - id_field, working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user, - hdfs_path, key_file_path, bucket, key_prefix, local_path, compression, solr_output_collection, - solr_output_url, skip_date_usage) - total_records += records - logger.info("A total of %d records are saved", total_records) - -def create_block(tmp_file_path, solr_kinit_command, curl_prefix, solr_query_url_prefix, filter_field, id_field, range_end, - write_block_size, prev_lot_end_value, prev_lot_end_id, json_file, exclude_field_list, skip_date_usage): - if os.path.exists(tmp_file_path): - os.remove(tmp_file_path) - tmp_file = open(tmp_file_path, 'w') - logger.debug("Created tmp file %s", tmp_file_path) - - init_file(tmp_file, json_file) - records = 0 - done = False - while records < write_block_size: - if skip_date_usage: - if prev_lot_end_id: - fq = "({0}:{{\"{1}\"+TO+*])".format(id_field, prev_lot_end_id) - url = "{0}&fq={1}".format(solr_query_url_prefix, quote(fq, safe="/+\"*")) - else: - url = "{0}".format(solr_query_url_prefix) - else: - if prev_lot_end_value: - fq_prev_end_rest = "({0}:\"{1}\"+AND+{2}:{{\"{3}\"+TO+*])".format(filter_field, prev_lot_end_value, id_field, - prev_lot_end_id) - fq_new = "{0}:{{\"{1}\"+TO+\"{2}\"]".format(filter_field, prev_lot_end_value, range_end) - fq = "{0}+OR+{1}".format(fq_prev_end_rest, fq_new) - else: - fq = "{0}:[*+TO+\"{1}\"]".format(filter_field, range_end) - - url = "{0}&fq={1}".format(solr_query_url_prefix, quote(fq, safe="/+\"*")) - - curl_command = "{0} {1}".format(curl_prefix, url) - - rsp = query_solr(solr_kinit_command, url, curl_command, "Obtaining") - - if rsp['response']['numFound'] == 0: - done = True - break - - for doc in rsp['response']['docs']: - last_doc = doc - add_line(tmp_file, doc, json_file, records, exclude_field_list) - records += 1 - if records == write_block_size: - break - - prev_lot_end_value = last_doc[filter_field] if not skip_date_usage else prev_lot_end_value - prev_lot_end_id = last_doc[id_field] - sys.stdout.write("\r{0} records are written".format(records)) - sys.stdout.flush() - if verbose and records < write_block_size: - print - logger.debug("Collecting next lot of data") - - finish_file(tmp_file, json_file) - sys.stdout.write("\n") - logger.debug("Finished data collection") - return [done, records, prev_lot_end_value, prev_lot_end_id] - -def init_file(tmp_file, json_file): - if json_file: - tmp_file.write("{\n") - -def add_line(tmp_file, doc, json_file, records, exclude_fields): - if records > 0: - if json_file: - tmp_file.write(",\n") - else: - tmp_file.write("\n") - if exclude_fields: - for exclude_field in exclude_fields: - if doc and exclude_field in doc: - del doc[exclude_field] - - tmp_file.write(json.dumps(doc)) - -def finish_file(tmp_file, json_file): - if json_file: - tmp_file.write("\n}") - -def upload_block(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, - id_field, working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path, - key_file_path, bucket, 
key_prefix, local_path, compression, solr_output_collection, solr_output_url, - skip_date_usage): - if name: - file_name = "{0}_-_{1}_-_{2}_-_{3}".format(collection, name, prev_lot_end_value, prev_lot_end_id).replace(':', '_') - else: - file_name = "{0}_-_{1}_-_{2}".format(collection, prev_lot_end_value, prev_lot_end_id).replace(':', '_') - - upload_file_path = compress_file(working_dir, tmp_file_path, file_name, compression) - - upload_command = create_command_file(mode, True, working_dir, upload_file_path, solr_url, collection, filter_field, - id_field, prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path, - key_file_path, bucket, key_prefix, local_path, solr_output_collection, solr_output_url, - skip_date_usage) - if solr_output_collection: - upload_file_to_solr(solr_kinit_command, curl_prefix, upload_command, upload_file_path, solr_output_collection) - elif hdfs_user: - upload_file_hdfs(hdfs_kinit_command, upload_command, upload_file_path, hdfs_path, hdfs_user) - elif key_file_path: - upload_file_s3(upload_command, upload_file_path, bucket, key_prefix) - elif local_path: - upload_file_local(upload_command, upload_file_path, local_path) - else: - logger.warn("Unknown upload destination") - sys.exit() - - delete_command = create_command_file(mode, False, working_dir, upload_file_path, solr_url, collection, filter_field, - id_field, prev_lot_end_value, prev_lot_end_id, None, None, None, None, None, None, None, - None, skip_date_usage) - if mode == "archive": - delete_data(solr_kinit_command, curl_prefix, delete_command, collection, filter_field, id_field, prev_lot_end_value, - prev_lot_end_id, skip_date_usage) - os.remove("{0}/command.json".format(working_dir)) - -def compress_file(working_dir, tmp_file_path, file_name, compression): - data_file_name = "{0}.json".format(file_name) - if compression == "none": - upload_file_path = "{0}/{1}.json".format(working_dir, file_name) - os.rename(tmp_file_path, upload_file_path) - elif compression == "tar.gz": - upload_file_path = "{0}/{1}.json.tar.gz".format(working_dir, file_name) - tar = tarfile.open(upload_file_path, mode="w:gz") - try: - tar.add(tmp_file_path, arcname=data_file_name) - finally: - tar.close() - elif compression == "tar.bz2": - upload_file_path = "{0}/{1}.json.tar.bz2".format(working_dir, file_name) - tar = tarfile.open(upload_file_path, mode="w:bz2") - try: - tar.add(tmp_file_path, arcname=data_file_name) - finally: - tar.close() - elif compression == "zip": - upload_file_path = "{0}/{1}.json.zip".format(working_dir, file_name) - zip = ZipFile(upload_file_path, 'w') - zip.write(tmp_file_path, data_file_name, ZIP_DEFLATED) - elif compression == "gz": - upload_file_path = "{0}/{1}.json.gz".format(working_dir, file_name) - gz = gzip.open(upload_file_path, mode="wb") - f = open(tmp_file_path) - try: - shutil.copyfileobj(f, gz) - finally: - gz.close() - f.close() - else: - logger.warn("Unknown compression type") - sys.exit() - - logger.info("Created data file %s", data_file_name) - - - return upload_file_path - -def create_command_file(mode, upload, working_dir, upload_file_path, solr_url, collection, filter_field, id_field, - prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, - local_path, solr_output_collection, solr_output_url, skip_date_usage): - commands = {} - - if upload: - logger.debug("Creating command file with upload and delete instructions in case of an interruption") - else: - logger.debug("Creating command file with delete instructions in case of an interruption") - - if 
upload: - if solr_output_collection: - command_url = solr_output_url if solr_output_url else solr_url - upload_command = "{0}/{1}/update/json/docs?commit=true&wt=json --data-binary @{2}"\ - .format(command_url, solr_output_collection, upload_file_path) - upload_command_data = {} - upload_command_data["type"] = "solr" - upload_command_data["command"] = upload_command - upload_command_data["upload_file_path"] = upload_file_path - upload_command_data["solr_output_collection"] = solr_output_collection - commands["upload"] = upload_command_data - elif hdfs_path: - upload_command = "sudo -u {0} hadoop fs -put {1} {2}".format(hdfs_user, upload_file_path, hdfs_path) - upload_command_data = {} - upload_command_data["type"] = "hdfs" - upload_command_data["command"] = upload_command - upload_command_data["upload_file_path"] = upload_file_path - upload_command_data["hdfs_path"] = hdfs_path - upload_command_data["hdfs_user"] = hdfs_user - commands["upload"] = upload_command_data - elif key_file_path: - upload_command = "java -cp {0}/libs/* org.apache.ambari.infra.solr.S3Uploader {1} {2} {3} {4}".format( \ - os.path.dirname(os.path.realpath(__file__)), key_file_path, bucket, key_prefix, upload_file_path) - upload_command_data = {} - upload_command_data["type"] = "s3" - upload_command_data["command"] = upload_command - upload_command_data["upload_file_path"] = upload_file_path - upload_command_data["bucket"] = bucket - upload_command_data["key_prefix"] = key_prefix - commands["upload"] = upload_command_data - elif local_path: - upload_command = "mv {0} {1}".format(upload_file_path, local_path) - upload_command_data = {} - upload_command_data["type"] = "local" - upload_command_data["command"] = upload_command - upload_command_data["upload_file_path"] = upload_file_path - upload_command_data["local_path"] = local_path - commands["upload"] = upload_command_data - else: - logger.warn("Unknown upload destination") - sys.exit() - - if mode == "save": - return upload_command - - if skip_date_usage: - delete_query = "({0}:[*+TO+\"{1}\"])".format(id_field, prev_lot_end_id) - else: - delete_prev = "{0}:[*+TO+\"{1}\"]".format(filter_field, prev_lot_end_value) - delete_last = "({0}:\"{1}\"+AND+{2}:[*+TO+\"{3}\"])".format(filter_field, prev_lot_end_value, id_field, prev_lot_end_id) - delete_query = "{0}+OR+{1}".format(delete_prev, delete_last) - - delete_command = "{0}/{1}/update?commit=true&wt=json --data-binary {2}" \ - .format(solr_url, collection, delete_query) - if mode == "save": - return delete_command - - delete_command_data = {} - delete_command_data["command"] = delete_command - delete_command_data["collection"] = collection - delete_command_data["filter_field"] = filter_field - delete_command_data["id_field"] = id_field - delete_command_data["prev_lot_end_value"] = prev_lot_end_value - delete_command_data["prev_lot_end_id"] = prev_lot_end_id - commands["delete"] = delete_command_data - - command_file_path = "{0}/command.json".format(working_dir) - command_file_path_tmp = "{0}.tmp".format(command_file_path) - cft = open(command_file_path_tmp, 'w') - cft.write(json.dumps(commands, indent=4)) - os.rename(command_file_path_tmp, command_file_path) - - logger.debug("Command file %s was created", command_file_path) - - if upload: - return upload_command - else: - return delete_command - -def upload_file_hdfs(hdfs_kinit_command, upload_command, upload_file_path, hdfs_path, hdfs_user): - if hdfs_kinit_command: - run_kinit(hdfs_kinit_command, "HDFS") - - try: - hdfs_file_exists_command = "sudo -u {0} hadoop fs -test 
-e {1}".format(hdfs_user, hdfs_path + os.path.basename(upload_file_path)) - logger.debug("Checking if file already exists on hdfs:\n%s", hdfs_file_exists_command) - hdfs_file_exists = (0 == call(hdfs_file_exists_command.split())) - except Exception as e: - print - logger.warn("Could not execute command to check if file already exists on HDFS:\n%s", hdfs_file_exists_command) - logger.warn(str(e)) - sys.exit() - - if os.path.isfile(upload_file_path) and not hdfs_file_exists: - try: - logger.debug("Uploading file to hdfs:\n%s", upload_command) - result = call(upload_command.split()) - except Exception as e: - print - logger.warn("Could not execute command to upload file to HDFS:\n%s", upload_command) - logger.warn(str(e)) - sys.exit() - - if result != 0: - logger.warn("Could not upload file to HDFS with command:\n%s", upload_command) - sys.exit() - - logger.info("File %s was uploaded to hdfs %s", os.path.basename(upload_file_path), hdfs_path) - os.remove(upload_file_path) - -def upload_file_s3(upload_command, upload_file_path, bucket, key_prefix): - if os.path.isfile(upload_file_path): - try: - logger.debug("Uploading file to s3:\n%s", upload_command) - result = call(upload_command.split()) - except Exception as e: - print - logger.warn("Could not execute command to upload file to S3:\n%s", upload_command) - logger.warn(str(e)) - sys.exit() - - if result != 0: - logger.warn("Could not upload file to S3 with command:\n%s", upload_command) - sys.exit() - - logger.info("File %s was uploaded to s3 bucket '%s', key '%s'", os.path.basename(upload_file_path), bucket, - key_prefix + os.path.basename(upload_file_path)) - os.remove(upload_file_path) - -def upload_file_local(upload_command, upload_file_path, local_path): - if os.path.exists(local_path) and not os.path.isdir(local_path): - logger.warn("Local path %s exists, but not a directory, can not save there", local_path) - if not os.path.isdir(local_path): - os.mkdir(local_path) - logger.debug("Directory %s was created", local_path) - - try: - logger.debug("Moving file to local directory %s with command\n%s", local_path, upload_command) - call(upload_command.split()) - logger.info("File %s was moved to local directory %s", os.path.basename(upload_file_path), local_path) - except Exception as e: - print - logger.warn("Could not execute move command command:\n%s", upload_command) - logger.warn(str(e)) - sys.exit() - -def upload_file_to_solr(solr_kinit_command, curl_prefix, upload_command, upload_file_path, collection): - if os.path.isfile(upload_file_path): - query_solr(solr_kinit_command, upload_command, "{0} -H Content-type:application/json {1}".format(curl_prefix, upload_command), "Saving") - logger.info("Save data to collection: %s", collection) - -def delete_data(solr_kinit_command, curl_prefix, delete_command, collection, filter_field, id_field, prev_lot_end_value, - prev_lot_end_id, skip_date_usage): - delete_cmd = delete_command.split(" --data-binary")[0] - delete_query_data = delete_command.split("--data-binary ")[1].replace("+", " ") - query_solr(solr_kinit_command, delete_cmd, "{0} -H Content-Type:text/xml {1}".format(curl_prefix, delete_cmd), "Deleting", delete_query_data) - if skip_date_usage: - logger.info("Deleted data from collection %s where %s < %s", collection, id_field, prev_lot_end_id) - else: - logger.info("Deleted data from collection %s where %s,%s < %s,%s", collection, filter_field, id_field, prev_lot_end_value, - prev_lot_end_id) - -def query_solr(solr_kinit_command, url, curl_command, action, data=None): - if 
solr_kinit_command: - run_kinit(solr_kinit_command, "Solr") - - try: - cmd = curl_command.split() - if data: - cmd.append("--data-binary") - cmd.append(data) - logger.debug("%s data from solr:\n%s", action, ' '.join(cmd)) - process = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) - except Exception as e: - print - logger.warn("Could not execute curl command:\n%s", ' '.join(cmd)) - logger.warn(str(e)) - sys.exit() - - out, err = process.communicate() - if process.returncode != 0: - print - logger.warn("Could not execute curl command:\n%s", ' '.join(cmd)) - logger.warn(str(err)) - sys.exit() - - true = True # needed to be able to eval 'true' in the returned json - rsp = eval(str(out)) - if rsp["responseHeader"]["status"] != 0: - print - logger.warn("Could not execute solr query:\n%s", unquote(url)) - logger.warn(rsp["error"]["msg"]) - sys.exit() - - return rsp - -def run_kinit(kinit_command, program): - try: - logger.debug("Running kinit for %s:\n%s", program, kinit_command) - result = call(kinit_command.split()) - except Exception as e: - print - logger.warn("Could not execute %s kinit command:\n%s", program, kinit_command) - logger.warn(str(e)) - sys.exit() - - if result != 0: - print - logger.warn("%s kinit command was not successful:\n%s", program, kinit_command) - sys.exit() - -if __name__ == '__main__': - try: - start_time = time.time() - - options = parse_arguments() - verbose = options.verbose - set_log_level() - - end = get_end(options) if not options.skip_date_usage else None - - if options.mode == "delete": - delete(options.solr_url, options.collection, options.filter_field, end, options.solr_keytab, options.solr_principal, options.skip_date_usage) - elif options.mode in ["archive", "save"]: - save(options.mode, options.solr_url, options.collection, options.filter_field, options.id_field, end, - options.read_block_size, options.write_block_size, options.ignore_unfinished_uploading, - options.additional_filter, options.name, options.solr_keytab, options.solr_principal, options.json_file, - options.compression, options.hdfs_keytab, options.hdfs_principal, options.hdfs_user, options.hdfs_path, - options.key_file_path, options.bucket, options.key_prefix, options.local_path, options.solr_output_collection, - options.solr_output_url, options.exclude_fields, options.skip_date_usage) - else: - logger.warn("Unknown mode: %s", options.mode) - - print("--- %s seconds ---" % (time.time() - start_time)) - except KeyboardInterrupt: - print - sys.exit(128 + signal.SIGINT) diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/ambariSolrMigration.sh b/ambari-infra/ambari-infra-solr-client/src/main/resources/ambariSolrMigration.sh deleted file mode 100755 index e054a898ab8..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/resources/ambariSolrMigration.sh +++ /dev/null @@ -1,322 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -function print_help() { - cat << EOF - Usage: /usr/lib/ambari-infra-solr-client/ambariSolrMigration.sh --mode --ini-file [additional options] - - -m, --mode available migration modes: delete-only | backup-only | migrate-restore | all | transport - -i, --ini-file ini-file location (used by migrationHelper.py) - -s, --migration-script-location migrateHelper.py location (default: /usr/lib/ambari-infra-solr-client/migrationHelper.py) - -w, --wait-between-steps wait between different migration steps in seconds (default: 15) - -p, --python-path python location, default: /usr/bin/python - -b, --batch-interval seconds between batch tasks for rolling restart solr at last step (default: 60) - -k, --keep-backup keep backup data (more secure, useful if you have enough space for that) - --skip-solr-client-upgrade skip ambari-infra-solr-client package upgrades - --skip-solr-server-upgrade skip ambari-infra-solr package upgrades - --skip-logsearch-upgrade skip ambari-logsearch-portal and ambari-logsearch-logfeeder package upgrades - --skip-warnings skip warnings at check-shards step - -h, --help print help -EOF -} - -function handle_result() { - local result_code=${1:?"usage: "} - local step=${2:?"usage: "} - local start_date=${4:?"usage: "} - if [[ "$result_code" != "0" ]] ; then - end_date=$(date +%s) - runtime=$($python_location -c "print '%02u:%02u:%02u' % ((${end_date} - ${start_date})/3600, ((${end_date} - ${start_date})/60)%60, (${end_date} - ${start_date})%60)") - echo "Total Runtime: $runtime" - echo "$step command FAILED. Stop migration commands ..." - exit 1 - fi -} - -function wait() { - local seconds=${1:?"usage: "} - echo "Waiting $seconds seconds before next step ..." - sleep $seconds -} - -function log_command() { - local command_to_execute=${1:?"usage: "} - echo "Execute command: $command_to_execute" -} - -function run_migrate_commands() { - local mode=${1:?"usage: "} - local script_location=${2:?"usage: "} - local python_location=${3:?"usage: "} - local ini_file=${4:?"usage "} - local time_sleep=${5:?"usage "} - local skip_solr_client_upgrade=${6:?"usage "} - local skip_solr_server_upgrade=${7:?"usage "} - local skip_logsearch_upgrade=${8:?"usage "} - local skip_warnings=${9:?"usage "} - local batch_interval=${10:?"usage "} - local keep_backup=${11:?"usage "} - local verbose=${12:?"usage "} - - local verbose_val="" - if [[ "$verbose" == "true" ]]; then - verbose_val="--verbose" - fi - - local skip_warnings_val="" - if [[ "$skip_warnings" == "true" ]]; then - skip_warnings_val="--skip-warnings" - fi - - local keep_backup_val="" - if [[ "$keep_backup" == "true" ]]; then - keep_backup_val="--keep-backup" - fi - - start_date=$(date +%s) - - # execute on: transport - if [[ "$mode" == "transport" ]] ; then - log_command "$python_location $script_location --ini-file $ini_file --action transport-old-data $verbose_val" - $python_location $script_location --ini-file $ini_file --action transport-old-data $verbose_val - handle_result "$?" 
"Transport Old Solr Data" "$python_location" "$start_date" - fi - - # execute on: backup - all - if [[ "$mode" == "backup" || "$mode" == "all" ]] ; then - log_command "$python_location $script_location --ini-file $ini_file --action check-shards $verbose_val $skip_warnings_val" - $python_location $script_location --ini-file $ini_file --action check-shards $verbose_val $skip_warnings_val - handle_result "$?" "Check Shards" "$python_location" "$start_date" - fi - - # execute on: backup - delete - all - if [[ "$mode" == "delete" || "$mode" == "backup" || "$mode" == "all" ]] ; then - if [[ "$skip_solr_client_upgrade" != "true" ]]; then - log_command "$python_location $script_location --ini-file $ini_file --action upgrade-solr-clients $verbose_val" - $python_location $script_location --ini-file $ini_file --action upgrade-solr-clients $verbose_val - handle_result "$?" "Upgrade Solr Clients" "$python_location" "$start_date" - fi - log_command "$python_location $script_location --ini-file $ini_file --action check-docs $verbose_val" - $python_location $script_location --ini-file $ini_file --action check-docs $verbose_val - handle_result "$?" "Check Documents" "$python_location" "$start_date" - fi - - # ececute on: backup - all - if [[ "$mode" == "backup" || "$mode" == "all" ]] ; then - log_command "$python_location $script_location --ini-file $ini_file --action backup $verbose_val" - $python_location $script_location --ini-file $ini_file --action backup $verbose_val - handle_result "$?" "Backup" "$python_location" "$start_date" - fi - - # execute on: delete - all - if [[ "$mode" == "delete" || "$mode" == "all" ]] ; then - log_command "$python_location $script_location --ini-file $ini_file --action delete-collections $verbose_val" - $python_location $script_location --ini-file $ini_file --action delete-collections $verbose_val - handle_result "$?" "Delete collections" "$python_location" "$start_date" - fi - - # execute on: delete - all - if [[ "$mode" == "delete" || "$mode" == "all" ]] ; then - if [[ "$skip_solr_server_upgrade" != "true" ]]; then - log_command "$python_location $script_location --ini-file $ini_file --action upgrade-solr-instances $verbose_val" - $python_location $script_location --ini-file $ini_file --action upgrade-solr-instances $verbose_val - handle_result "$?" "Upgrade Solr Instances" "$python_location" "$start_date" - fi - fi - - # execute on: delete - all - if [[ "$mode" == "delete" || "$mode" == "all" ]] ; then - log_command "$python_location $script_location --ini-file $ini_file --action restart-solr $verbose_val" - $python_location $script_location --ini-file $ini_file --action restart-solr $verbose_val - handle_result "$?" "Restart Solr Instances" "$python_location" "$start_date" - wait $time_sleep - - log_command "$python_location $script_location --ini-file $ini_file --action restart-ranger $verbose_val" - $python_location $script_location --ini-file $ini_file --action restart-ranger $verbose_val - handle_result "$?" "Restart Ranger Admins" "$python_location" "$start_date" - wait $time_sleep - if [[ "$skip_logsearch_upgrade" != "true" ]]; then - log_command "$python_location $script_location --ini-file $ini_file --action upgrade-logsearch-portal $verbose_val" - $python_location $script_location --ini-file $ini_file --action upgrade-logsearch-portal $verbose_val - handle_result "$?" 
"Upgrade Log Search Portal" "$python_location" "$start_date" - - log_command "$python_location $script_location --ini-file $ini_file --action upgrade-logfeeders $verbose_val" - $python_location $script_location --ini-file $ini_file --action upgrade-logfeeders $verbose_val - handle_result "$?" "Upgrade Log Feeders" "$python_location" "$start_date" - fi - log_command "$python_location $script_location --ini-file $ini_file --action restart-logsearch $verbose_val" - $python_location $script_location --ini-file $ini_file --action restart-logsearch $verbose_val - handle_result "$?" "Restart Log Search" "$python_location" "$start_date" - wait $time_sleep - - log_command "$python_location $script_location --ini-file $ini_file --action restart-atlas $verbose_val" - $python_location $script_location --ini-file $ini_file --action restart-atlas $verbose_val - handle_result "$?" "Restart Atlas Servers" "$python_location" "$start_date" - wait $time_sleep - fi - - # execute on migrate-restore - all - if [[ "$mode" == "migrate-restore" || "$mode" == "all" ]] ; then - log_command "$python_location $script_location --ini-file $ini_file --action check-docs $verbose_val" - $python_location $script_location --ini-file $ini_file --action check-docs $verbose_val - handle_result "$?" "Check Documents" "$python_location" "$start_date" - - log_command "$python_location $script_location --ini-file $ini_file --action migrate $verbose_val" - $python_location $script_location --ini-file $ini_file --action migrate $verbose_val - handle_result "$?" "Migrate Index" "$python_location" "$start_date" - - log_command "$python_location $script_location --ini-file $ini_file --action restore $keep_backup_val $verbose_val" - $python_location $script_location --ini-file $ini_file --action restore $keep_backup_val $verbose_val - handle_result "$?" "Restore" "$python_location" "$start_date" - - log_command "$python_location $script_location --ini-file $ini_file --action rolling-restart-solr $verbose_val --batch-interval $batch_interval" - $python_location $script_location --ini-file $ini_file --action rolling-restart-solr $verbose_val --batch-interval $batch_interval - handle_result "$?" 
"Rolling Restart Solr" "$python_location" "$start_date" - fi - - end_date=$(date +%s) - runtime=$($python_location -c "print '%02u:%02u:%02u' % ((${end_date} - ${start_date})/3600, ((${end_date} - ${start_date})/60)%60, (${end_date} - ${start_date})%60)") - echo "Total Runtime: $runtime" -} - -function main() { - while [[ $# -gt 0 ]] - do - key="$1" - case $key in - -m|--mode) - local MODE="$2" - shift 2 - ;; - -i|--ini-file) - local INI_FILE="$2" - shift 2 - ;; - -w|--wait-between-steps) - local WAIT="$2" - shift 2 - ;; - -s|--migration-script-location) - local SCRIPT_LOCATION="$2" - shift 2 - ;; - -p|--python-path) - local PYTHON_PATH_FOR_MIGRATION="$2" - shift 2 - ;; - -b|--batch-interval) - local BATCH_INTERVAL="$2" - shift 2 - ;; - -k|--keep-backup) - local KEEP_BACKUP="true" - shift 1 - ;; - --skip-solr-client-upgrade) - local SKIP_SOLR_CLIENT_UPGRADE="true" - shift 1 - ;; - --skip-solr-server-upgrade) - local SKIP_SOLR_SERVER_UPGRADE="true" - shift 1 - ;; - --skip-logsearch-upgrade) - local SKIP_LOGSEARCH_UPGRADE="true" - shift 1 - ;; - --skip-warnings) - local SKIP_WARNINGS="true" - shift 1 - ;; - -v|--verbose) - local VERBOSE="true" - shift 1 - ;; - -h|--help) - shift 1 - print_help - exit 0 - ;; - *) - echo "Unknown option: $1" - exit 1 - ;; - esac - done - - if [[ -z "$SCRIPT_LOCATION" ]] ; then - SCRIPT_LOCATION="/usr/lib/ambari-infra-solr-client/migrationHelper.py" - fi - - if [[ -z "$PYTHON_PATH_FOR_MIGRATION" ]] ; then - PYTHON_PATH_FOR_MIGRATION="/usr/bin/python" - fi - - if [[ -z "$WAIT" ]] ; then - WAIT="15" - fi - - if [[ -z "$BATCH_INTERVAL" ]] ; then - BATCH_INTERVAL="60" - fi - - if [[ -z "$VERBOSE" ]] ; then - VERBOSE="false" - fi - - if [[ -z "$SKIP_WARNINGS" ]] ; then - SKIP_WARNINGS="false" - fi - - if [[ -z "$SKIP_SOLR_CLIENT_UPGRADE" ]] ; then - SKIP_SOLR_CLIENT_UPGRADE="false" - fi - - if [[ -z "$SKIP_SOLR_SERVER_UPGRADE" ]] ; then - SKIP_SOLR_SERVER_UPGRADE="false" - fi - - if [[ -z "$SKIP_LOGSEARCH_UPGRADE" ]] ; then - SKIP_LOGSEARCH_UPGRADE="false" - fi - - if [[ -z "$KEEP_BACKUP" ]] ; then - KEEP_BACKUP="false" - fi - - if [[ -z "$INI_FILE" ]] ; then - echo "ini-file argument is required (-i or --ini-file)." - print_help - exit 1 - fi - - if [[ -z "$MODE" ]] ; then - echo "mode argument is required (-m or --mode)." 
- print_help - exit 1 - else - if [[ "$MODE" == "delete" || "$MODE" == "backup" || "$MODE" == "migrate-restore" || "$MODE" == "all" || "$MODE" == "transport" ]]; then - run_migrate_commands "$MODE" "$SCRIPT_LOCATION" "$PYTHON_PATH_FOR_MIGRATION" "$INI_FILE" "$WAIT" "$SKIP_SOLR_CLIENT_UPGRADE" "$SKIP_SOLR_SERVER_UPGRADE" "$SKIP_LOGSEARCH_UPGRADE" "$SKIP_WARNINGS" "$BATCH_INTERVAL" "$KEEP_BACKUP" "$VERBOSE" - else - echo "mode '$MODE' is not supported" - print_help - exit 1 - fi - fi -} - -main ${1+"$@"} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/data/security-without-authr.json b/ambari-infra/ambari-infra-solr-client/src/main/resources/data/security-without-authr.json deleted file mode 100644 index 7c3690529b8..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/resources/data/security-without-authr.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "authentication": - { - "class": "org.apache.solr.security.KerberosPlugin" - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/log4j.properties b/ambari-infra/ambari-infra-solr-client/src/main/resources/log4j.properties deleted file mode 100644 index 4779596755e..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/resources/log4j.properties +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2011 The Apache Software Foundation -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
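# The configuration below splits console logging: messages from INFO up to WARN
# go to stdout and ERROR goes to stderr, each appender using the bare message
# pattern (%m%n) with a LevelRangeFilter bounding its level range.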
- -log4j.rootLogger=INFO,stdout,stderr - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Threshold=INFO -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%m%n -log4j.appender.stdout.filter.lvlRangeFilter=org.apache.log4j.varia.LevelRangeFilter -log4j.appender.stdout.filter.lvlRangeFilter.LevelMax=WARN - -log4j.appender.stderr=org.apache.log4j.ConsoleAppender -log4j.appender.stderr.Threshold=ERROR -log4j.appender.stderr.Target=System.err -log4j.appender.stderr.layout=org.apache.log4j.PatternLayout -log4j.appender.stderr.layout.ConversionPattern=%m%n -log4j.appender.stderr.filter.lvlRangeFilter=org.apache.log4j.varia.LevelRangeFilter -log4j.appender.stderr.filter.lvlRangeFilter.LevelMin=ERROR \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/managed-schema b/ambari-infra/ambari-infra-solr-client/src/main/resources/managed-schema deleted file mode 100644 index 6c87af7cf9e..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/resources/managed-schema +++ /dev/null @@ -1,93 +0,0 @@ - - - - id - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh b/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh deleted file mode 100644 index 69f9b2c0e4c..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrCloudCli.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -JVM="java" -sdir="`dirname \"$0\"`" -ldir="`dirname "$(readlink -f "$0")"`" - -DIR="$sdir" -if [ "$sdir" != "$ldir" ]; then - DIR="$ldir" -fi - -PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$DIR:$DIR/libs/*" $INFRA_SOLR_CLI_OPTS org.apache.ambari.infra.solr.AmbariSolrCloudCLI ${1+"$@"} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh b/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh deleted file mode 100755 index 5cd5b5f6f17..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh +++ /dev/null @@ -1,237 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -: ${JAVA_HOME:?"Please set the JAVA_HOME variable!"} - -JVM="java" -sdir="`dirname \"$0\"`" -ldir="`dirname "$(readlink -f "$0")"`" - -DIR="$sdir" -if [ "$sdir" != "$ldir" ]; then - DIR="$ldir" -fi - -function print_help() { - cat << EOF - - Usage: [] [] - - commands: - upgrade-index Check and upgrade solr index data in core directories. - run-check-index-tool call 'java -cp ... org.apache.lucene.index.IndexUpgrader' directly - run-upgrade-index-tool call 'java -cp ... org.apache.lucene.index.CheckIndex' directly - help print usage - - - upgrade-index command arguments: - -d, --index-data-dir Location of the solr cores (e.g.: /opt/ambari_infra_solr/data) - -c, --core-filter Comma separated name filters of core directoies (default: hadoop_logs,audit_logs,history) - -b, --backup-enabled Use indexer tool with backup snapshots. (core filter won't be used) - -g, --debug Enable debug mode, IndexUpgrader output will be verbose. - -f, --force Force to start index upgrade, even is the version is at least 6. - -v, --version Lucene version to upgrade (default: 6.6.2, available: 6.6.2, 7.4.0) -EOF -} - -function upgrade_core() { - local INDEX_DIR=${1:?"usage: e.g.: /opt/ambari_infra_solr/data"} - local FORCE_UPDATE=${2:?"usage e.g.: true"} - local SOLR_CORE_FILTERS=${3:?"usage: e.g.: hadoop_logs,audit_logs,history"} - local LUCENE_VERSION=${4:?"usage e.g.: 7.4.0"} - local BACKUP_MODE=${5:?"usage e.g.: true"} - local DEBUG_MODE=${6:?"usage e.g.: true"} - SOLR_CORE_FILTER_ARR=$(echo $SOLR_CORE_FILTERS | sed "s/,/ /g") - - local version_prefix="$(echo $LUCENE_VERSION | head -c 1)" - local write_lock_exists="false" - local core_str="Core" - if [[ "$BACKUP_MODE" == "true" ]]; then - core_str="Snapshot" - fi - - local verbose="" - if [[ "$DEBUG_MODE" == "true" ]]; then - verbose="-verbose" - fi - - if [[ -f "$INDEX_DIR/write.lock" ]]; then - echo "Deleting $INDEX_DIR/write.lock file..." - write_lock_exists="true" - rm "$INDEX_DIR/write.lock" - fi - - for coll in $SOLR_CORE_FILTER_ARR; do - if [[ "$1" == *"$coll"* ]]; then - echo "$core_str '$1' dir name contains $coll (core filter)'"; - version=$(PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$DIR/migrate/lucene-core-$LUCENE_VERSION.jar:$DIR/migrate/lucene-backward-codecs-$LUCENE_VERSION.jar" org.apache.lucene.index.CheckIndex -fast $1|grep " version="|sed -e 's/.*=//g'|head -1) - if [ -z $version ] ; then - echo "$core_str '$1' - Empty index?" - return - fi - majorVersion=$(echo $version|cut -c 1) - if [ $majorVersion -ge $version_prefix ] && [ $FORCE_UPDATE == "false" ] ; then - echo "$core_str '$1' - Already on version $version, not upgrading. Use -f or --force option to run upgrade anyway." - else - echo "$core_str '$1' - Index version is $version, upgrading ..." 
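# When the detected major version is older than the target Lucene release (or
# -f/--force is set), the command below invokes Lucene's IndexUpgrader, which
# rewrites every segment of the core into the current index format; the
# -delete-prior-commits flag drops older commit points once the upgrade is done.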
- echo "Run: PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$DIR/migrate/lucene-core-$LUCENE_VERSION.jar:$DIR/migrate/lucene-backward-codecs-$LUCENE_VERSION.jar" org.apache.lucene.index.IndexUpgrader -delete-prior-commits $verbose $1" - PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$DIR/migrate/lucene-core-$LUCENE_VERSION.jar:$DIR/migrate/lucene-backward-codecs-$LUCENE_VERSION.jar" org.apache.lucene.index.IndexUpgrader -delete-prior-commits $verbose $1 - echo "Upgrading core '$1' has finished" - fi - fi - done - - if [[ "$write_lock_exists" == "true" ]]; then - echo "Putting write.lock file back..." - touch "$INDEX_DIR/write.lock" - fi -} - -function upgrade_index() { - while [[ $# -gt 0 ]] - do - key="$1" - case $key in - -c|--core-filters) - local SOLR_CORE_FILTERS="$2" - shift 2 - ;; - -f|--force) - local FORCE_UPDATE="true" - shift - ;; - -b|--backup-enabled) - local BACKUP_ENABLED="true" - shift - ;; - -g|--debug) - local DEBUG_ENABLED="true" - shift - ;; - -d|--index-data-dir) - local INDEX_DIR="$2" - shift 2 - ;; - -v|--version) - local LUCENE_VERSION="$2" - shift 2 - ;; - *) - echo "Unknown option: $1" - exit 1 - ;; - esac - done - if [[ -z "$INDEX_DIR" ]] ; then - echo "Index data dirctory option is required (-d or --index-data-dir). Exiting..." - exit 1 - fi - - if [[ -z "$BACKUP_ENABLED" ]] ; then - BACKUP_ENABLED="false" - else - if [[ -z "$SOLR_CORE_FILTERS" ]]; then - SOLR_CORE_FILTERS="snapshot" - echo "NOTE: Use 'snapshot' as filter." - fi - fi - - if [[ -z "$SOLR_CORE_FILTERS" ]] ; then - SOLR_CORE_FILTERS="hadoop_logs,audit_logs,history" - echo "NOTE: Use 'hadoop_logs,audit_logs,history' as filte." - fi - - if [[ -z "$LUCENE_VERSION" ]] ; then - LUCENE_VERSION="6.6.2" - fi - - if [[ -z "$FORCE_UPDATE" ]] ; then - FORCE_UPDATE="false" - else - echo "NOTE: Forcing index upgrade is set." - fi - - if [[ -z "$DEBUG_ENABLED" ]] ; then - DEBUG_ENABLED="false" - else - echo "NOTE: Debug mode is enabled." - fi - - if [[ "$BACKUP_ENABLED" == "true" ]]; then - for SNAPSHOT_DIR in $(find $INDEX_DIR -maxdepth 1 -mindepth 1); do - if $(test -d ${SNAPSHOT_DIR}); then - abspath=$(cd "$(dirname "$SNAPSHOT_DIR")"; pwd)/$(basename "$SNAPSHOT_DIR") - echo "--------------------------------" - echo "Checking snapshot: $abspath" - upgrade_core "$abspath" "$FORCE_UPDATE" "$SOLR_CORE_FILTERS" "$LUCENE_VERSION" "$BACKUP_ENABLED" "$DEBUG_ENABLED" - fi; - done - else - CORES=$(for replica_dir in `find $INDEX_DIR -name data`; do dirname $replica_dir; done); - if [[ -z "$CORES" ]] ; then - echo "No indices found on path $INDEX_DIR" - else - for c in $CORES ; do - if find $c/data -maxdepth 1 -type d -name 'index*' 1> /dev/null 2>&1; then - name=$(echo $c | sed -e 's/.*\///g') - abspath=$(cd "$(dirname "$c")"; pwd)/$(basename "$c") - find $c/data -maxdepth 1 -type d -name 'index*' | while read indexDir; do - echo "--------------------------------" - echo "Checking core $name - $abspath" - upgrade_core "$indexDir" "$FORCE_UPDATE" "$SOLR_CORE_FILTERS" "$LUCENE_VERSION" "$BACKUP_ENABLED" "$DEBUG_ENABLED" - done - else - echo "No index folder found for $name" - fi - done - echo "DONE" - fi - fi -} - -function upgrade_index_tool() { - # see: https://cwiki.apache.org/confluence/display/solr/IndexUpgrader+Tool - : ${INDEX_VERSION:?"Please set the INDEX_VERSION variable! 
(6.6.2 or 7.4.0)"} - PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$DIR/migrate/lucene-core-$INDEX_VERSION.jar:$DIR/migrate/lucene-backward-codecs-$INDEX_VERSION.jar" org.apache.lucene.index.IndexUpgrader ${@} -} - -function check_index_tool() { - : ${INDEX_VERSION:?"Please set the INDEX_VERSION variable! (6.6.2 or 7.4.0)"} - PATH=$JAVA_HOME/bin:$PATH $JVM -classpath "$DIR/migrate/lucene-core-$INDEX_VERSION.jar:$DIR/migrate/lucene-backward-codecs-$INDEX_VERSION.jar" org.apache.lucene.index.CheckIndex ${@} -} - -function main() { - command="$1" - case $command in - "upgrade-index") - upgrade_index "${@:2}" - ;; - "run-check-index-tool") - check_index_tool "${@:2}" - ;; - "run-upgrade-index-tool") - upgrade_index_tool "${@:2}" - ;; - "help") - print_help - ;; - *) - echo "Available commands: (upgrade-index | run-check-index-tool | run-upgrade-index-tool | help)" - ;; - esac -} - -main ${1+"$@"} diff --git a/ambari-infra/ambari-infra-solr-client/src/test/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientTest.java b/ambari-infra/ambari-infra-solr-client/src/test/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientTest.java deleted file mode 100644 index 44f3ec5cf91..00000000000 --- a/ambari-infra/ambari-infra-solr-client/src/test/java/org/apache/ambari/infra/solr/AmbariSolrCloudClientTest.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr; - -import static org.easymock.EasyMock.anyString; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.createMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.util.NamedList; -import org.junit.Before; -import org.junit.Test; - -import java.util.Arrays; -import java.util.List; - -public class AmbariSolrCloudClientTest { - - private AmbariSolrCloudClient underTest; - - private CloudSolrClient mockedSolrClient; - - private SolrZkClient mockedSolrZkClient; - - private CollectionAdminResponse mockedResponse; - - @Before - public void setUp() { - AmbariSolrCloudClientBuilder builder = new AmbariSolrCloudClientBuilder(); - - mockedSolrClient = createMock(CloudSolrClient.class); - mockedSolrZkClient = createMock(SolrZkClient.class); - mockedResponse = createMock(CollectionAdminResponse.class); - - builder.solrCloudClient = mockedSolrClient; - builder.solrZkClient = mockedSolrZkClient; - - underTest = builder - .withZkConnectString("localhost1:2181,localhost2:2182") - .withCollection("collection1") - .withConfigSet("configSet") - .withShards(1) - .withReplication(1) - .withMaxShardsPerNode(2) - .withInterval(1) - .withRetry(2) - .withRouterName("routerName") - .withRouterField("routerField") - .build(); - } - - @Test - public void testCreateCollectionWhenCollectionDoesNotExist() throws Exception { - // GIVEN - NamedList namedList = new NamedList<>(); - namedList.add("collections", Arrays.asList("collection1", "collection2")); - - expect(mockedSolrClient.request(anyObject(CollectionAdminRequest.class), anyString())).andReturn(namedList).times(1); - replay(mockedSolrClient); - - // WHEN - String result = underTest.createCollection(); - // THEN - assertEquals("collection1", result); - verify(mockedSolrClient); - } - - @Test - public void testCreateCollectionWhenCollectionExists() throws Exception { - // GIVEN - NamedList namedList = new NamedList<>(); - namedList.add("collections", Arrays.asList("collection2", "collection3")); - - expect(mockedSolrClient.request(anyObject(CollectionAdminRequest.class), anyString())).andReturn(namedList).times(2); - replay(mockedSolrClient); - - // WHEN - String result = underTest.createCollection(); - // THEN - assertEquals("collection1", result); - verify(mockedSolrClient); - } - - @Test - public void testListCollections() throws Exception { - // GIVEN - NamedList namedList = new NamedList<>(); - namedList.add("collections", Arrays.asList("collection1", "collection2")); - - expect(mockedSolrClient.request(anyObject(CollectionAdminRequest.class), anyString())).andReturn(namedList); - - replay(mockedSolrClient); - // WHEN - List result = underTest.listCollections(); - - // THEN - assertTrue(result.contains("collection1")); - assertTrue(result.contains("collection2")); - assertEquals(2, result.size()); - } - - @Test(expected = AmbariSolrCloudClientException.class) - public void testRetries() throws Exception { - // GIVEN - expect(mockedSolrClient.request(anyObject(CollectionAdminRequest.class), anyString())).andThrow(new 
RuntimeException("ex")).times(2); - replay(mockedSolrClient); - // WHEN - underTest.listCollections(); - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/.gitignore b/ambari-infra/ambari-infra-solr-plugin/.gitignore deleted file mode 100644 index 379dea75037..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -Profile -.env -ambari-infra-solr-*.rpm \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-plugin/docker/Dockerfile b/ambari-infra/ambari-infra-solr-plugin/docker/Dockerfile deleted file mode 100644 index d1835106a36..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/docker/Dockerfile +++ /dev/null @@ -1,46 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM centos:centos6 - - -RUN yum clean all -y && yum update -y -RUN yum -y install vim wget rpm-build sudo which telnet tar openssh-server openssh-clients ntp git httpd lsof - -ENV HOME /root - -#Install JAVA -ENV JAVA_VERSION 8u131 -ENV BUILD_VERSION b11 -RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/d54c1d3a095b4ff2b6607d096fa80163/jdk-$JAVA_VERSION-linux-x64.rpm -O jdk-8-linux-x64.rpm -RUN rpm -ivh jdk-8-linux-x64.rpm -ENV JAVA_HOME /usr/java/default/ - - -#Install Solr -ADD ambari-infra-solr-2.0.0.0-SNAPSHOT.noarch.rpm /root/ambari-infra-solr.rpm -RUN rpm -ivh /root/ambari-infra-solr.rpm - -RUN mkdir -p /root/solr_index/data -ENV SOLR_HOME /root/solr_index/data -ADD solr.xml /root/solr_index/data/solr.xml - -ENV PATH $PATH:$JAVA_HOME/bin:/usr/lib/ambari-infra-solr/bin - -#Enable G1 GC -#ENV GC_TUNE="-XX:+UseG1GC -XX:+PerfDisableSharedMem -XX:+ParallelRefProcEnabled -XX:G1HeapRegionSize=3m -XX:MaxGCPauseMillis=250 -XX:InitiatingHeapOccupancyPercent=75 -XX:+UseLargePages -XX:+AggressiveOpts" - -# Start in debug mode -#ENV SOLR_OPTS -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005 - -WORKDIR /root -CMD /usr/lib/ambari-infra-solr/bin/solr start -force -f diff --git a/ambari-infra/ambari-infra-solr-plugin/docker/docker-compose.yml b/ambari-infra/ambari-infra-solr-plugin/docker/docker-compose.yml deleted file mode 100644 index 98469383657..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/docker/docker-compose.yml +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - zookeeper: - image: zookeeper:${ZOOKEEPER_VERSION:-3.4.10} - restart: always - hostname: zookeeper - networks: - - infra-network - ports: - - 2181:2181 - environment: - ZOO_MY_ID: 1 - ZOO_SERVERS: server.1=zookeeper:2888:3888 - solr: - image: ambari-infra-solr:v1.0 - restart: always - hostname: solr - ports: - - "8983:8983" - - "5005:5005" - networks: - - infra-network - env_file: - - Profile - entrypoint: - - solr - - start - - "-f" - - "-force" - - "-c" - - "-z" - - ${ZOOKEEPER_CONNECTION_STRING} - volumes: - - $AMBARI_LOCATION/ambari-logsearch/ambari-logsearch-server/src/main/configsets:/usr/lib/ambari-infra-solr/server/solr/configsets - extra_hosts: - - metrics_collector:$DOCKERIP -networks: - infra-network: - driver: bridge - diff --git a/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh b/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh deleted file mode 100755 index 502d87a642d..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr-docker-compose.sh +++ /dev/null @@ -1,122 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -sdir="`dirname \"$0\"`" -: ${1:?"argument is missing: (start|stop|create_collection)"} -command="$1" - -function start_containers() { - check_env_files - kill_containers - pushd $sdir/../ - local AMBARI_SOLR_MANAGER_LOCATION=$(pwd) - echo $AMBARI_SOLR_MANAGER_LOCATION - cd $AMBARI_SOLR_MANAGER_LOCATION/docker - echo "Start containers ..." - docker-compose up -d - popd - echo "Containers started" -} - -function check_env_files() { - local count=0; - - check_env_file .env setup_env - count=$((count + $?)); - check_env_file Profile setup_profile - count=$((count + $?)); - - if [[ "$count" -gt 0 ]] - then - echo "Exit" - exit; - fi -} - -function check_env_file() { - if [ -f "$sdir/$1" ]; - then - echo "$1 file exists" - return 0; - else - echo "$1 file does not exist, Creating a new one..." - $2 - echo "$1 file has been created. Check it out before starting Ambari Infra Manager. 
($sdir/$1)" - return 1; - fi -} - -function setup_env() { - pushd $sdir/../../ - local AMBARI_LOCATION=$(pwd) - popd - local docker_ip=$(get_docker_ip) - cat << EOF > $sdir/.env -DOCKERIP=$docker_ip -MAVEN_REPOSITORY_LOCATION=$HOME/.m2 -AMBARI_LOCATION=$AMBARI_LOCATION - -ZOOKEEPER_VERSION=3.4.10 -ZOOKEEPER_CONNECTION_STRING=zookeeper:2181 - -SOLR_VERSION=7.4.0 -EOF -} - -function get_docker_ip() { - local ip=$(ifconfig en0 | grep inet | awk '$1=="inet" {print $2}') - echo $ip -} - -function setup_profile() { - cat << EOF > $sdir/Profile -EOF -} - -function kill_containers() { - pushd $sdir/../ - local AMBARI_SOLR_MANAGER_LOCATION=$(pwd) - echo "Try to remove containers if exists ..." - echo $AMBARI_SOLR_MANAGER_LOCATION - cd $AMBARI_SOLR_MANAGER_LOCATION/docker - docker-compose rm -f -s solr - docker-compose rm -f -s zookeeper - popd -} - -function create_collection() { - pushd $sdir/../ - local AMBARI_SOLR_MANAGER_LOCATION=$(pwd) - cd $AMBARI_SOLR_MANAGER_LOCATION/docker - docker exec docker_solr_1 solr create_collection -force -c hadoop_logs -d /usr/lib/ambari-infra-solr/server/solr/configsets/hadoop_logs/conf -n hadoop_logs_conf - docker exec docker_solr_1 solr create_collection -force -c audit_logs -d /usr/lib/ambari-infra-solr/server/solr/configsets/audit_logs/conf -n audit_logs_conf - popd -} - -case $command in - "start") - start_containers - ;; - "create_collection") - create_collection - ;; - "stop") - kill_containers - ;; - *) - echo "Available commands: (start|stop|create_collection)" - ;; -esac diff --git a/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr.sh b/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr.sh deleted file mode 100755 index bb5a6bd054e..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/docker/infra-solr.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -sdir="`dirname \"$0\"`" -: ${1:?"argument is missing: (build|build-docker|start|stop)"} -command="$1" - -function build() { - pushd $sdir/.. - mvn clean install - popd - - pushd $sdir/../../ambari-infra-assembly - mvn clean install -Dbuild-rpm - popd - - build_docker -} - -function build_docker() { - pushd $sdir - cp ././../../ambari-infra-assembly/target/rpm/ambari-infra-solr/RPMS/noarch/ambari-infra-solr-2.0.0.0-SNAPSHOT.noarch.rpm . - docker build -t ambari-infra-solr:v1.0 . 
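# The freshly built image is tagged ambari-infra-solr:v1.0, the same tag that
# the docker-compose.yml solr service and the start() function below refer to.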
- popd -} - -function start() { - pushd $sdir - docker run -d -p 8983:8983 -p 5005:5005 --name infra-solr ambari-infra-solr:v1.0 - popd -} - -function stop() { - pushd $sdir - docker kill infra-solr - docker rm infra-solr - popd -} - -case $command in - "build") - build - ;; - "build-docker") - build_docker - ;; - "start") - start - ;; - "stop") - stop - ;; - *) - echo "Available commands: (build|build-docker|start|stop)" - ;; -esac diff --git a/ambari-infra/ambari-infra-solr-plugin/docker/solr.xml b/ambari-infra/ambari-infra-solr-plugin/docker/solr.xml deleted file mode 100644 index 65aa8a41e19..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/docker/solr.xml +++ /dev/null @@ -1,108 +0,0 @@ - - - - - ${host:} - ${jetty.port:} - ${hostContext:solr} - ${zkClientTimeout:15000} - ${genericCoreNodeNames:true} - - - - 5 - metrics_collector - 6188 - http - threads.count - threads.deadlock.count - memory.heap.used - memory.heap.max - memory.non-heap.used - memory.non-heap.max - memory.pools.CMS-Old-Gen.used - memory.pools.CMS-Old-Gen.max - gc.ConcurrentMarkSweep.count - gc.ConcurrentMarkSweep.time - gc.ParNew.count - gc.ParNew.time - memory.pools.Metaspace.used - memory.pools.Metaspace.max - memory.pools.Par-Eden-Space.used - memory.pools.Par-Eden-Space.max - memory.pools.Par-Survivor-Space.used - memory.pools.Par-Survivor-Space.max - gc.G1-Old-Generation.count - gc.G1-Old-Generation.time - gc.G1-Young-Generation.count - gc.G1-Young-Generation.time - memory.pools.G1-Eden-Space.used - memory.pools.G1-Eden-Space.max - memory.pools.G1-Survivor-Space.used - memory.pools.G1-Survivor-Space.max - os.processCpuLoad - os.systemCpuLoad - os.openFileDescriptorCount - - - - - 5 - metrics_collector - 6188 - http - UPDATE.updateHandler.adds - UPDATE.updateHandler.deletesById - UPDATE.updateHandler.errors - UPDATE.updateHandler.docsPending - QUERY./select.requests - QUERY./select.requestTimes - UPDATE./update.requests - UPDATE./update.requestTimes - QUERY./get.requests - QUERY./get.requestTimes - ADMIN./admin/luke.requests - ADMIN./admin/luke.requestTimes - QUERY./query.requests - QUERY./query.requestTimes - INDEX.sizeInBytes - - - - - 5 - metrics_collector - 6188 - http - CACHE.searcher.filterCache - CACHE.searcher.queryResultCache - CACHE.searcher.documentCache - hitratio, size, warmupTime - - - - - 5 - metrics_collector - 6188 - http - CACHE.core.fieldCache - entries_count - - - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-plugin/pom.xml b/ambari-infra/ambari-infra-solr-plugin/pom.xml deleted file mode 100644 index c8e84d5ceb4..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/pom.xml +++ /dev/null @@ -1,88 +0,0 @@ - - - - - ambari-infra - org.apache.ambari - 2.0.0.0-SNAPSHOT - - Ambari Infra Solr Plugin - http://maven.apache.org - 4.0.0 - ambari-infra-solr-plugin - - - org.apache.solr - solr-core - ${solr.version} - - - org.apache.ambari - ambari-metrics-common - 2.0.0.0-SNAPSHOT - - - org.apache.solr - solr-test-framework - ${solr.version} - test - - - - - - src/main/resources - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.3 - - ${jdk.version} - ${jdk.version} - - - - org.apache.maven.plugins - maven-dependency-plugin - 2.8 - - - copy-dependencies - package - - copy-dependencies - - - true - ${basedir}/target/libs - false - false - true - runtime - - - - - - - \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSCacheReporter.java 
b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSCacheReporter.java deleted file mode 100644 index e25b80effda..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSCacheReporter.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import static org.apache.ambari.infra.solr.metrics.reporters.MetricsUtils.toTimelineMetric; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import org.apache.solr.metrics.SolrMetricManager; - -public class AMSCacheReporter extends AMSReporter> { - - private List fields; - - public AMSCacheReporter(SolrMetricManager metricManager, String registryName) { - super(metricManager, registryName); - } - - public void setFields(String fields) { - this.fields = Arrays.stream(fields.split(",")).map(String::trim).collect(Collectors.toList()); - } - - @Override - protected GaugeConverter> gaugeConverter() { - return (metricName, gauge, currentMillis) -> fields.stream() - .map(field -> toTimelineMetric( - String.format("%s.%s", metricName, field), - gauge.getValue().get(field).doubleValue(), - currentMillis)) - .collect(Collectors.toList()); - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSProtocol.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSProtocol.java deleted file mode 100644 index 404fc5c5756..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSProtocol.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.metrics.reporters; - -public enum AMSProtocol { - http, - https -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSReporter.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSReporter.java deleted file mode 100644 index 6e31fd473d3..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/AMSReporter.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import static org.apache.ambari.infra.solr.metrics.reporters.AMSProtocol.https; -import static org.apache.commons.lang.StringUtils.isBlank; - -import java.util.concurrent.TimeUnit; - -import org.apache.solr.metrics.FilteringSolrMetricReporter; -import org.apache.solr.metrics.SolrMetricManager; - -public abstract class AMSReporter extends FilteringSolrMetricReporter { - - private ScheduledAMSReporter reporter; - - private String amsCollectorHosts; - private int amsCollectorPort; - private AMSProtocol amsCollectorProtocol; - private String trustStoreLocation; - private String trustStoreType; - private String trustStorePassword; - - public AMSReporter(SolrMetricManager metricManager, String registryName) { - super(metricManager, registryName); - } - - protected abstract GaugeConverter gaugeConverter(); - - @Override - protected void doInit() { - reporter = new ScheduledAMSReporterBuilder() - .setRegistry(metricManager.registry(registryName)) - .setRateUnit(TimeUnit.SECONDS) - .setDurationUnit(TimeUnit.MILLISECONDS) - .setFilter(newMetricFilter()) - .setAMSClient(new SolrMetricsSink(amsCollectorHosts.split(","), amsCollectorPort, amsCollectorProtocol, - new SolrMetricsSecurityConfig(trustStoreLocation, trustStoreType, trustStorePassword))) - .setGaugeConverter(gaugeConverter()) - .setRegistryName(registryName) - .build(); - reporter.start(period, TimeUnit.SECONDS); - } - - @Override - protected void validate() throws IllegalStateException { - if (period < 1) { - throw new IllegalStateException("Init argument 'period' is in time unit 'seconds' and must be at least 1."); - } - if (https == amsCollectorProtocol) { - if (isBlank(trustStoreLocation)) - throw new IllegalStateException("If amsCollectorProtocol is https then trustStoreLocation can not be null or empty!"); - if (isBlank(trustStoreType)) - throw new IllegalStateException("If amsCollectorProtocol is https then trustStoreType can not be null or empty!"); - } - } - - @Override - public void close() { - if (reporter != null) { - reporter.close(); - } - } - - public void setAmsCollectorHosts(String amsCollectorHosts) { - 
this.amsCollectorHosts = amsCollectorHosts; - } - - public void setAmsCollectorPort(int amsCollectorPort) { - this.amsCollectorPort = amsCollectorPort; - } - - public void setAmsCollectorProtocol(String amsCollectorProtocol) { - this.amsCollectorProtocol = AMSProtocol.valueOf(amsCollectorProtocol); - } - - public void setTrustStoreLocation(String trustStoreLocation) { - this.trustStoreLocation = trustStoreLocation; - } - - public void setTrustStoreType(String trustStoreType) { - this.trustStoreType = trustStoreType; - } - - public void setTrustStorePassword(String trustStorePassword) { - this.trustStorePassword = trustStorePassword; - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/GaugeConverter.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/GaugeConverter.java deleted file mode 100644 index 31d452b99ec..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/GaugeConverter.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import java.util.Collection; - -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; - -import com.codahale.metrics.Gauge; - -public interface GaugeConverter { - Collection convert(String metricName, Gauge gauge, long currentMillis); -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java deleted file mode 100644 index 525c41924f0..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/MetricsUtils.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.TreeMap; - -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MetricsUtils { - private static final Logger LOG = LoggerFactory.getLogger(MetricsUtils.class); - private static final String APPID = "ambari-infra-solr"; - public static final String NAME_PREFIX = "infra."; - - private static final String hostName = initHostName(); - - private static String initHostName() { - String hostName = null; - try { - InetAddress ip = InetAddress.getLocalHost(); - String ipAddress = ip.getHostAddress(); - String ipHostName = ip.getHostName(); - String canonicalHostName = ip.getCanonicalHostName(); - if (!canonicalHostName.equalsIgnoreCase(ipAddress)) { - LOG.info("Using InetAddress.getCanonicalHostName()={}", canonicalHostName); - hostName = canonicalHostName; - } else { - LOG.info("Using InetAddress.getHostName()={}", ipHostName); - hostName = ipHostName; - } - LOG.info("ipAddress={}, ipHostName={}, canonicalHostName={}, hostName={}", ipAddress, ipHostName, canonicalHostName, hostName); - } catch (UnknownHostException e) { - LOG.error("Error getting hostname.", e); - } - - return hostName; - } - - public static String getHostName() { - return hostName; - } - - public static TimelineMetric toTimelineMetric(String name, double value, long currentMillis) { - TimelineMetric timelineMetric = newTimelineMetric(); - timelineMetric.setMetricName(name); - timelineMetric.setStartTime(currentMillis); - timelineMetric.setType("Long"); - TreeMap metricValues = new TreeMap<>(); - metricValues.put(currentMillis, value); - timelineMetric.setMetricValues(metricValues); - return timelineMetric; - } - - private static TimelineMetric newTimelineMetric() { - TimelineMetric timelineMetric = new TimelineMetric(); - timelineMetric.setAppId(APPID); - timelineMetric.setHostName(getHostName()); - return timelineMetric; - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java deleted file mode 100644 index 9a837faa91c..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporter.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import static org.apache.ambari.infra.solr.metrics.reporters.MetricsUtils.NAME_PREFIX; -import static org.apache.ambari.infra.solr.metrics.reporters.MetricsUtils.toTimelineMetric; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import java.util.SortedMap; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.codahale.metrics.Counter; -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Histogram; -import com.codahale.metrics.Meter; -import com.codahale.metrics.MetricAttribute; -import com.codahale.metrics.MetricFilter; -import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.ScheduledReporter; -import com.codahale.metrics.Snapshot; -import com.codahale.metrics.Timer; - -public class ScheduledAMSReporter extends ScheduledReporter { - - private static final Logger LOG = LoggerFactory.getLogger(ScheduledAMSReporter.class); - - private final SolrMetricsSink amsClient; - private final String namePrefix; - private final GaugeConverter gaugeConverter; - - protected ScheduledAMSReporter(String registryName, - MetricRegistry registry, - String name, - MetricFilter filter, - TimeUnit rateUnit, - TimeUnit durationUnit, - ScheduledExecutorService executor, - boolean shutdownExecutorOnStop, Set disabledMetricAttributes, - SolrMetricsSink amsClient, - GaugeConverter gaugeConverter) { - super(registry, name, filter, rateUnit, durationUnit, executor, shutdownExecutorOnStop, disabledMetricAttributes); - this.amsClient = amsClient; - namePrefix = String.format("%s%s.", NAME_PREFIX, registryName); - this.gaugeConverter = gaugeConverter; - } - - @Override - public void report(SortedMap gauges, - SortedMap counters, - SortedMap histograms, - SortedMap meters, - SortedMap timers) { - try { - long currentMillis = System.currentTimeMillis(); - List timelineMetricList = new ArrayList<>(); - gauges.forEach((metricName, gauge) -> - addTimelineMetrics(namePrefix + metricName, gauge, currentMillis, timelineMetricList)); - counters.forEach((metricName, counter) -> - timelineMetricList.add(toTimelineMetric(namePrefix + metricName, counter.getCount(), currentMillis))); - timers.forEach((metricName, timer) -> - addTimelineMetrics(namePrefix + metricName, timer, currentMillis, timelineMetricList)); - - if (timelineMetricList.isEmpty()) - return; - - TimelineMetrics timelineMetrics = new TimelineMetrics(); - timelineMetrics.setMetrics(timelineMetricList); - amsClient.emitMetrics(timelineMetrics); - } - catch (Exception ex) { - LOG.error("Unable to collect and send metrics", ex); - } - } - - private void addTimelineMetrics(String metricName, Gauge gauge, long currentMillis, List timelineMetricList) { - try { - timelineMetricList.addAll(gaugeConverter.convert(metricName, gauge, currentMillis)); - } catch (Exception ex) { - LOG.error("Unable to get value of gauge metric " + metricName, ex); - } - } - - private void addTimelineMetrics(String metricName, Timer timer, long currentTime, List timelineMetricList) { - try { - timelineMetricList.add(toTimelineMetric(metricName + ".avgRequestsPerSecond", timer.getMeanRate(), currentTime)); - Snapshot snapshot = timer.getSnapshot(); - timelineMetricList.add(toTimelineMetric(metricName + ".avgTimePerRequest", 
snapshot.getMean(), currentTime)); - timelineMetricList.add(toTimelineMetric(metricName + ".medianRequestTime", snapshot.getMedian(), currentTime)); - } catch (Exception ex) { - LOG.error("Unable to get value of timer metric " + metricName, ex); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporterBuilder.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporterBuilder.java deleted file mode 100644 index b76325202dd..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/ScheduledAMSReporterBuilder.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import java.util.Set; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import com.codahale.metrics.MetricAttribute; -import com.codahale.metrics.MetricFilter; -import com.codahale.metrics.MetricRegistry; - -public class ScheduledAMSReporterBuilder { - private String registryName; - private MetricRegistry registry; - private String name; - private MetricFilter filter; - private TimeUnit rateUnit; - private TimeUnit durationUnit; - private ScheduledExecutorService executor; - private boolean shutdownExecutorOnStop; - private Set disabledMetricAttributes; - private SolrMetricsSink amsClient; - private GaugeConverter gaugeConverter; - - public ScheduledAMSReporterBuilder setRegistryName(String name) { - this.registryName = name; - return this; - } - - public ScheduledAMSReporterBuilder setRegistry(MetricRegistry registry) { - this.registry = registry; - return this; - } - - public ScheduledAMSReporterBuilder setName(String name) { - this.name = name; - return this; - } - - public ScheduledAMSReporterBuilder setFilter(MetricFilter filter) { - this.filter = filter; - return this; - } - - public ScheduledAMSReporterBuilder setRateUnit(TimeUnit rateUnit) { - this.rateUnit = rateUnit; - return this; - } - - public ScheduledAMSReporterBuilder setDurationUnit(TimeUnit durationUnit) { - this.durationUnit = durationUnit; - return this; - } - - public ScheduledAMSReporterBuilder setExecutor(ScheduledExecutorService executor) { - this.executor = executor; - return this; - } - - public ScheduledAMSReporterBuilder setShutdownExecutorOnStop(boolean shutdownExecutorOnStop) { - this.shutdownExecutorOnStop = shutdownExecutorOnStop; - return this; - } - - public ScheduledAMSReporterBuilder setDisabledMetricAttributes(Set disabledMetricAttributes) { - this.disabledMetricAttributes = disabledMetricAttributes; - return this; - } - - public 
ScheduledAMSReporterBuilder setAMSClient(SolrMetricsSink amsClient) { - this.amsClient = amsClient; - return this; - } - - public ScheduledAMSReporterBuilder setGaugeConverter(GaugeConverter gaugeConverter) { - this.gaugeConverter = gaugeConverter; - return this; - } - - public ScheduledAMSReporter build() { - return new ScheduledAMSReporter<>(registryName, registry, name, filter, rateUnit, durationUnit, executor, - shutdownExecutorOnStop, disabledMetricAttributes, amsClient, gaugeConverter); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SimpleAMSReporter.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SimpleAMSReporter.java deleted file mode 100644 index af6fd15da64..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SimpleAMSReporter.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import static java.util.Collections.singletonList; -import static org.apache.ambari.infra.solr.metrics.reporters.MetricsUtils.toTimelineMetric; - -import org.apache.solr.metrics.SolrMetricManager; - -public class SimpleAMSReporter extends AMSReporter { - - public SimpleAMSReporter(SolrMetricManager metricManager, String registryName) { - super(metricManager, registryName); - } - - @Override - protected GaugeConverter gaugeConverter() { - return (metricName, gauge, currentMillis) -> - singletonList(toTimelineMetric(metricName, gauge.getValue().doubleValue(), currentMillis)); - } - -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSecurityConfig.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSecurityConfig.java deleted file mode 100644 index 247bd33bab3..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSecurityConfig.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.infra.solr.metrics.reporters; - -public class SolrMetricsSecurityConfig { - - private final String trustStoreLocation; - private final String trustStoreType; - private final String trustStorePassword; - - public SolrMetricsSecurityConfig(String trustStoreLocation, String trustStoreType, String trustStorePassword) { - this.trustStoreLocation = trustStoreLocation; - this.trustStoreType = trustStoreType; - this.trustStorePassword = trustStorePassword; - } - - public String getTrustStoreLocation() { - return trustStoreLocation; - } - - public String getTrustStoreType() { - return trustStoreType; - } - - public String getTrustStorePassword() { - return trustStorePassword; - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java deleted file mode 100644 index 201c7977867..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/solr/metrics/reporters/SolrMetricsSink.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.infra.solr.metrics.reporters; - -import static java.util.Arrays.asList; -import static org.apache.ambari.infra.solr.metrics.reporters.AMSProtocol.https; -import static org.apache.commons.lang.StringUtils.join; - -import java.util.Collection; - -import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SolrMetricsSink extends AbstractTimelineMetricsSink { - private static final Logger LOG = LoggerFactory.getLogger(SolrMetricsSink.class); - - private final Collection collectorHosts; - private final int port; - private final AMSProtocol protocol; - - public SolrMetricsSink(String[] collectorHosts, int port, AMSProtocol protocol, SolrMetricsSecurityConfig securityConfig) { - LOG.info("Setting up SolrMetricsSink protocol={} hosts={} port={}", protocol.name(), join(collectorHosts, ","), port); - this.collectorHosts = asList(collectorHosts); - this.port = port; - this.protocol = protocol; - - if (protocol == https) - loadTruststore(securityConfig.getTrustStoreLocation(), securityConfig.getTrustStoreType(), securityConfig.getTrustStorePassword()); - } - - @Override - protected String getCollectorUri(String host) { - return constructTimelineMetricUri(this.protocol.name(), host, getCollectorPort()); - } - - @Override - protected String getCollectorProtocol() { - return protocol.name(); - } - - @Override - protected String getCollectorPort() { - return Integer.toString(port); - } - - @Override - protected int getTimeoutSeconds() { - return 0; - } - - @Override - protected String getZookeeperQuorum() { - return null; - } - - @Override - protected Collection getConfiguredCollectorHosts() { - return collectorHosts; - } - - @Override - protected String getHostname() { - return MetricsUtils.getHostName(); - } - - @Override - protected boolean isHostInMemoryAggregationEnabled() { - return false; - } - - @Override - protected int getHostInMemoryAggregationPort() { - return 0; - } - - @Override - protected String getHostInMemoryAggregationProtocol() { - return protocol.name(); - } - - @Override - public boolean emitMetrics(TimelineMetrics metrics) { - return super.emitMetrics(metrics); - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraKerberosHostValidator.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraKerberosHostValidator.java deleted file mode 100644 index 606adcaf741..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraKerberosHostValidator.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.solr.security; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.apache.hadoop.security.authentication.util.KerberosName; - -import java.security.Principal; -import java.util.Map; -import java.util.Set; - -/** - * Validate that the user has the right access based on the hostname in the kerberos principal - */ -public class InfraKerberosHostValidator { - - public boolean validate(Principal principal, Map> userVsHosts, Map userVsHostRegex) { - if (principal instanceof AuthenticationToken) { - AuthenticationToken authenticationToken = (AuthenticationToken) principal; - KerberosName kerberosName = new KerberosName(authenticationToken.getName()); - String hostname = kerberosName.getHostName(); - String serviceUserName = kerberosName.getServiceName(); - if (MapUtils.isNotEmpty(userVsHostRegex)) { - String regex = userVsHostRegex.get(serviceUserName); - return hostname.matches(regex); - } - if (MapUtils.isNotEmpty(userVsHosts)) { - Set hosts = userVsHosts.get(serviceUserName); - if (CollectionUtils.isNotEmpty(hosts)) { - return hosts.contains(hostname); - } - } - } - return true; - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraRuleBasedAuthorizationPlugin.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraRuleBasedAuthorizationPlugin.java deleted file mode 100644 index cc55d7db3de..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraRuleBasedAuthorizationPlugin.java +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.solr.security; - -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.security.Principal; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Function; - -import org.apache.solr.common.util.CommandOperation; -import org.apache.solr.common.util.Utils; -import org.apache.solr.common.util.ValidatingJsonMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static java.util.Arrays.asList; -import static java.util.Collections.singleton; -import static java.util.Collections.unmodifiableMap; -import static java.util.function.Function.identity; -import static java.util.stream.Collectors.toMap; -import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue; -import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue; - -/** - * Modified copy of solr.RuleBasedAuthorizationPlugin to handle role - permission mappings with KereberosPlugin - * Added 2 new JSON map: (precedence: user-host-regex > user-host) - * 1. "user-host": user host mappings (array) for hostname validation - * 2. "user-host-regex": user host regex mapping (string) for hostname validation - */ -public class InfraRuleBasedAuthorizationPlugin extends RuleBasedAuthorizationPlugin { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final Map> usersVsRoles = new HashMap<>(); - private final Map mapping = new HashMap<>(); - private final List permissions = new ArrayList<>(); - private final Map> userVsHosts = new HashMap<>(); - private final Map userVsHostRegex = new HashMap<>(); - - private InfraKerberosHostValidator infraKerberosHostValidator = new InfraKerberosHostValidator(); - private InfraUserRolesLookupStrategy infraUserRolesLookupStrategy = new InfraUserRolesLookupStrategy(); - - - private static class WildCardSupportMap extends HashMap> { - final Set wildcardPrefixes = new HashSet<>(); - - @Override - public List put(String key, List value) { - if (key != null && key.endsWith("/*")) { - key = key.substring(0, key.length() - 2); - wildcardPrefixes.add(key); - } - return super.put(key, value); - } - - @Override - public List get(Object key) { - List result = super.get(key); - if (key == null || result != null) return result; - if (!wildcardPrefixes.isEmpty()) { - for (String s : wildcardPrefixes) { - if (key.toString().startsWith(s)) { - List l = super.get(s); - if (l != null) { - result = result == null ? new ArrayList<>() : new ArrayList<>(result); - result.addAll(l); - } - } - } - } - return result; - } - } - - @Override - public AuthorizationResponse authorize(AuthorizationContext context) { - List collectionRequests = context.getCollectionRequests(); - if (context.getRequestType() == AuthorizationContext.RequestType.ADMIN) { - InfraRuleBasedAuthorizationPlugin.MatchStatus flag = checkCollPerm(mapping.get(null), context); - return flag.rsp; - } - - for (AuthorizationContext.CollectionRequest collreq : collectionRequests) { - //check permissions for each collection - InfraRuleBasedAuthorizationPlugin.MatchStatus flag = checkCollPerm(mapping.get(collreq.collectionName), context); - if (flag != InfraRuleBasedAuthorizationPlugin.MatchStatus.NO_PERMISSIONS_FOUND) return flag.rsp; - } - //check wildcard (all=*) permissions. 
- InfraRuleBasedAuthorizationPlugin.MatchStatus flag = checkCollPerm(mapping.get("*"), context); - return flag.rsp; - } - - private InfraRuleBasedAuthorizationPlugin.MatchStatus checkCollPerm(Map> pathVsPerms, - AuthorizationContext context) { - if (pathVsPerms == null) return InfraRuleBasedAuthorizationPlugin.MatchStatus.NO_PERMISSIONS_FOUND; - - String path = context.getResource(); - InfraRuleBasedAuthorizationPlugin.MatchStatus flag = checkPathPerm(pathVsPerms.get(path), context); - if (flag != InfraRuleBasedAuthorizationPlugin.MatchStatus.NO_PERMISSIONS_FOUND) return flag; - return checkPathPerm(pathVsPerms.get(null), context); - } - - private InfraRuleBasedAuthorizationPlugin.MatchStatus checkPathPerm(List permissions, AuthorizationContext context) { - if (permissions == null || permissions.isEmpty()) return InfraRuleBasedAuthorizationPlugin.MatchStatus.NO_PERMISSIONS_FOUND; - Principal principal = context.getUserPrincipal(); - loopPermissions: - for (int i = 0; i < permissions.size(); i++) { - Permission permission = permissions.get(i); - if (PermissionNameProvider.values.containsKey(permission.name)) { - if (context.getHandler() instanceof PermissionNameProvider) { - PermissionNameProvider handler = (PermissionNameProvider) context.getHandler(); - PermissionNameProvider.Name permissionName = handler.getPermissionName(context); - if (permissionName == null || !permission.name.equals(permissionName.name)) { - continue; - } - } else { - //all is special. it can match any - if(permission.wellknownName != PermissionNameProvider.Name.ALL) continue; - } - } else { - if (permission.method != null && !permission.method.contains(context.getHttpMethod())) { - //this permissions HTTP method does not match this rule. try other rules - continue; - } - if (permission.params != null) { - for (Map.Entry> e : permission.params.entrySet()) { - String[] paramVal = context.getParams().getParams(e.getKey()); - if(!e.getValue().apply(paramVal)) continue loopPermissions; - } - } - } - - if (permission.role == null) { - //no role is assigned permission.That means everybody is allowed to access - return InfraRuleBasedAuthorizationPlugin.MatchStatus.PERMITTED; - } - if (principal == null) { - log.info("request has come without principal. failed permission {} ",permission); - //this resource needs a principal but the request has come without - //any credential. - return InfraRuleBasedAuthorizationPlugin.MatchStatus.USER_REQUIRED; - } else if (permission.role.contains("*")) { - return InfraRuleBasedAuthorizationPlugin.MatchStatus.PERMITTED; - } - - for (String role : permission.role) { - Set userRoles = infraUserRolesLookupStrategy.getUserRolesFromPrincipal(usersVsRoles, principal); - boolean validHostname = infraKerberosHostValidator.validate(principal, userVsHosts, userVsHostRegex); - if (!validHostname) { - log.warn("Hostname is not valid for principal {}", principal); - return MatchStatus.FORBIDDEN; - } - if (userRoles != null && userRoles.contains(role)) return MatchStatus.PERMITTED; - } - log.info("This resource is configured to have a permission {}, The principal {} does not have the right role ", permission, principal); - return InfraRuleBasedAuthorizationPlugin.MatchStatus.FORBIDDEN; - } - log.debug("No permissions configured for the resource {} . 
So allowed to access", context.getResource()); - return InfraRuleBasedAuthorizationPlugin.MatchStatus.NO_PERMISSIONS_FOUND; - } - - @Override - public void init(Map initInfo) { - mapping.put(null, new InfraRuleBasedAuthorizationPlugin.WildCardSupportMap()); - Map map = getMapValue(initInfo, "user-role"); - for (Object o : map.entrySet()) { - Map.Entry e = (Map.Entry) o; - String roleName = (String) e.getKey(); - usersVsRoles.put(roleName, Permission.readValueAsSet(map, roleName)); - } - List perms = getListValue(initInfo, "permissions"); - for (Map o : perms) { - Permission p; - try { - p = Permission.load(o); - } catch (Exception exp) { - log.error("Invalid permission ", exp); - continue; - } - permissions.add(p); - add2Mapping(p); - // adding user-host - Map userHostsMap = getMapValue(initInfo, "user-host"); - for (Object userHost : userHostsMap.entrySet()) { - Map.Entry e = (Map.Entry) userHost; - String roleName = (String) e.getKey(); - userVsHosts.put(roleName, readValueAsSet(userHostsMap, roleName)); - } - // adding user-host-regex - Map userHostRegexMap = getMapValue(initInfo, "user-host-regex"); - for (Map.Entry entry : userHostRegexMap.entrySet()) { - userVsHostRegex.put(entry.getKey(), entry.getValue().toString()); - } - } - } - - /** - * read a key value as a set. if the value is a single string , - * return a singleton set - * - * @param m the map from which to lookup - * @param key the key with which to do lookup - */ - static Set readValueAsSet(Map m, String key) { - Set result = new HashSet<>(); - Object val = m.get(key); - if (val == null) { - if("collection".equals(key)){ - //for collection collection: null means a core admin/ collection admin request - // otherwise it means a request where collection name is ignored - return m.containsKey(key) ? singleton((String) null) : singleton("*"); - } - return null; - } - if (val instanceof Collection) { - Collection list = (Collection) val; - for (Object o : list) result.add(String.valueOf(o)); - } else if (val instanceof String) { - result.add((String) val); - } else { - throw new RuntimeException("Bad value for : " + key); - } - return result.isEmpty() ? 
null : Collections.unmodifiableSet(result); - } - - //this is to do optimized lookup of permissions for a given collection/path - private void add2Mapping(Permission permission) { - for (String c : permission.collections) { - InfraRuleBasedAuthorizationPlugin.WildCardSupportMap m = mapping.get(c); - if (m == null) mapping.put(c, m = new InfraRuleBasedAuthorizationPlugin.WildCardSupportMap()); - for (String path : permission.path) { - List perms = m.get(path); - if (perms == null) m.put(path, perms = new ArrayList<>()); - perms.add(permission); - } - } - } - - - @Override - public void close() throws IOException { } - - enum MatchStatus { - USER_REQUIRED(AuthorizationResponse.PROMPT), - NO_PERMISSIONS_FOUND(AuthorizationResponse.OK), - PERMITTED(AuthorizationResponse.OK), - FORBIDDEN(AuthorizationResponse.FORBIDDEN); - - final AuthorizationResponse rsp; - - MatchStatus(AuthorizationResponse rsp) { - this.rsp = rsp; - } - } - - - - @Override - public Map edit(Map latestConf, List commands) { - for (CommandOperation op : commands) { - AutorizationEditOperation operation = ops.get(op.name); - if (operation == null) { - op.unknownOperation(); - return null; - } - latestConf = operation.edit(latestConf, op); - if (latestConf == null) return null; - - } - return latestConf; - } - - private static final Map ops = unmodifiableMap(asList(AutorizationEditOperation.values()).stream().collect(toMap(AutorizationEditOperation::getOperationName, identity()))); - - - @Override - public ValidatingJsonMap getSpec() { - return Utils.getSpec("cluster.security.InfraRuleBasedAuthorization").getSpec(); - } -} \ No newline at end of file diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraUserRolesLookupStrategy.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraUserRolesLookupStrategy.java deleted file mode 100644 index 119a339312a..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/solr/security/InfraUserRolesLookupStrategy.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.solr.security; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.apache.hadoop.security.authentication.util.KerberosName; - -import java.security.Principal; -import java.util.Map; -import java.util.Set; - - -/** - * Strategy class to get roles with the principal name (in a specific format e.g.: 'name@DOMAIN') - * in case of KerberosPlugin is used for authentication - */ -public class InfraUserRolesLookupStrategy { - - public Set getUserRolesFromPrincipal(Map> usersVsRoles, Principal principal) { - if (principal instanceof AuthenticationToken) { - AuthenticationToken authenticationToken = (AuthenticationToken) principal; - KerberosName kerberosName = new KerberosName(authenticationToken.getName()); - Set rolesResult = usersVsRoles.get(String.format("%s@%s", kerberosName.getServiceName(), kerberosName.getRealm())); - if (CollectionUtils.isEmpty(rolesResult)) { - rolesResult = usersVsRoles.get(principal.getName()); - } - return rolesResult; - } else { - return usersVsRoles.get(principal.getName()); - } - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/resources/apispec/cluster.security.InfraRuleBasedAuthorization.json b/ambari-infra/ambari-infra-solr-plugin/src/main/resources/apispec/cluster.security.InfraRuleBasedAuthorization.json deleted file mode 100644 index 4a7fdbebabd..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/main/resources/apispec/cluster.security.InfraRuleBasedAuthorization.json +++ /dev/null @@ -1,129 +0,0 @@ -{ - "documentation": "https://lucene.apache.org/solr/guide/rule-based-authorization-plugin.html", - "description": "Defines roles for accessing Solr, and assigns users to those roles. Use this API to change user authorizations to each of Solr's components.", - "methods": [ - "POST" - ], - "url": { - "paths": [ - "/cluster/security/authorization" - ] - }, - "commands": { - "set-permission": { - "type":"object", - "description": "Create a new permission, overwrite an existing permission definition, or assign a pre-defined permission to a role.", - "properties": { - "name":{ - "type":"string", - "description": "The name of the permission. The name will be used to update or delete the permission later." - }, - "method":{ - "type":"string", - "enum":["GET", "POST", "DELETE","PUT"], - "description": "HTTP methods that are allowed for this permission. You could allow only GET requests, or have a role that allows PUT and POST requests. The method values that are allowed for this property are GET, POST, PUT, DELETE and HEAD." - }, - - "collection":{ - "type":"array", - "items": { - "type": "string" - }, - "description":"The collection or collections the permission will apply to. When the path that will be allowed is collection-specific, such as when setting permissions to allow use of the Schema API, omitting the collection property will allow the defined path and/or method for all collections. However, when the path is one that is non-collection-specific, such as the Collections API, the collection value must be null. In this case, two permissions may need to be created; one for collection-specific API paths allowing access to all collections, and another for non-collection-specific paths defining no collection limitations." - }, - - "path":{ - "type":"array", - "items": { - "type": "string" - }, - "description":"A request handler name, such as /update or /select. 
A wild card is supported, to allow for all paths as appropriate (such as, /update/*)." - }, - "index": { - "type": "integer", - "description": "The index of the permission you wish to overwrite. Skip this if it is a new permission that should be created." - }, - "before":{ - "type": "integer", - "description":"This property allows ordering of permissions. The value for this property is the name of the permission that this new permission should be placed before in security.json." - }, - "params":{ - "type":"object", - "additionalProperties":true, - "description": "The names and values of request parameters. This property can be omitted if all request parameters are allowed, but will restrict access only to the values provided if defined." - }, - "role": { - "type": "array", - "items": { - "type": "string", - "description": "The name of the role(s) to give this permission. This name will be used to map user IDs to the role to grant these permissions. The value can be wildcard such as (*), which means that any user is OK, but no user is NOT OK." - } - } - }, - "required": [ - "role" - ] - }, - "update-permission": { - "type":"object", - "properties": { - "name": { - "type": "string", - "description": "The name of the permission. The name will be used to update or delete the permission later." - }, - "method": { - "type": "string", - "description": "HTTP methods that are allowed for this permission. You could allow only GET requests, or have a role that allows PUT and POST requests. The method values that are allowed for this property are GET, POST, PUT, DELETE and HEAD." - }, - "collection": { - "type":"array", - "items": { - "type": "string" - }, - "description": "The collection or collections the permission will apply to. When the path that will be allowed is collection-specific, such as when setting permissions to allow use of the Schema API, omitting the collection property will allow the defined path and/or method for all collections. However, when the path is one that is non-collection-specific, such as the Collections API, the collection value must be null. In this case, two permissions may need to be created; one for collection-specific API paths allowing access to all collections, and another for non-collection-specific paths defining no collection limitations." - }, - "path": { - "type":"array", - "items": { - "type": "string" - }, - "description": "A request handler name, such as /update or /select. A wild card is supported, to allow for all paths as appropriate (such as, /update/*)." - }, - "index": { - "type": "integer", - "description": "The index of the permission you wish to overwrite." - }, - "before": { - "type": "integer", - "description": "This property allows ordering of permissions. The value for this property is the index of the permission that this new permission should be placed before in security.json." - }, - "role": { - "type": "array", - "items": { - "type": "string", - "description": "The name of the role(s) to give this permission. This name will be used to map user IDs to the role to grant these permissions. The value can be wildcard such as (*), which means that any user is OK, but no user is NOT OK." - } - }, - "params": { - "type": "object", - "additionalProperties": true, - "description": "The names and values of request parameters. This property can be omitted if all request parameters are allowed, but will restrict access only to the values provided if defined." 
- } - }, - "required": [ - "role", - "index" - ] - }, - "delete-permission":{ - "description":"delete a permission by its index", - "type":"integer" - }, - "set-user-role": { - "type":"object", - "description": "A single command allows roles to be mapped to users. To remove a user's permission, you should set the role to null. The key is always a user id and the value is one or more role names.", - "additionalProperties":true - - } - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraKerberosHostValidatorTest.java b/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraKerberosHostValidatorTest.java deleted file mode 100644 index b55051543c3..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraKerberosHostValidatorTest.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.solr.security; - -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class InfraKerberosHostValidatorTest { - - private static final String DEFAULT_SERVICE_USER = "logsearch"; - - private InfraKerberosHostValidator underTest = new InfraKerberosHostValidator(); - private AuthenticationToken principal; - - - @Before - public void setUp() { - principal = new AuthenticationToken(DEFAULT_SERVICE_USER, DEFAULT_SERVICE_USER + "/c6401.ambari.apache.org@EXAMPLE.COM", "kerberos"); - } - - @Test - public void testValidateHosts() { - // GIVEN - Map> userHostsMap = generateUserHostMap("c6401.ambari.apache.org"); - // WHEN - boolean result = underTest.validate(principal, userHostsMap, new HashMap()); - // THEN - assertTrue(result); - } - - @Test - public void testValidateHostsValid() { - // GIVEN - Map> userHostsMap = generateUserHostMap("c6402.ambari.apache.org"); - // WHEN - boolean result = underTest.validate(principal, userHostsMap, new HashMap()); - // THEN - assertFalse(result); - - } - - @Test - public void testValidateHostRegex() { - // GIVEN - Map userHostRegex = generateRegexMap("c\\d+.*.apache.org"); - // WHEN - boolean result = underTest.validate(principal, new HashMap>(), userHostRegex); - // THEN - assertTrue(result); - - } - - @Test - public void testValidateHostRegexInvalid() { - // GIVEN - Map userHostRegex = generateRegexMap("c\\d+.*.org.apache"); - // WHEN - boolean result = underTest.validate(principal, new HashMap>(), userHostRegex); - // THEN - assertFalse(result); - } - - @Test - public void testPrecedence() { 
- // GIVEN - Map> userHostsMap = generateUserHostMap("c6402.ambari.apache.org"); - Map userHostRegex = generateRegexMap("c\\d+.*.apache.org"); - // WHEN - boolean result = underTest.validate(principal, userHostsMap, userHostRegex); - // THEN - assertTrue(result); - } - - private Map> generateUserHostMap(String... hosts) { - Map> map = new HashMap<>(); - Set hostSet = new HashSet<>(); - for (String host : hosts) { - hostSet.add(host); - } - map.put(DEFAULT_SERVICE_USER, hostSet); - return map; - } - - private Map generateRegexMap(String regex) { - Map map = new HashMap<>(); - map.put(DEFAULT_SERVICE_USER, regex); - return map; - } -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraRuleBasedAuthorizationPluginTest.java b/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraRuleBasedAuthorizationPluginTest.java deleted file mode 100644 index 1348fd34f35..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraRuleBasedAuthorizationPluginTest.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.solr.security; - -import java.security.Principal; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.apache.solr.common.params.MapSolrParams; -import org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.Utils; -import org.apache.solr.handler.UpdateRequestHandler; -import org.apache.solr.handler.admin.CollectionsHandler; -import org.apache.solr.security.AuthorizationContext.RequestType; -import org.junit.Test; - -import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; -import static org.apache.solr.common.util.Utils.makeMap; -import static org.junit.Assert.assertEquals; - -public class InfraRuleBasedAuthorizationPluginTest { - - private static final String PERMISSIONS = "{" + - " user-host : {" + - " 'infra-solr@EXAMPLE.COM': [hostname, hostname2]" + - " }," + - " user-role : {" + - " 'infra-solr@EXAMPLE.COM': [admin]," + - " 'logsearch@EXAMPLE.COM': [logsearch_role,dev]," + - " 'logfeeder@EXAMPLE.COM': [logsearch_role,dev]," + - " 'atlas@EXAMPLE.COM': [atlas_role, audit_role, dev]," + - " 'knox@EXAMPLE.COM': [audit_role,dev]," + - " 'hdfs@EXAMPLE.COM': [audit_role,dev]," + - " 'hbase@EXAMPLE.COM': [audit_role,dev]," + - " 'yarn@EXAMPLE.COM': [audit_role,dev]," + - " 'knox@EXAMPLE.COM': [audit_role,dev]," + - " 'kafka@EXAMPLE.COM': [audit_role,dev]," + - " 'kms@EXAMPLE.COM': [audit_role,dev]," + - " 'storm@EXAMPLE.COM': [audit_role,dev]," + - " 'rangeradmin@EXAMPLE.COM':[ranger_role, audit_role, dev]" + - " }," + - " permissions : [" + - " {name:'collection-admin-read'," + - " role:null}," + - " {name:collection-admin-edit ," + - " role:[logsearch_role, atlas_role, ranger_role, admin]}," + - " {name:mycoll_update," + - " collection:mycoll," + - " path:'/*'," + - " role:[logsearch_role,admin]" + - " }," + - " {name:mycoll2_update," + - " collection:mycoll2," + - " path:'/*'," + - " role:[ranger_role, audit_role, admin]" + - " }," + - "{name:read , role:dev }]}"; - - @Test - public void testPermissions() { - int STATUS_OK = 200; - int FORBIDDEN = 403; - int PROMPT_FOR_CREDENTIALS = 401; - - checkRules(makeMap("resource", "/update/json/docs", - "httpMethod", "POST", - "userPrincipal", "tim", - "collectionRequests", "mycoll", - "handler", new UpdateRequestHandler()) - , FORBIDDEN); - - checkRules(makeMap("resource", "/update/json/docs", - "httpMethod", "POST", - "userPrincipal", "logsearch", - "collectionRequests", "mycoll", - "handler", new UpdateRequestHandler()) - , STATUS_OK); - - checkRules(makeMap("resource", "/update/json/docs", - "httpMethod", "GET", - "userPrincipal", "rangeradmin", - "collectionRequests", "mycoll", - "handler", new UpdateRequestHandler()) - , FORBIDDEN); - - checkRules(makeMap("resource", "/update/json/docs", - "httpMethod", "GET", - "userPrincipal", "rangeradmin", - "collectionRequests", "mycoll2", - "handler", new UpdateRequestHandler()) - , STATUS_OK); - - checkRules(makeMap("resource", "/update/json/docs", - "httpMethod", "GET", - "userPrincipal", "logsearch", - "collectionRequests", "mycoll2", - "handler", new UpdateRequestHandler()) - , FORBIDDEN); - - checkRules(makeMap("resource", "/update/json/docs", - "httpMethod", "POST", - "userPrincipal", "kms", - "collectionRequests", "mycoll2", - "handler", new UpdateRequestHandler()) - , STATUS_OK); - - checkRules(makeMap("resource", "/admin/collections", - "userPrincipal", "tim", 
- "requestType", RequestType.ADMIN, - "collectionRequests", null, - "handler" , new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , FORBIDDEN); - - checkRules(makeMap("resource", "/admin/collections", - "userPrincipal", null, - "requestType", RequestType.ADMIN, - "collectionRequests", null, - "handler" , new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(makeMap("resource", "/admin/collections", - "userPrincipal", "rangeradmin", - "requestType", RequestType.ADMIN, - "collectionRequests", null, - "handler" , new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , STATUS_OK); - - checkRules(makeMap("resource", "/admin/collections", - "userPrincipal", "kms", - "requestType", RequestType.ADMIN, - "collectionRequests", null, - "handler" , new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , FORBIDDEN); - - checkRules(makeMap("resource", "/admin/collections", - "userPrincipal", "kms", - "requestType", RequestType.ADMIN, - "collectionRequests", null, - "handler" , new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "LIST"))) - , STATUS_OK); - - checkRules(makeMap("resource", "/admin/collections", - "userPrincipal", "rangeradmin", - "requestType", RequestType.ADMIN, - "collectionRequests", null, - "handler" , new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "LIST"))) - , STATUS_OK); - } - - private void checkRules(Map values, int expected) { - checkRules(values,expected,(Map) Utils.fromJSONString(PERMISSIONS)); - } - - private void checkRules(Map values, int expected, Map permissions) { - AuthorizationContext context = new MockAuthorizationContext(values); - InfraRuleBasedAuthorizationPlugin plugin = new InfraRuleBasedAuthorizationPlugin(); - plugin.init(permissions); - AuthorizationResponse authResp = plugin.authorize(context); - assertEquals(expected, authResp.statusCode); - } - - private static class MockAuthorizationContext extends AuthorizationContext { - private final Map values; - - private MockAuthorizationContext(Map values) { - this.values = values; - } - - @Override - public SolrParams getParams() { - SolrParams params = (SolrParams) values.get("params"); - return params == null ? new MapSolrParams(new HashMap()) : params; - } - - @Override - public Principal getUserPrincipal() { - Object userPrincipal = values.get("userPrincipal"); - return userPrincipal == null ? 
null : - new AuthenticationToken(String.valueOf(userPrincipal), String.format("%s%s", String.valueOf(userPrincipal), "/hostname@EXAMPLE.COM"), "kerberos"); - } - - @Override - public String getHttpHeader(String header) { - return null; - } - - @Override - public Enumeration getHeaderNames() { - return null; - } - - @Override - public String getRemoteAddr() { - return null; - } - - @Override - public String getRemoteHost() { - return null; - } - - @Override - public List getCollectionRequests() { - Object collectionRequests = values.get("collectionRequests"); - if (collectionRequests instanceof String) { - return singletonList(new CollectionRequest((String)collectionRequests)); - } - return (List) collectionRequests; - } - - @Override - public RequestType getRequestType() { - return (RequestType) values.get("requestType"); - } - - @Override - public String getHttpMethod() { - return (String) values.get("httpMethod"); - } - - @Override - public String getResource() { - return (String) values.get("resource"); - } - - @Override - public Object getHandler() { - Object handler = values.get("handler"); - return handler instanceof String ? (PermissionNameProvider) request -> PermissionNameProvider.Name.get((String) handler) : handler; - } - } - -} diff --git a/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraUserRolesLookupStrategyTest.java b/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraUserRolesLookupStrategyTest.java deleted file mode 100644 index 8f568759694..00000000000 --- a/ambari-infra/ambari-infra-solr-plugin/src/test/java/org/apache/solr/security/InfraUserRolesLookupStrategyTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.solr.security; - -import com.google.common.collect.Sets; -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.apache.http.auth.BasicUserPrincipal; -import org.junit.Test; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class InfraUserRolesLookupStrategyTest { - - private InfraUserRolesLookupStrategy underTest = new InfraUserRolesLookupStrategy(); - - @Test - public void testLookupRolesForPrincipalName() { - // GIVEN - Map> usersVsRoles = generateUserRolesMap(); - AuthenticationToken principal = new AuthenticationToken( - "logsearch", "logsearch/c6401.ambari.apache.org@EXAMPLE.COM", "kerberos"); - // WHEN - Set result = underTest.getUserRolesFromPrincipal(usersVsRoles, principal); - // THEN - assertTrue(result.contains("logsearch_user")); - assertTrue(result.contains("ranger_user")); - assertFalse(result.contains("admin")); - } - - @Test - public void testLookupRolesForNonKerberosPrincipalName() { - // GIVEN - Map> usersVsRoles = generateUserRolesMap(); - BasicUserPrincipal principal = new BasicUserPrincipal("infra-solr"); - // WHEN - Set result = underTest.getUserRolesFromPrincipal(usersVsRoles, principal); - // THEN - assertTrue(result.contains("admin")); - assertTrue(result.contains("logsearch_user")); - } - - @Test - public void testLookupRolesWithNonKerberosPrincipalWithoutRoles() { - // GIVEN - Map> usersVsRoles = generateUserRolesMap(); - BasicUserPrincipal principal = new BasicUserPrincipal("unknownuser"); - // WHEN - Set result = underTest.getUserRolesFromPrincipal(usersVsRoles, principal); - // THEN - assertTrue(result.isEmpty()); - } - - private Map> generateUserRolesMap() { - Map> usersVsRoles = new HashMap<>(); - usersVsRoles.put("logsearch@EXAMPLE.COM", Sets.newHashSet("logsearch_user", "ranger_user")); - usersVsRoles.put("infra-solr@EXAMPLE.COM", Sets.newHashSet("admin")); - usersVsRoles.put("infra-solr", Sets.newHashSet("admin", "logsearch_user")); - usersVsRoles.put("unknownuser", new HashSet()); - return usersVsRoles; - } -} diff --git a/ambari-infra/pom.xml b/ambari-infra/pom.xml deleted file mode 100644 index 4185abbc048..00000000000 --- a/ambari-infra/pom.xml +++ /dev/null @@ -1,366 +0,0 @@ - - - - 4.0.0 - org.apache.ambari - ambari-infra - 2.0.0.0-SNAPSHOT - pom - - - 1.8 - 7.4.0 - UTF-8 - python >= 2.6 - python (>= 2.6) - amd64 - ${deb.python.ver} - 3.0.0 - -Xmx1024m -Xms512m - 3.4.6.2.3.0.0-2557 - false - - - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - Apache Software Foundation - http://www.apache.org - - - jira - https://issues.apache.org/jira/browse/AMBARI - - - - - apache-hadoop - hdp - http://repo.hortonworks.com/content/groups/public/ - - - oss.sonatype.org - OSS Sonatype Staging - https://oss.sonatype.org/content/groups/staging - - - spring-milestones - Spring Milestones - http://repo.spring.io/milestone - - false - - - - ASF Staging - https://repository.apache.org/content/groups/staging/ - - - ASF Snapshots - https://repository.apache.org/content/repositories/snapshots/ - - true - - - false - - - - - - ambari-infra-assembly - ambari-infra-solr-client - ambari-infra-solr-plugin - ambari-infra-manager - ambari-infra-manager-it - - - - - - - org.codehaus.mojo - rpm-maven-plugin - 2.1.4 - - - - none - - attached-rpm - - - - - 2012, Apache Software Foundation - Development - Maven 
Recipe: RPM Package. - ${package-release} - ${package-version} - - - - - org.vafer - jdeb - 1.4 - - - - stub-execution - none - - jdeb - - - - - true - false - false - ${project.basedir}/../src/main/package/deb/control - - - - org.apache.maven.plugins - maven-enforcer-plugin - 1.4.1 - - - org.codehaus.mojo - properties-maven-plugin - 1.0.0 - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - - - org.apache.maven.plugins - maven-failsafe-plugin - 2.20 - - - - - - false - maven-assembly-plugin - - - ../ambari-project/src/main/assemblies/empty.xml - - - - - build-tarball - none - - single - - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - ${skipSurefireTests} - - - - maven-compiler-plugin - 3.2 - - ${jdk.version} - ${jdk.version} - - - - org.apache.maven.plugins - maven-clean-plugin - 2.5 - - - - ${basedir} - false - - **/*.pyc - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.8 - - - parse-version - validate - - parse-version - - - - regex-property - - regex-property - - - ambariVersion - ${project.version} - ^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).* - $1.$2.$3.$4 - false - - - - parse-package-version - - regex-property - - - package-version - ${project.version} - ^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).* - $1.$2.$3.$4 - true - - - - parse-package-release - - regex-property - - - package-release - ${project.version} - - ^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-)((([a-zA-Z]+)?([0-9]+))|(SNAPSHOT)).* - - $6 - true - - - - - - org.apache.rat - apache-rat-plugin - 0.12 - - - README.md - **/*.json - **/*.log - **/*.txt - **/docker/Profile - **/docker/.env - **/*.story - - - - - test - - check - - - - - - - - - - - junit - junit - 4.10 - - - commons-cli - commons-cli - 1.3.1 - - - commons-codec - commons-codec - 1.8 - - - commons-lang - commons-lang - 2.5 - - - commons-fileupload - commons-fileupload - 1.3.3 - - - commons-collections - commons-collections - 3.2.2 - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - commons-io - commons-io - 2.5 - - - com.fasterxml.jackson.core - jackson-databind - 2.9.4 - - - com.fasterxml.jackson.core - jackson-annotations - 2.9.4 - - - com.fasterxml.jackson.dataformat - jackson-dataformat-xml - 2.9.4 - - - com.fasterxml.woodstox - woodstox-core - - - - - - - diff --git a/ambari-logsearch/.gitignore b/ambari-logsearch/.gitignore deleted file mode 100644 index 7ea6a1f8de3..00000000000 --- a/ambari-logsearch/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -target -.settings -.classpath -.project -/bin/ -node_modules/ -logs/ -node/ -*.pid - diff --git a/ambari-logsearch/README.md b/ambari-logsearch/README.md deleted file mode 100644 index fb3b56b3c48..00000000000 --- a/ambari-logsearch/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# Log Search - -## Setup local environment with docker - -### Prerequisites - -- Install [docker](https://docs.docker.com/) -- For Mac OS X use [Docker for Mac](https://docs.docker.com/docker-for-mac/) -- [Docker compose](https://docs.docker.com/compose/) is also required. 
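Before building anything, it can help to confirm that the prerequisites above are actually available on the host. A quick sanity check using only the standard Docker CLIs (nothing Log Search specific is assumed here):

```bash
# Verify the Docker client and daemon are installed and reachable
docker --version
docker info > /dev/null && echo "Docker daemon is running"

# Verify Docker Compose is installed (required for the multi-container setup)
docker-compose --version
```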
- -### Build and start Log Search in docker container -```bash -# to see available commands: run start-logsearch without arguments -cd docker -./logsearch-docker build-and-run # build mvn project locally, build docker image, start containers -``` -If you run the script for the first time, it generates a new `Profile` file or an `.env` file inside the docker directory (run it twice if both are missing and you want to generate both the Profile and the .env file). In the .env file you should set `MAVEN_REPOSITORY_LOCATION` (point it to your local maven repository location; it uses `~/.m2` by default); a sketch of a possible `.env` is shown after this README. These will be used as volumes for the docker container. The Profile file holds the environment variables that are used inside the containers, while the .env file is used outside of the containers. - -Then you can use the `logsearch-docker` script to start the containers (`start` command). -You can also use docker-compose manually to start/manage the containers. -```bash -docker-compose up -d -# or start all services in one container: -docker-compose -f all.yml up -d -``` -After the logsearch container is started you can enter it with the following commands: -```bash -docker exec -it docker_logsearch_1 bash -# or if you used all.yml for starting the logsearch docker container: -docker exec -it logsearch bash -``` -If you started the containers separately and would like to access Solr locally through your external ZooKeeper container, point `solr` to `localhost` in your `/etc/hosts` file. - -### Run applications from IDE / maven - -- [Start Log Search locally](ambari-logsearch-server/README.md) -- [Start Log Feeder locally](ambari-logsearch-logfeeder/README.md) - -## Package build process - - -1. Check out the code from the Git repository - -2. In the logsearch root folder (ambari/ambari-logsearch), execute one of the following Maven commands to build the RPM/DPKG packages: -```bash -mvn -Dbuild-rpm clean package -``` - or -```bash -mvn -Dbuild-deb clean package -``` -3. The generated RPM/DPKG files can be found in the ambari-logsearch-assembly/target folder - -## Running Integration Tests - -By default integration tests are not part of the build process; you need to set -Dbackend-tests or -Dselenium-tests (or use -Dall-tests to run both). To run the tests you will need docker here as well (right now docker-for-mac and unix are supported by default; for boot2docker you need to pass the -Ddocker.host parameter to the build). - -```bash -# from the ambari-logsearch folder -mvn clean integration-test -Dbackend-tests failsafe:verify -# or run selenium tests with docker for mac, but before that you need to start XQuartz -open -a XQuartz -# then in another window you can start the ui tests -mvn clean integration-test -Dselenium-tests failsafe:verify -# you can specify the story file folder location with -Dbackend.stories.location and -Dui.stories.location (absolute file path) in the commands -``` -You can also run the tests from the IDE, but make sure all of the ambari-logsearch modules are built.
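To make the `Profile`/`.env` split described in the README above more concrete, here is a minimal sketch of what the generated `docker/.env` might contain. Only the variable name `MAVEN_REPOSITORY_LOCATION` and its `~/.m2` default come from the README; the concrete path below is an assumption for illustration:

```bash
# docker/.env (sketch, not the actual generated file) - read on the host, outside of the containers.
# Point this at your local Maven repository; ~/.m2 is the default mentioned in the README.
MAVEN_REPOSITORY_LOCATION=/home/user/.m2
```

The environment variables consumed inside the containers live in the generated `Profile` file, as the README notes.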
diff --git a/ambari-logsearch/ambari-logsearch-appender/.gitignore b/ambari-logsearch/ambari-logsearch-appender/.gitignore deleted file mode 100644 index a8ab5affaf9..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -logs/* -target diff --git a/ambari-logsearch/ambari-logsearch-appender/README.md b/ambari-logsearch/ambari-logsearch-appender/README.md deleted file mode 100644 index 93e3e8a2c3c..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/README.md +++ /dev/null @@ -1,57 +0,0 @@ - - - -# Ambari Logsearch Appender - -Ambari Logsearch Appender is a log4j-based appender that writes logs in JSON format. - - - -## Setup Ambari Logsearch Appender - -#### Add dependency -```xml - - org.apache.ambari - ambari-logsearch-appender - ${version} - -``` -#### Dependent dependencies -```xml - - log4j - log4j - 1.2.17 - - - com.google.code.gson - gson - 2.6.2 - -``` - -## Configuration -#### Sample Configuration for log4j.properties -```properties -log4j.appender.logsearchJson=org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender -log4j.appender.logsearchJson.File=path/file_name.json -log4j.appender.logsearchJson.maxFileSize=10MB -log4j.appender.logsearchJson.maxBackupIndex=10 -log4j.appender.logsearchJson.Append=true -log4j.appender.logsearchJson.layout=org.apache.ambari.logsearch.appender.LogsearchConversion -``` -### OR -#### Sample Configuration for log4j.xml -```xml - - - - - - - -``` - - diff --git a/ambari-logsearch/ambari-logsearch-appender/build.properties b/ambari-logsearch/ambari-logsearch-appender/build.properties deleted file mode 100644 index 95ea4ac5935..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/build.properties +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -# log4j configuration used during build and unit tests diff --git a/ambari-logsearch/ambari-logsearch-appender/build.xml b/ambari-logsearch/ambari-logsearch-appender/build.xml deleted file mode 100644 index 37708be91f8..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/build.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-appender/pom.xml b/ambari-logsearch/ambari-logsearch-appender/pom.xml deleted file mode 100644 index 65a68a9590d..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/pom.xml +++ /dev/null @@ -1,104 +0,0 @@ - - - - 4.0.0 - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - - ambari-logsearch-appender - jar - Ambari Logsearch Appender - - http://maven.apache.org - - - UTF-8 - 1.7 - 1.7 - ambari-logsearch-appender - - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - package - - - - - - - - - run - - - - - - - - - - log4j - log4j - 1.2.17 - - - com.sun.jdmk - jmxtools - - - com.sun.jmx - jmxri - - - javax.mail - mail - - - javax.jms - jmx - - - javax.jms - jms - - - - - com.google.code.gson - gson - 2.6.2 - - - junit - junit - test - - - diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java deleted file mode 100644 index 877fa243498..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.appender; - -import java.io.PrintWriter; -import java.io.StringWriter; - -import org.apache.log4j.EnhancedPatternLayout; -import org.apache.log4j.spi.LoggingEvent; - -public class LogsearchConversion extends EnhancedPatternLayout { - - private static final String NEW_LINE = System.getProperty("line.separator"); - - public LogsearchConversion() { - } - - public String format(LoggingEvent event) { - String outputStr = createOutput(event); - return outputStr + NEW_LINE; - } - - public String createOutput(LoggingEvent event) { - Output output = new Output(); - - output.setLevel(event.getLevel().toString()); - output.setFile(event.getLocationInformation().getFileName()); - output.setLineNumber(Integer.parseInt(event.getLocationInformation().getLineNumber())); - output.setLogtime(Long.toString(event.getTimeStamp())); - output.setLoggerName(event.getLoggerName()); - output.setThreadName(event.getThreadName()); - output.setLogMessage(getLogMessage(event)); - - return output.toJson(); - } - - public String getLogMessage(LoggingEvent event) { - String logMessage = event.getMessage() != null ? event.getMessage().toString() : ""; - - if (event.getThrowableInformation() != null && event.getThrowableInformation().getThrowable() != null) { - logMessage += NEW_LINE; - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - event.getThrowableInformation().getThrowable().printStackTrace(pw); - logMessage += sw.toString(); - } - - return logMessage; - } - - @Override - public boolean ignoresThrowable() { - return false; - } -} diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java deleted file mode 100644 index 6a93db57b09..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.appender; - -import org.apache.log4j.Layout; -import org.apache.log4j.Logger; -import org.apache.log4j.RollingFileAppender; -import org.apache.log4j.spi.LoggingEvent; - -public class LogsearchRollingFileAppender extends RollingFileAppender { - private static Logger logger = Logger.getLogger(LogsearchRollingFileAppender.class); - - public LogsearchRollingFileAppender() { - logger.trace("Initializing LogsearchRollingFileAppender........... 
"); - } - - @Override - public void append(LoggingEvent event) { - super.append(event); - } - - @Override - public void setLayout(Layout layout) { - super.setLayout(layout); - } -} diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java deleted file mode 100644 index 8001054e76a..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.appender; - -import java.io.IOException; -import java.io.StringWriter; - -import com.google.gson.stream.JsonWriter; - -class Output { - - private String level; - private String file; - private String threadName; - private int lineNumber; - private String loggerName; - private String logtime; - private String logMessage; - - void setLevel(String level) { - this.level = level; - } - - void setFile(String file) { - this.file = file; - } - - void setThreadName(String threadName) { - this.threadName = threadName; - } - - void setLineNumber(int lineNumber) { - this.lineNumber = lineNumber; - } - - void setLoggerName(String loggerName) { - this.loggerName = loggerName; - } - - void setLogtime(String logtime) { - this.logtime = logtime; - } - - void setLogMessage(String logMessage) { - this.logMessage = logMessage; - } - - public String toJson() { - StringWriter stringWriter = new StringWriter(); - - try (JsonWriter writer = new JsonWriter(stringWriter)) { - writer.beginObject(); - - if (level != null) writer.name("level").value(level); - if (file != null) writer.name("file").value(file); - if (threadName != null) writer.name("thread_name").value(threadName); - writer.name("line_number").value(lineNumber); - if (loggerName != null) writer.name("logger_name").value(loggerName); - if (logtime != null) writer.name("logtime").value(logtime); - if (logMessage != null) writer.name("log_message").value(logMessage); - - writer.endObject(); - } catch (IOException e) { - e.printStackTrace(); - } - - return stringWriter.toString(); - } - - @Override - public String toString() { - return toJson(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-appender/src/test/java/org/apache/ambari/logsearch/appender/AppTest.java b/ambari-logsearch/ambari-logsearch-appender/src/test/java/org/apache/ambari/logsearch/appender/AppTest.java deleted file mode 100644 index 8152787e930..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/src/test/java/org/apache/ambari/logsearch/appender/AppTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.appender; - -import org.apache.log4j.Logger; - -public class AppTest { - private static Logger logger = Logger.getLogger(AppTest.class); - - public static void main(String[] args) { - - try { - throwException(); - } catch (ClassCastException castException) { - logger.error("error", castException); - } - - } - - public static void throwException() { - throw new ClassCastException("test"); - - } -} diff --git a/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties b/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties deleted file mode 100644 index 18ee520df6a..00000000000 --- a/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# log4j configuration used during build and unit tests - -# Root logger option -log4j.rootLogger=INFO, logsearchJson, socket - -# Redirect log messages to a logsearch json -log4j.appender.logsearchJson=org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender -log4j.appender.logsearchJson.File=target/jsonlog/log.json -log4j.appender.logsearchJson.maxFileSize=10MB -log4j.appender.logsearchJson.maxBackupIndex=10 -log4j.appender.logsearchJson.Append=true -log4j.appender.logsearchJson.layout=org.apache.ambari.logsearch.appender.LogsearchConversion - -log4j.appender.socket=org.apache.log4j.net.SocketAppender -log4j.appender.socket.Port=61999 -log4j.appender.socket.RemoteHost=localhost -log4j.appender.socket.ReconnectionDelay=10000 \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/pom.xml b/ambari-logsearch/ambari-logsearch-assembly/pom.xml deleted file mode 100644 index 19c3d90d269..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/pom.xml +++ /dev/null @@ -1,296 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - Ambari Logsearch Assembly - http://maven.apache.org - 4.0.0 - - /usr/lib - ambari-logsearch-portal - ${mapping.base.path}/ambari-logsearch-portal - ${project.basedir}/../ambari-logsearch-server - ambari-logsearch-logfeeder - ${mapping.base.path}/ambari-logsearch-logfeeder - ${project.basedir}/../ambari-logsearch-logfeeder - /etc/${logsearch.portal.package.name}/conf - /etc/${logsearch.logfeeder.package.name}/conf - - - ambari-logsearch-assembly - - - - rpm - - true - - - - build-rpm - - - - - - org.codehaus.mojo - rpm-maven-plugin - 2.1.4 - - 2012, Apache Software Foundation - Development - Maven Recipe: RPM Package. - false - / - noarch - linux - - ${package-version} - ${package-release} - - root - root - - - - logsearch-portal - package - - rpm - - - Development - ${logsearch.portal.package.name} - - ${project.basedir}/src/main/package/rpm/portal/postinstall.sh - utf-8 - - - ${project.basedir}/src/main/package/rpm/portal/postremove.sh - utf-8 - - - ${project.basedir}/src/main/package/rpm/portal/preinstall.sh - utf-8 - - - - ${logsearch.portal.mapping.path} - - - ${logsearch.server.dir}/target/package - - libs/checkstyle*.jar - - - - - - - - - logsearch-logfeeder - package - - rpm - - - Development - ${logsearch.logfeeder.package.name} - - ${project.basedir}/src/main/package/rpm/logfeeder/postinstall.sh - utf-8 - - - ${project.basedir}/src/main/package/rpm/logfeeder/postremove.sh - utf-8 - - - ${project.basedir}/src/main/package/rpm/logfeeder/preinstall.sh - utf-8 - - - - ${logsearch.logfeeder.mapping.path} - - - ${logsearch.logfeeder.dir}/target/package - - libs/checkstyle*.jar - - - - - - - - - - - - - - deb - - - - build-deb - - - - - - - maven-resources-plugin - 2.7 - - - - copy-resources - package - - copy-resources - - - ${project.build.directory}/resources/deb - - - ${project.basedir}/src/main/package/deb - - portal/postinst - portal/postrm - portal/preinstall - logfeeder/postinst - logfeeder/postrm - logfeeder/preinstall - - false - - - ${project.basedir}/src/main/package/deb - - portal/postinst - portal/postrm - portal/preinstall - logfeeder/postinst - logfeeder/postrm - logfeeder/preinstall - - true - - - - - - - - org.vafer - jdeb - 1.4 - - - jdeb-portal - package - - jdeb - - - ${basedir}/src/main/package/deb/portal - ${basedir}/target/${logsearch.portal.package.name}_${package-version}-${package-release}.deb - - false - false - - - 
${logsearch.server.dir}/target/ambari-logsearch-portal.tar.gz - archive - - ${logsearch.portal.mapping.path} - perm - root - root - - - libs/checkstyle*.jar - - - - - - - - jdeb-logfeeder - package - - jdeb - - - ${basedir}/src/main/package/deb/logfeeder - ${basedir}/target/${logsearch.logfeeder.package.name}_${package-version}-${package-release}.deb - - false - false - - - ${logsearch.logfeeder.dir}/target/ambari-logsearch-logfeeder.tgz - archive - - ${logsearch.logfeeder.mapping.path} - perm - root - root - - - libs/checkstyle*.jar - - - - - - - - - - - - - - - org.apache.ambari - ambari-logsearch-server - ${project.version} - - - org.apache.ambari - ambari-logsearch-web - ${project.version} - - - org.apache.ambari - ambari-logsearch-logfeeder - ${project.version} - - - - diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/control b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/control deleted file mode 100644 index b853f138f96..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/control +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -Package: [[logsearch.logfeeder.package.name]] -Version: [[package-version]]-[[package-release]] -Section: [[deb.section]] -Priority: [[deb.priority]] -Depends: [[deb.dependency.list]] -Architecture: [[deb.architecture]] -Description: [[description]] -Maintainer: [[deb.publisher]] diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postinst b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postinst deleted file mode 100644 index 10dffa80d24..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postinst +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License - -LOGFEEDER_SCRIPT_LINK_NAME="/usr/bin/logfeeder" -LOGFEEDER_SCRIPT_SOURCE="/usr/lib/ambari-logsearch-logfeeder/bin/logfeeder.sh" - -LOGFEEDER_CONF_LINK="/etc/ambari-logsearch-logfeeder/conf" -LOGFEEDER_CONF_SOURCE="/usr/lib/ambari-logsearch-logfeeder/conf" - -ln -s $LOGFEEDER_SCRIPT_SOURCE $LOGFEEDER_SCRIPT_LINK_NAME -#ln -s $LOGFEEDER_CONF_SOURCE $LOGFEEDER_CONF_LINK - -# handle old keys folder & custom jsons - -LOGFEEDER_CONF_BACKUP="/usr/lib/ambari-logsearch-logfeeder/conf-old" - -if [ -d "$LOGFEEDER_CONF_BACKUP" ]; then - if [ -d "$LOGFEEDER_CONF_BACKUP/keys" ]; then - cp -r $LOGFEEDER_CONF_BACKUP/keys $LOGFEEDER_CONF_SOURCE - fi - - custom_jsons=(`find $LOGFEEDER_CONF_BACKUP -name "*.json" ! -name 'input*.json' ! -name 'global.config.json' ! -name 'output.config.json'`) - if [ ! -z "$custom_jsons" ]; then - for custom_json_file in "${custom_jsons[@]}" - do : - cp -r $custom_json_file "$LOGFEEDER_CONF_SOURCE/" - done - fi -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postrm b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postrm deleted file mode 100644 index 5f1c623d65e..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postrm +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGFEEDER_SCRIPT_LINK_NAME="/usr/bin/logfeeder" -LOGFEEDER_CONF_DIR_LINK="/etc/ambari-logsearch-logfeeder/conf" - -rm -f $LOGFEEDER_SCRIPT_LINK_NAME -rm -f $LOGFEEDER_CONF_DIR_LINK \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/preinst b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/preinst deleted file mode 100644 index 01d70e04c96..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/preinst +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License - -LOGFEEDER_CONF="/etc/ambari-logsearch-logfeeder/conf" -LOGFEEDER_CONF_BACKUP="/usr/lib/ambari-logsearch-logfeeder/conf-old" - -if [ -d $LOGFEEDER_CONF_BACKUP ]; then - rm -rf $LOGFEEDER_CONF_BACKUP -fi - -if [ -d $LOGFEEDER_CONF ]; then - mkdir -p $LOGFEEDER_CONF_BACKUP - cp -r $LOGFEEDER_CONF/* $LOGFEEDER_CONF_BACKUP - rm -rf $LOGFEEDER_CONF -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/prerm b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/prerm deleted file mode 100644 index 21a01faa534..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/prerm +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/control b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/control deleted file mode 100644 index ed9cb59c403..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/control +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -Package: [[logsearch.portal.package.name]] -Version: [[package-version]]-[[package-release]] -Section: [[deb.section]] -Priority: [[deb.priority]] -Depends: [[deb.dependency.list]] -Architecture: [[deb.architecture]] -Description: [[description]] -Maintainer: [[deb.publisher]] diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postinst b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postinst deleted file mode 100644 index e71445b41cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postinst +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGSEARCH_SCRIPT_LINK_NAME="/usr/bin/logsearch" -LOGSEARCH_SCRIPT_SOURCE="/usr/lib/ambari-logsearch-portal/bin/logsearch.sh" - -LOGSEARCH_CONF_LINK="/etc/ambari-logsearch-portal/conf" -LOGSEARCH_CONF_SOURCE="/usr/lib/ambari-logsearch-portal/conf" - -ln -s $LOGSEARCH_SCRIPT_SOURCE $LOGSEARCH_SCRIPT_LINK_NAME -#ln -s $LOGSEARCH_CONF_SOURCE $LOGSEARCH_CONF_LINK - -# handle old keys folder - -LOGSEARCH_CONF_BACKUP="/usr/lib/ambari-logsearch-portal/conf-old" - -if [ -d "$LOGSEARCH_CONF_BACKUP" ]; then - if [ -d "$LOGSEARCH_CONF_BACKUP/keys" ]; then - cp -r $LOGSEARCH_CONF_BACKUP/keys $LOGSEARCH_CONF_SOURCE - fi -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postrm b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postrm deleted file mode 100644 index 8de05c12990..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postrm +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGSEARCH_SCRIPT_LINK_NAME="/usr/bin/logsearch" -LOGSEARCH_CONF_DIR_LINK="/etc/ambari-logsearch-portal/conf" - -rm -f $LOGSEARCH_SCRIPT_LINK_NAME -rm -f $LOGSEARCH_CONF_DIR_LINK \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/preinst b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/preinst deleted file mode 100644 index dfb8752a994..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/preinst +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGSEARCH_CONF="/etc/ambari-logsearch-portal/conf" -LOGSEARCH_CONF_BACKUP="/usr/lib/ambari-logsearch-portal/conf-old" - -if [ -d $LOGSEARCH_CONF_BACKUP ]; then - rm -rf $LOGSEARCH_CONF_BACKUP -fi - -if [ -d $LOGSEARCH_CONF ]; then - mkdir -p $LOGSEARCH_CONF_BACKUP - cp -r $LOGSEARCH_CONF/* $LOGSEARCH_CONF_BACKUP - rm -rf $LOGSEARCH_CONF -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/prerm b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/prerm deleted file mode 100644 index 21a01faa534..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/prerm +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/postinstall.sh b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/postinstall.sh deleted file mode 100755 index ce4b611ca38..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/postinstall.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License - -LOGFEEDER_SCRIPT_LINK_NAME="/usr/bin/logfeeder" -LOGFEEDER_SCRIPT_SOURCE="/usr/lib/ambari-logsearch-logfeeder/bin/logfeeder.sh" - -LOGFEEDER_ETC_FOLDER="/etc/ambari-logsearch-logfeeder" -LOGFEEDER_CONF_LINK="$LOGFEEDER_ETC_FOLDER/conf" -LOGFEEDER_CONF_SOURCE="/usr/lib/ambari-logsearch-logfeeder/conf" - -mkdir -p $LOGFEEDER_ETC_FOLDER - -ln -s $LOGFEEDER_SCRIPT_SOURCE $LOGFEEDER_SCRIPT_LINK_NAME -#ln -s $LOGFEEDER_CONF_SOURCE $LOGFEEDER_CONF_LINK - -# handle old keys folder & custom jsons - -LOGFEEDER_CONF_BACKUP="/usr/lib/ambari-logsearch-logfeeder/conf-old" - -if [ -d "$LOGFEEDER_CONF_BACKUP" ]; then - if [ -d "$LOGFEEDER_CONF_BACKUP/keys" ]; then - cp -r $LOGFEEDER_CONF_BACKUP/keys $LOGFEEDER_CONF_SOURCE - fi - - custom_jsons=(`find $LOGFEEDER_CONF_BACKUP -name "*.json" ! -name 'input*.json' ! -name 'global.config.json' ! -name 'output.config.json'`) - if [ ! -z "$custom_jsons" ]; then - for custom_json_file in "${custom_jsons[@]}" - do : - cp -r $custom_json_file "$LOGFEEDER_CONF_SOURCE/" - done - fi -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/postremove.sh b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/postremove.sh deleted file mode 100755 index 4583980f198..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/postremove.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGFEEDER_SCRIPT_LINK_NAME="/usr/bin/logfeeder" -LOGFEEDER_ETC_FOLDER="/etc/ambari-logsearch-logfeeder" -LOGFEEDER_CONF_DIR_LINK="$LOGFEEDER_ETC_FOLDER/conf" - -rm -f $LOGFEEDER_SCRIPT_LINK_NAME -rm -f $LOGFEEDER_CONF_DIR_LINK -rm -f $LOGFEEDER_ETC_FOLDER \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/preinstall.sh b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/preinstall.sh deleted file mode 100644 index 01d70e04c96..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/logfeeder/preinstall.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGFEEDER_CONF="/etc/ambari-logsearch-logfeeder/conf" -LOGFEEDER_CONF_BACKUP="/usr/lib/ambari-logsearch-logfeeder/conf-old" - -if [ -d $LOGFEEDER_CONF_BACKUP ]; then - rm -rf $LOGFEEDER_CONF_BACKUP -fi - -if [ -d $LOGFEEDER_CONF ]; then - mkdir -p $LOGFEEDER_CONF_BACKUP - cp -r $LOGFEEDER_CONF/* $LOGFEEDER_CONF_BACKUP - rm -rf $LOGFEEDER_CONF -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/postinstall.sh b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/postinstall.sh deleted file mode 100644 index 97523f4f10e..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/postinstall.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGSEARCH_SCRIPT_LINK_NAME="/usr/bin/logsearch" -LOGSEARCH_SCRIPT_SOURCE="/usr/lib/ambari-logsearch-portal/bin/logsearch.sh" - -LOGSEARCH_ETC_FOLDER="/etc/ambari-logsearch-portal" -LOGSEARCH_CONF_LINK="$LOGSEARCH_ETC_FOLDER/conf" -LOGSEARCH_CONF_SOURCE="/usr/lib/ambari-logsearch-portal/conf" - -mkdir -p $LOGSEARCH_ETC_FOLDER - -ln -s $LOGSEARCH_SCRIPT_SOURCE $LOGSEARCH_SCRIPT_LINK_NAME -#ln -s $LOGSEARCH_CONF_SOURCE $LOGSEARCH_CONF_LINK - -# handle old keys folder - -LOGSEARCH_CONF_BACKUP="/usr/lib/ambari-logsearch-portal/conf-old" - -if [ -d "$LOGSEARCH_CONF_BACKUP" ]; then - if [ -d "$LOGSEARCH_CONF_BACKUP/keys" ]; then - cp -r $LOGSEARCH_CONF_BACKUP/keys $LOGSEARCH_CONF_SOURCE - fi -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/postremove.sh b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/postremove.sh deleted file mode 100644 index b07a1adc835..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/postremove.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGSEARCH_SCRIPT_LINK_NAME="/usr/bin/logsearch" -LOGSEARCH_ETC_FOLDER="/etc/ambari-logsearch-portal" -LOGSEARCH_CONF_DIR_LINK="$LOGSEARCH_ETC_FOLDER/conf" - -rm -f $LOGSEARCH_SCRIPT_LINK_NAME -rm -f $LOGSEARCH_CONF_DIR_LINK -rm -f $LOGSEARCH_ETC_FOLDER \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/preinstall.sh b/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/preinstall.sh deleted file mode 100644 index dfb8752a994..00000000000 --- a/ambari-logsearch/ambari-logsearch-assembly/src/main/package/rpm/portal/preinstall.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -LOGSEARCH_CONF="/etc/ambari-logsearch-portal/conf" -LOGSEARCH_CONF_BACKUP="/usr/lib/ambari-logsearch-portal/conf-old" - -if [ -d $LOGSEARCH_CONF_BACKUP ]; then - rm -rf $LOGSEARCH_CONF_BACKUP -fi - -if [ -d $LOGSEARCH_CONF ]; then - mkdir -p $LOGSEARCH_CONF_BACKUP - cp -r $LOGSEARCH_CONF/* $LOGSEARCH_CONF_BACKUP - rm -rf $LOGSEARCH_CONF -fi \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-api/.gitignore b/ambari-logsearch/ambari-logsearch-config-api/.gitignore deleted file mode 100644 index ae3c1726048..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/ambari-logsearch/ambari-logsearch-config-api/pom.xml b/ambari-logsearch/ambari-logsearch-config-api/pom.xml deleted file mode 100644 index 59286a6f6fe..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/pom.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-config-api - jar - Ambari Logsearch Config Api - http://maven.apache.org - - - UTF-8 - - - - - junit - junit - test - - - org.slf4j - slf4j-api - 1.7.20 - - - diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java deleted file mode 100644 index 746c14cebce..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.List; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; - -/** - * Monitors input configuration changes. - */ -public interface InputConfigMonitor { - /** - * @return A list of json strings for all the global config jsons. - */ - List getGlobalConfigJsons(); - - /** - * Notification of a new input configuration. - * - * @param serviceName The name of the service for which the input configuration was created. - * @param inputConfig The input configuration. - * @throws Exception - */ - void loadInputConfigs(String serviceName, InputConfig inputConfig) throws Exception; - - /** - * Notification of the removal of an input configuration. - * - * @param serviceName The name of the service of which's input configuration was removed. - */ - void removeInputs(String serviceName); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterManager.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterManager.java deleted file mode 100644 index 00df7bf2328..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterManager.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api; - -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; - -public interface LogLevelFilterManager { - - /** - * Uploads the log level filter of a log. - * - * @param clusterName The name of the cluster where the log is. - * @param logId The id of the log. - * @param filter The log level filter for the log. 
- * @throws Exception - */ - void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception; - - /** - * Modifies the log level filters for all the logs. - * - * @param clusterName The name of the cluster where the logs are. - * @param filters The log level filters to set. - * @throws Exception - */ - void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception; - - /** - * Returns the Log Level Filters of a cluster. - * - * @param clusterName The name of the cluster which's log level filters are required. - * @return All the log level filters of the cluster. - */ - LogLevelFilterMap getLogLevelFilters(String clusterName); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterMonitor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterMonitor.java deleted file mode 100644 index 841f09e8043..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterMonitor.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Monitors log level filter changes. - */ -package org.apache.ambari.logsearch.config.api; - -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; - -import java.util.Map; - -public interface LogLevelFilterMonitor { - /** - * Notification of a new or updated log level filter. - * - * @param logId The log for which the log level filter was created/updated. - * @param logLevelFilter The log level filter to apply from now on to the log. - */ - void setLogLevelFilter(String logId, LogLevelFilter logLevelFilter); - - /** - * Notification of the removal of a log level filter. - * - * @param logId The log of which's log level filter was removed. - */ - void removeLogLevelFilter(String logId); - - /** - * Helper function to get all log level filters - */ - Map getLogLevelFilters(); - -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterUpdater.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterUpdater.java deleted file mode 100644 index 425cdc806cc..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogLevelFilterUpdater.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Used for connect a remote source periodically to get / set log level filters. - */ -public abstract class LogLevelFilterUpdater extends Thread { - - private static final Logger LOG = LoggerFactory.getLogger(LogLevelFilterUpdater.class); - - private final LogLevelFilterMonitor logLevelFilterMonitor; - private final int interval; - private boolean stop = false; - - public LogLevelFilterUpdater(String threadName, LogLevelFilterMonitor logLevelFilterMonitor, Integer interval) { - this.setName(threadName); - this.setDaemon(true); - this.logLevelFilterMonitor = logLevelFilterMonitor; - this.interval = interval == null ? 30 : interval; - } - - public LogLevelFilterMonitor getLogLevelFilterMonitor() { - return logLevelFilterMonitor; - } - - public void setStop(boolean stop) { - this.stop = stop; - } - - @Override - public void run() { - while (!Thread.currentThread().isInterrupted() || !stop) { - try { - Thread.sleep(1000 * interval); - checkFilters(logLevelFilterMonitor); - } catch (Exception e) { - LOG.error("Exception happened during log level filter check: {}", e); - } - } - } - - /** - * Periodically check filters from a source (and use log level filter monitor to create/update/delete it) - */ - protected abstract void checkFilters(final LogLevelFilterMonitor logLevelFilterMonitor); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java deleted file mode 100644 index 97eabdf5f43..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
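// Editor's sketch (not part of the original change): how the abstract
// LogLevelFilterUpdater above is intended to be extended. The remote-source pieces
// (RemoteLogLevelFilterUpdater, RemoteFilterClient, fetchFilters) are hypothetical;
// only the thread name / monitor / interval wiring mirrors the class shown above.
import java.util.Map;

import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor;
import org.apache.ambari.logsearch.config.api.LogLevelFilterUpdater;
import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;

public class RemoteLogLevelFilterUpdater extends LogLevelFilterUpdater {
  private final RemoteFilterClient client; // hypothetical client for the remote source

  public RemoteLogLevelFilterUpdater(RemoteFilterClient client, LogLevelFilterMonitor monitor, Integer intervalSeconds) {
    super("remote-log-level-filter-updater", monitor, intervalSeconds);
    this.client = client;
  }

  @Override
  protected void checkFilters(LogLevelFilterMonitor monitor) {
    // Pull the current filters from the remote source and push them to the monitor;
    // removal of obsolete filters is omitted to keep the sketch short.
    Map<String, LogLevelFilter> remoteFilters = client.fetchFilters();
    remoteFilters.forEach(monitor::setLogLevelFilter);
  }
}

// Hypothetical abstraction of the remote source used by the sketch above.
// Start the updater with: new RemoteLogLevelFilterUpdater(client, monitor, 30).start();
interface RemoteFilterClient {
  Map<String, LogLevelFilter> fetchFilters();
}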
- */ - -package org.apache.ambari.logsearch.config.api; - -import java.io.Closeable; - -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; - -/** - * Log Search Configuration, which uploads, retrieves configurations, and monitors it's changes. - */ -public interface LogSearchConfig extends Closeable { - /** - * Uploads the input configuration for a service in a cluster. - * - * @param clusterName The name of the cluster where the service is. - * @param serviceName The name of the service of which's input configuration is uploaded. - * @param inputConfig The input configuration of the service. - * @throws Exception - */ - void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception; - - /** - * Get log level filter handler / manager - */ - LogLevelFilterManager getLogLevelFilterManager(); - - /** - * Set log level filter handler / manager - */ - void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java deleted file mode 100644 index 8b2c17d4686..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Factory class for LogSearchConfigServer and LogSearchConfigLogFeeder. - */ -public class LogSearchConfigFactory { - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigFactory.class); - - /** - * Creates a Log Search Configuration instance for the Log Search Server that implements - * {@link org.apache.ambari.logsearch.config.api.LogSearchConfigServer}. - * - * @param properties The properties of the component for which the configuration is created. If the properties contain the - * "logsearch.config.class" entry than the class defined there would be used instead of the default class. - * @param defaultClass The default configuration class to use if not specified otherwise. - * @param init initialize the properties and zookeeper client - * @return The Log Search Configuration instance. - * @throws Exception Throws exception if the defined class does not implement LogSearchConfigServer, or doesn't have an empty - * constructor, or throws an exception in it's init method. 
- */ - public static LogSearchConfigServer createLogSearchConfigServer(Map properties, - Class defaultClass, boolean init) throws Exception { - try { - LogSearchConfigServer logSearchConfig = null; - String configClassName = properties.get("logsearch.config.server.class"); - if (configClassName != null && !"".equals(configClassName.trim())) { - Class clazz = Class.forName(configClassName); - if (LogSearchConfigServer.class.isAssignableFrom(clazz)) { - logSearchConfig = (LogSearchConfigServer) clazz.newInstance(); - } else { - throw new IllegalArgumentException("Class " + configClassName + " does not implement the interface " + - LogSearchConfigServer.class.getName()); - } - } else { - logSearchConfig = defaultClass.newInstance(); - } - if (init) { - logSearchConfig.init(properties); - } - return logSearchConfig; - } catch (Exception e) { - LOG.error("Could not initialize logsearch config.", e); - throw e; - } - } - - /** - * Creates a Log Search Configuration instance for the Log Search Server that implements - * {@link org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder}. - * - * @param properties The properties of the component for which the configuration is created. If the properties contain the - * "logsearch.config.class" entry than the class defined there would be used instead of the default class. - * @param clusterName The name of the cluster. - * @param defaultClass The default configuration class to use if not specified otherwise. - * @param init initialize the properties and zookeeper client - * @return The Log Search Configuration instance. - * @throws Exception Throws exception if the defined class does not implement LogSearchConfigLogFeeder, or doesn't have an empty - * constructor, or throws an exception in it's init method. - */ - public static LogSearchConfigLogFeeder createLogSearchConfigLogFeeder(Map properties, String clusterName, - Class defaultClass, boolean init) throws Exception { - try { - LogSearchConfigLogFeeder logSearchConfig = null; - String configClassName = properties.get("logsearch.config.logfeeder.class"); - if (configClassName != null && !"".equals(configClassName.trim())) { - Class clazz = Class.forName(configClassName); - if (LogSearchConfig.class.isAssignableFrom(clazz)) { - logSearchConfig = (LogSearchConfigLogFeeder) clazz.newInstance(); - } else { - throw new IllegalArgumentException("Class " + configClassName + " does not implement the interface " + - LogSearchConfigLogFeeder.class.getName()); - } - } else { - logSearchConfig = defaultClass.newInstance(); - } - if (init) { - logSearchConfig.init(properties, clusterName == null ? "null" : clusterName.toLowerCase()); - } - return logSearchConfig; - } catch (Exception e) { - LOG.error("Could not initialize logsearch config.", e); - throw e; - } - } - - /** - * Creates a Log Search Configuration instance for the Log Search Server that implements - * {@link org.apache.ambari.logsearch.config.api.LogSearchConfigServer}. - * - * @param properties The properties of the component for which the configuration is created. If the properties contain the - * "logsearch.config.class" entry than the class defined there would be used instead of the default class. - * @param defaultClass The default configuration class to use if not specified otherwise. - * @return The Log Search Configuration instance. - * @throws Exception Throws exception if the defined class does not implement LogSearchConfigServer, or doesn't have an empty - * constructor, or throws an exception in it's init method. 
- */ - public static LogSearchConfigServer createLogSearchConfigServer(Map properties, - Class defaultClass) throws Exception { - return createLogSearchConfigServer(properties, defaultClass, true); - } - - /** - * Creates a Log Search Configuration instance for the Log Search Server that implements - * {@link org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder}. - * - * @param properties The properties of the component for which the configuration is created. If the properties contain the - * "logsearch.config.class" entry than the class defined there would be used instead of the default class. - * @param clusterName The name of the cluster. - * @param defaultClass The default configuration class to use if not specified otherwise. - * @return The Log Search Configuration instance. - * @throws Exception Throws exception if the defined class does not implement LogSearchConfigLogFeeder, or doesn't have an empty - * constructor, or throws an exception in it's init method. - */ - public static LogSearchConfigLogFeeder createLogSearchConfigLogFeeder(Map properties, String clusterName, - Class defaultClass) throws Exception { - return createLogSearchConfigLogFeeder(properties, clusterName, defaultClass, true); - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeeder.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeeder.java deleted file mode 100644 index af4408ac751..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeeder.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.Map; - -/** - * Log Search Configuration for Log Feeder. - */ -public interface LogSearchConfigLogFeeder extends LogSearchConfig { - /** - * Initialization of the configuration. - * - * @param properties The properties of that component. - * @param clusterName The name of the cluster. - * @throws Exception - */ - void init(Map properties, String clusterName) throws Exception; - - /** - * Checks if input configuration exists. - * - * @param serviceName The name of the service looked for. - * @return If input configuration exists for the service. - * @throws Exception - */ - boolean inputConfigExists(String serviceName) throws Exception; - - /** - * Starts the monitoring of the input configurations, asynchronously. - * - * @param inputConfigMonitor The input config monitor to call in case of an input config change. 
- * @param logLevelFilterMonitor The log level filter monitor to call in case of a log level filter change. - * @param clusterName The name of the cluster. - * @throws Exception - */ - void monitorInputConfigChanges(InputConfigMonitor inputConfigMonitor, LogLevelFilterMonitor logLevelFilterMonitor, - String clusterName) throws Exception; -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServer.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServer.java deleted file mode 100644 index d269c5ae32b..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServer.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; - -/** - * Log Search Configuration for Log Search Server. - */ -public interface LogSearchConfigServer extends LogSearchConfig { - /** - * Initialization of the configuration. - * - * @param properties The properties of that component. - * @throws Exception - */ - void init(Map properties) throws Exception; - - /** - * Returns all the service names with input configurations of a cluster. - * - * @param clusterName The name of the cluster which's services are required. - * @return List of the service names. - */ - List getServices(String clusterName); - - /** - * Checks if input configuration exists. - * - * @param clusterName The name of the cluster where the service is looked for. - * @param serviceName The name of the service looked for. - * @return If input configuration exists for the service. - * @throws Exception - */ - boolean inputConfigExists(String clusterName, String serviceName) throws Exception; - - /** - * Returns the global configurations of a cluster. - * - * @param clusterName The name of the cluster where the service is looked for. - * @return The global configurations of the cluster if it exists, null otherwise. - */ - String getGlobalConfigs(String clusterName); - - /** - * Modifies the input configuration for a service in a cluster. - * - * @param clusterName The name of the cluster where the service is. - * @param serviceName The name of the service of which's input configuration is uploaded. - * @param inputConfig The input configuration of the service. 
- * @throws Exception - */ - void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception; - - /** - * Returns the input configuration of a service in a cluster. - * - * @param clusterName The name of the cluster where the service is looked for. - * @param serviceName The name of the service looked for. - * @return The input configuration for the service if it exists, null otherwise. - */ - InputConfig getInputConfig(String clusterName, String serviceName); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchPropertyDescription.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchPropertyDescription.java deleted file mode 100644 index 330ef5cce88..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchPropertyDescription.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Marker for describe static application level properties (e.g.: logsearch.properties/logfeeder.properties) - * Can be used to generate documentation about the internal configs. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.FIELD, ElementType.METHOD}) -public @interface LogSearchPropertyDescription { - - /** - * Name of the property inside the application level property file. - */ - String name(); - - /** - * Describe what the property used for. - */ - String description(); - - /** - * An example value for the property. - */ - String[] examples(); - - /** - * Default value of the property, emtpy by default. - */ - String defaultValue() default ""; - - /** - * Name of the property files where the configurations located - */ - String[] sources(); - -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/OutputConfigMonitor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/OutputConfigMonitor.java deleted file mode 100644 index cad4a7d2b4e..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/OutputConfigMonitor.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
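// Editor's sketch (not part of the original change): typical calls against the
// LogSearchConfigServer interface above. An instance would normally be obtained via
// LogSearchConfigFactory.createLogSearchConfigServer(properties, defaultClass), where
// the properties may carry "logsearch.config.server.class" to override the default
// implementation; here the instance is simply taken as a parameter so the sketch stays
// self-contained. Cluster and service names ("cl1", "hdfs") and the JSON payload are
// illustrative only, and the stripped generics are assumed to be String-based.
import org.apache.ambari.logsearch.config.api.LogSearchConfigServer;
import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;

public class InputConfigBootstrapExample {
  static InputConfig ensureInputConfig(LogSearchConfigServer config) throws Exception {
    if (!config.inputConfigExists("cl1", "hdfs")) {
      // Upload a minimal shipper configuration for the service if none exists yet.
      config.createInputConfig("cl1", "hdfs", "{\"input\": [], \"filter\": []}");
    }
    // Returns the parsed input configuration, or null if it still does not exist.
    return config.getInputConfig("cl1", "hdfs");
  }
}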
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api; - -/** - * Monitors output configuration changes. - */ -public interface OutputConfigMonitor { - /** - * @return The destination of the output. - */ - String getDestination(); - - /** - * @return The type of the output logs. - */ - String getOutputType(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/ShipperConfigElementDescription.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/ShipperConfigElementDescription.java deleted file mode 100644 index d65bf8e01a9..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/ShipperConfigElementDescription.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Marker for the shipper configuration properties. - * Can be used to generate documentation about the shipper configs. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.FIELD}) -public @interface ShipperConfigElementDescription { - - /** - * The path of the json element. - */ - String path(); - - /** - * The type of the json element. - */ - String type(); - - /** - * Describe what the json element is used for. - */ - String description(); - - /** - * An example value for the element, if applicable. - */ - String[] examples() default {}; - - /** - * Default value of the json element, if applicable. 
- */ - String defaultValue() default ""; - -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/ShipperConfigTypeDescription.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/ShipperConfigTypeDescription.java deleted file mode 100644 index 1c112d84d86..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/ShipperConfigTypeDescription.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Marker for the shipper configuration types. - * Can be used to generate documentation about the shipper configs. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE}) -public @interface ShipperConfigTypeDescription { - - /** - * The name of the element type. - */ - String name(); - - /** - * The description of the json element. - */ - String description(); - -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Conditions.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Conditions.java deleted file mode 100644 index 4da400a9bd9..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Conditions.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
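// Editor's sketch (not part of the original change): how the two shipper-config marker
// annotations above are meant to be applied to a descriptor class. The class name,
// field, path, and example pattern below are hypothetical and serve only to show the
// annotation elements in use.
import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription;
import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription;

@ShipperConfigTypeDescription(
  name = "Grok Filter",
  description = "Parses log entries using grok patterns."
)
public class HypotheticalGrokFilterDescriptor {

  @ShipperConfigElementDescription(
    path = "/filter/[]/multiline_pattern",
    type = "string",
    description = "Grok pattern marking continuation lines of a multi-line log entry.",
    examples = {"^(%{TIMESTAMP_ISO8601:logtime})"}
  )
  private String multilinePattern;
}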
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface Conditions { - Fields getFields(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/CustomDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/CustomDescriptor.java deleted file mode 100644 index 481fb23737c..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/CustomDescriptor.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -import java.util.Map; - -public interface CustomDescriptor { - Map getProperties(); - - void setProperties(Map properties); - - String getMapperClassName(); - - void setMapperClassName(String className); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Fields.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Fields.java deleted file mode 100644 index 5d34b1ed556..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/Fields.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -import java.util.Set; - -public interface Fields { - Set getType(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterDescriptor.java deleted file mode 100644 index 632c6cb800c..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterDescriptor.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -import java.util.List; -import java.util.Map; - -public interface FilterDescriptor { - String getFilter(); - - Conditions getConditions(); - - Integer getSortOrder(); - - String getSourceField(); - - Boolean isRemoveSourceField(); - - Map> getPostMapValues(); - - Boolean isEnabled(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterGrokDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterGrokDescriptor.java deleted file mode 100644 index 9fc8eb465fc..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterGrokDescriptor.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface FilterGrokDescriptor extends FilterDescriptor { - String getLog4jFormat(); - - String getMultilinePattern(); - - String getMessagePattern(); - - void setMultilinePattern(String multilinePattern); - - boolean isSkipOnError(); - - boolean isDeepExtract(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterJsonDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterJsonDescriptor.java deleted file mode 100644 index 08f1893e6b6..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterJsonDescriptor.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface FilterJsonDescriptor extends FilterDescriptor { -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterKeyValueDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterKeyValueDescriptor.java deleted file mode 100644 index 6edd140822c..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/FilterKeyValueDescriptor.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface FilterKeyValueDescriptor extends FilterDescriptor { - String getFieldSplit(); - - String getValueSplit(); - - String getValueBorders(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputConfig.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputConfig.java deleted file mode 100644 index 8126ac9b586..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputConfig.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -import java.util.List; - -public interface InputConfig { - List getInput(); - - List getFilter(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputCustomDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputCustomDescriptor.java deleted file mode 100644 index 54c5935a9b7..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputCustomDescriptor.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface InputCustomDescriptor extends InputDescriptor, CustomDescriptor { -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputDescriptor.java deleted file mode 100644 index 7c00a6bd1f3..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputDescriptor.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -import java.util.List; -import java.util.Map; - -public interface InputDescriptor { - String getType(); - - String getRowtype(); - - String getPath(); - - Map getAddFields(); - - String getSource(); - - Boolean isTail(); - - Boolean isGenEventMd5(); - - Boolean isUseEventMd5AsId(); - - Boolean isCacheEnabled(); - - String getCacheKeyField(); - - Boolean getCacheLastDedupEnabled(); - - Integer getCacheSize(); - - Long getCacheDedupInterval(); - - Boolean isEnabled(); - - String getGroup(); - - Boolean isInitDefaultFields(); - - List getDefaultLogLevels(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileBaseDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileBaseDescriptor.java deleted file mode 100644 index a393dc732ad..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileBaseDescriptor.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface InputFileBaseDescriptor extends InputDescriptor { - Boolean getProcessFile(); - - Boolean getCopyFile(); - - Integer getCheckpointIntervalMs(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileDescriptor.java deleted file mode 100644 index 2689f8200c6..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputFileDescriptor.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface InputFileDescriptor extends InputFileBaseDescriptor { - Integer getDetachIntervalMin(); - - Integer getDetachTimeMin(); - - Integer getPathUpdateIntervalMin(); - - Integer getMaxAgeMin(); - - Boolean getDockerEnabled(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputS3FileDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputS3FileDescriptor.java deleted file mode 100644 index b075629cddc..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputS3FileDescriptor.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface InputS3FileDescriptor extends InputFileBaseDescriptor { - String getS3AccessKey(); - - String getS3SecretKey(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputSocketDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputSocketDescriptor.java deleted file mode 100644 index d89e9fcba7f..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/InputSocketDescriptor.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface InputSocketDescriptor extends InputDescriptor { - - Integer getPort(); - - String getProtocol(); - - Boolean isSecure(); - - Boolean isLog4j(); - -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapAnonymizeDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapAnonymizeDescriptor.java deleted file mode 100644 index 253315592a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapAnonymizeDescriptor.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapAnonymizeDescriptor extends MapFieldDescriptor { - String getPattern(); - - Character getHideChar(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapCustomDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapCustomDescriptor.java deleted file mode 100644 index a6d4a426af6..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapCustomDescriptor.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapCustomDescriptor extends MapFieldDescriptor, CustomDescriptor { -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapDateDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapDateDescriptor.java deleted file mode 100644 index 985d2213f11..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapDateDescriptor.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapDateDescriptor extends MapFieldDescriptor { - String getSourceDatePattern(); - - String getTargetDatePattern(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldCopyDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldCopyDescriptor.java deleted file mode 100644 index 596c173f7f6..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldCopyDescriptor.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapFieldCopyDescriptor extends MapFieldDescriptor { - String getCopyName(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldDescriptor.java deleted file mode 100644 index f5a2e359a72..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldDescriptor.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapFieldDescriptor { - String getJsonName(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldNameDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldNameDescriptor.java deleted file mode 100644 index da8cd0d603e..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldNameDescriptor.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapFieldNameDescriptor extends MapFieldDescriptor { - String getNewFieldName(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldValueDescriptor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldValueDescriptor.java deleted file mode 100644 index f0399588917..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/MapFieldValueDescriptor.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -public interface MapFieldValueDescriptor extends MapFieldDescriptor { - String getPreValue(); - - String getPostValue(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/PostMapValues.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/PostMapValues.java deleted file mode 100644 index 5be72877cd1..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/inputconfig/PostMapValues.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api.model.inputconfig; - -import java.util.List; - -public interface PostMapValues { - List getMappers(); -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilter.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilter.java deleted file mode 100644 index 06cf589a812..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilter.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.config.api.model.loglevelfilter; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -public class LogLevelFilter { - - private String label; - private List<String> hosts; - private List<String> defaultLevels; - private List<String> overrideLevels; - private Date expiryTime; - - public LogLevelFilter() { - hosts = new ArrayList<>(); - defaultLevels = new ArrayList<>(); - overrideLevels = new ArrayList<>(); - } - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - public List<String> getHosts() { - return hosts; - } - - public void setHosts(List<String> hosts) { - this.hosts = hosts; - } - - public List<String> getDefaultLevels() { - return defaultLevels; - } - - public void setDefaultLevels(List<String> defaultLevels) { - this.defaultLevels = defaultLevels; - } - - public List<String> getOverrideLevels() { - return overrideLevels; - } - - public void setOverrideLevels(List<String> overrideLevels) { - this.overrideLevels = overrideLevels; - } - - public Date getExpiryTime() { - return expiryTime; - } - - public void setExpiryTime(Date expiryTime) { - this.expiryTime = expiryTime; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilterMap.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilterMap.java deleted file mode 100644 index 37fdb9f9a8b..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/model/loglevelfilter/LogLevelFilterMap.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.  See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.  The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.  You may obtain a copy of the License at - * - *   http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.  See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.api.model.loglevelfilter; - -import java.util.TreeMap; - -public class LogLevelFilterMap { - private TreeMap<String, LogLevelFilter> filter; - - public TreeMap<String, LogLevelFilter> getFilter() { - return filter; - } - - public void setFilter(TreeMap<String, LogLevelFilter> filter) { - this.filter = filter; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java deleted file mode 100644 index d0db87f3ea5..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.  See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.
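As an illustrative aside, a minimal sketch of how the two removed model classes above fit together, assuming Gson and the deleted ambari-logsearch-config-api classes are still available on a classpath; the map key (taken here to be the log id) and the sample values are assumptions for illustration, not taken from the removed code:

import java.util.Arrays;
import java.util.Date;
import java.util.TreeMap;

import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap;

import com.google.gson.Gson;

public class LogLevelFilterSketch {
  public static void main(String[] args) {
    // One filter entry: default levels plus a temporary override that expires in an hour.
    LogLevelFilter filter = new LogLevelFilter();
    filter.setLabel("zookeeper");
    filter.setHosts(Arrays.asList("c6401.ambari.apache.org"));
    filter.setDefaultLevels(Arrays.asList("FATAL", "ERROR", "WARN", "INFO"));
    filter.setOverrideLevels(Arrays.asList("DEBUG", "TRACE"));
    filter.setExpiryTime(new Date(System.currentTimeMillis() + 3600_000L));

    // The map groups filters, assumed here to be keyed by log id.
    TreeMap<String, LogLevelFilter> filters = new TreeMap<>();
    filters.put("zookeeper", filter);
    LogLevelFilterMap filterMap = new LogLevelFilterMap();
    filterMap.setFilter(filters);

    // Print the JSON shape these beans serialize to.
    System.out.println(new Gson().toJson(filterMap));
  }
}

Using a TreeMap rather than a HashMap keeps the serialized filter entries in a stable, sorted order.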
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.  You may obtain a copy of the License at - * - *   http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.  See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; - -import junit.framework.Assert; - -public class LogSearchConfigFactoryTest { - - @Test - public void testDefaultConfigServer() throws Exception { - LogSearchConfigServer config = LogSearchConfigFactory.createLogSearchConfigServer(Collections.<String, String> emptyMap(), - LogSearchConfigServerClass1.class); - - Assert.assertSame(config.getClass(), LogSearchConfigServerClass1.class); - } - - @Test - public void testCustomConfigServer() throws Exception { - Map<String, String> logsearchConfClassMap = new HashMap<>(); - logsearchConfClassMap.put("logsearch.config.server.class", "org.apache.ambari.logsearch.config.api.LogSearchConfigServerClass2"); - LogSearchConfig config = LogSearchConfigFactory.createLogSearchConfigServer(logsearchConfClassMap, - LogSearchConfigServerClass1.class); - - Assert.assertSame(config.getClass(), LogSearchConfigServerClass2.class); - } - - @Test(expected = IllegalArgumentException.class) - public void testNonConfigClassServer() throws Exception { - Map<String, String> logsearchConfClassMap = new HashMap<>(); - logsearchConfClassMap.put("logsearch.config.server.class", "org.apache.ambari.logsearch.config.api.NonLogSearchConfigClass"); - LogSearchConfigFactory.createLogSearchConfigServer(logsearchConfClassMap, LogSearchConfigServerClass1.class); - } - - @Test - public void testDefaultConfigLogFeeder() throws Exception { - LogSearchConfigLogFeeder config = LogSearchConfigFactory.createLogSearchConfigLogFeeder(Collections.<String, String> emptyMap(),
- null, LogSearchConfigLogFeederClass1.class); - - Assert.assertSame(config.getClass(), LogSearchConfigLogFeederClass1.class); - } - - @Test - public void testCustomConfigLogFeeder() throws Exception { - Map<String, String> logsearchConfClassMap = new HashMap<>(); - logsearchConfClassMap.put("logsearch.config.logfeeder.class", "org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeederClass2"); - LogSearchConfigLogFeeder config = LogSearchConfigFactory.createLogSearchConfigLogFeeder(logsearchConfClassMap, null, - LogSearchConfigLogFeederClass1.class); - - Assert.assertSame(config.getClass(), LogSearchConfigLogFeederClass2.class); - } - - @Test(expected = IllegalArgumentException.class) - public void testNonConfigClassLogFeeder() throws Exception { - Map<String, String> logsearchConfClassMap = new HashMap<>(); - logsearchConfClassMap.put("logsearch.config.logfeeder.class", "org.apache.ambari.logsearch.config.api.NonLogSearchConfigClass"); - LogSearchConfigFactory.createLogSearchConfigLogFeeder(logsearchConfClassMap, null, LogSearchConfigLogFeederClass1.class); - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeederClass1.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeederClass1.java deleted file mode 100644 index 54b3ca04b97..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeederClass1.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.  See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.  The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.  You may obtain a copy of the License at - * - *   http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.  See the License for the - * specific language governing permissions and limitations - * under the License.
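The LogSearchConfigFactory that these tests exercise is not part of this hunk; the following is only a hypothetical sketch of the lookup pattern the tests imply (an optional class name read from the property map, a default class as fallback, and an IllegalArgumentException for classes that do not implement the expected interface), with made-up names throughout:

import java.util.Map;

// Hypothetical stand-in for the lookup the LogSearchConfigFactory tests above imply;
// the class name, method name and signature are illustrative, not the removed factory's actual API.
public class ConfigClassLookupSketch {
  public static <T> T create(Map<String, String> properties, String propertyKey,
      Class<? extends T> defaultClass, Class<T> expectedInterface) throws Exception {
    String className = properties.get(propertyKey);
    // Fall back to the default implementation when the property is not set.
    Class<?> clazz = className == null ? defaultClass : Class.forName(className);
    if (!expectedInterface.isAssignableFrom(clazz)) {
      // Mirrors the behaviour the NonLogSearchConfigClass tests expect.
      throw new IllegalArgumentException(clazz.getName() + " does not implement " + expectedInterface.getName());
    }
    return expectedInterface.cast(clazz.newInstance());
  }
}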
- */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.Map; - -public class LogSearchConfigLogFeederClass1 implements LogSearchConfigLogFeeder { - @Override - public void init(Map properties, String clusterName) {} - - @Override - public boolean inputConfigExists(String serviceName) throws Exception { - return false; - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {} - - @Override - public LogLevelFilterManager getLogLevelFilterManager() { - return null; - } - - @Override - public void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager) { - - } - - @Override - public void monitorInputConfigChanges(InputConfigMonitor inputConfigMonitor, LogLevelFilterMonitor logLevelFilterMonitor, - String clusterName) throws Exception {} - - @Override - public void close() {} -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeederClass2.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeederClass2.java deleted file mode 100644 index ca23b22dc1a..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigLogFeederClass2.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.Map; - -public class LogSearchConfigLogFeederClass2 implements LogSearchConfigLogFeeder { - @Override - public void init(Map properties, String clusterName) {} - - @Override - public boolean inputConfigExists(String serviceName) throws Exception { - return false; - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {} - - @Override - public LogLevelFilterManager getLogLevelFilterManager() { - return null; - } - - @Override - public void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager) { - } - - @Override - public void monitorInputConfigChanges(InputConfigMonitor inputConfigMonitor, LogLevelFilterMonitor logLevelFilterMonitor, - String clusterName) throws Exception {} - - @Override - public void close() {} -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServerClass1.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServerClass1.java deleted file mode 100644 index a1aaa254526..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServerClass1.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; - -public class LogSearchConfigServerClass1 implements LogSearchConfigServer { - @Override - public void init(Map properties) {} - - @Override - public boolean inputConfigExists(String clusterName, String serviceName) throws Exception { - return false; - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {} - - @Override - public LogLevelFilterManager getLogLevelFilterManager() { - return null; - } - - @Override - public void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager) { - - } - - @Override - public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {} - - @Override - public List getServices(String clusterName) { - return null; - } - - @Override - public String getGlobalConfigs(String clusterName) { - return null; - } - - @Override - public InputConfig getInputConfig(String clusterName, String serviceName) { - return null; - } - - @Override - public void close() {} -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServerClass2.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServerClass2.java deleted file mode 100644 index e94f9ceb2e3..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigServerClass2.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api; - -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; - -public class LogSearchConfigServerClass2 implements LogSearchConfigServer { - @Override - public void init(Map properties) {} - - @Override - public boolean inputConfigExists(String clusterName, String serviceName) throws Exception { - return false; - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {} - - @Override - public LogLevelFilterManager getLogLevelFilterManager() { - return null; - } - - @Override - public void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager) { - } - - @Override - public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {} - - @Override - public List getServices(String clusterName) { - return null; - } - - @Override - public String getGlobalConfigs(String clusterName) { - return null; - } - - @Override - public InputConfig getInputConfig(String clusterName, String serviceName) { - return null; - } - - @Override - public void close() {} -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java deleted file mode 100644 index 9564f33a54a..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.api; - -public class NonLogSearchConfigClass { -} diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml deleted file mode 100644 index 6d968f98ad5..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-config-json/pom.xml b/ambari-logsearch/ambari-logsearch-config-json/pom.xml deleted file mode 100644 index cdccbbacae4..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/pom.xml +++ /dev/null @@ -1,65 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-config-json - jar - Ambari Logsearch Config JSON - http://maven.apache.org - - - UTF-8 - - - - - org.apache.ambari - ambari-logsearch-config-api - ${project.version} - - - org.apache.commons - commons-lang3 - 3.4 - - - commons-collections - commons-collections - 3.2.2 - - - org.slf4j - slf4j-api - 1.7.20 - - - com.google.code.gson - gson - 2.6.2 - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/JsonHelper.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/JsonHelper.java deleted file mode 100644 index 75fc71ef6fa..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/JsonHelper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.json; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; - -import java.util.Map; - -/** - * Utility class to help JSON operations. 
- */ -public class JsonHelper { - - private JsonHelper() { - } - - public static JsonElement mergeGlobalConfigWithInputConfig(JsonParser parser, String inputConfig, JsonArray globalConfigNode) { - JsonElement inputConfigJson = parser.parse(inputConfig); - for (Map.Entry typeEntry : inputConfigJson.getAsJsonObject().entrySet()) { - for (JsonElement e : typeEntry.getValue().getAsJsonArray()) { - for (JsonElement globalConfig : globalConfigNode) { - merge(globalConfig.getAsJsonObject(), e.getAsJsonObject()); - } - } - } - return inputConfigJson; - } - - public static void merge(JsonObject source, JsonObject target) { - for (Map.Entry e : source.entrySet()) { - if (!target.has(e.getKey())) { - target.add(e.getKey(), e.getValue()); - } else { - if (e.getValue().isJsonObject()) { - JsonObject valueJson = (JsonObject)e.getValue(); - merge(valueJson, target.get(e.getKey()).getAsJsonObject()); - } - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/ConditionsImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/ConditionsImpl.java deleted file mode 100644 index a6de00c3bdb..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/ConditionsImpl.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.Conditions; - -import com.google.gson.annotations.Expose; - -@ShipperConfigTypeDescription( - name = "Conditions", - description = "Describes the conditions that should be met in order to match a filter to an input element.\n" + - "\n" + - "It has the following attributes:" -) -public class ConditionsImpl implements Conditions { - @ShipperConfigElementDescription( - path = "/filter/[]/conditions/fields", - type = "json object", - description = "The fields in the input element of which's value should be met." 
- ) - @Expose - private FieldsImpl fields; - - public FieldsImpl getFields() { - return fields; - } - - public void setFields(FieldsImpl fields) { - this.fields = fields; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FieldsImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FieldsImpl.java deleted file mode 100644 index 426984a3b15..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FieldsImpl.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.util.Set; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.Fields; - -import com.google.gson.annotations.Expose; - -@ShipperConfigTypeDescription( - name = "Fields", - description = "Describes a the fields which's value should be met in order to match a filter to an input element.\n" + - "\n" + - "It has the following attributes:" - ) -public class FieldsImpl implements Fields { - @ShipperConfigElementDescription( - path = "/filter/[]/conditions/fields/type", - type = "list of strings", - description = "The acceptable values for the type field in the input element.", - examples = {"ambari_server", "\"spark_jobhistory_server\", \"spark_thriftserver\", \"livy_server\""} - ) - @Expose - private Set type; - - public Set getType() { - return type; - } - - public void setType(Set type) { - this.type = type; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterAdapter.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterAdapter.java deleted file mode 100644 index fcc7876f704..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterAdapter.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
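A minimal sketch of the merge semantics of the JsonHelper utility shown earlier, assuming Gson 2.6.x and the removed org.apache.ambari.logsearch.config.json.JsonHelper class are on the classpath; the sample JSON fragments are invented for illustration:

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import org.apache.ambari.logsearch.config.json.JsonHelper;

public class JsonHelperMergeSketch {
  public static void main(String[] args) {
    JsonParser parser = new JsonParser();
    // Invented global defaults, e.g. shared add_fields for every input.
    JsonObject global = parser.parse("{\"add_fields\":{\"cluster\":\"cl1\"},\"tail\":true}").getAsJsonObject();
    // One invented input entry that already sets tail and adds its own field.
    JsonObject input = parser.parse("{\"tail\":false,\"add_fields\":{\"level\":\"INFO\"}}").getAsJsonObject();

    JsonHelper.merge(global, input);

    // Keys already present in the target win, missing keys are copied from the source,
    // and nested objects are merged recursively:
    // {"tail":false,"add_fields":{"level":"INFO","cluster":"cl1"}}
    System.out.println(input);
  }
}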
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.lang.reflect.Type; - -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; -import com.google.gson.JsonElement; - -public class FilterAdapter implements JsonDeserializer { - @Override - public FilterDescriptorImpl deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) { - switch (json.getAsJsonObject().get("filter").getAsString()) { - case "grok": - return (FilterDescriptorImpl)context.deserialize(json, FilterGrokDescriptorImpl.class); - case "keyvalue": - return (FilterDescriptorImpl)context.deserialize(json, FilterKeyValueDescriptorImpl.class); - case "json": - return (FilterDescriptorImpl)context.deserialize(json, FilterJsonDescriptorImpl.class); - default: - throw new IllegalArgumentException("Unknown filter type: " + json.getAsJsonObject().get("filter").getAsString()); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterDescriptorImpl.java deleted file mode 100644 index b40c139f043..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterDescriptorImpl.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Filter", - description = "The filter element in the [input configuration](inputConfig.md) contains a list of filter descriptions, each describing one filter applied on an input.\n" + - "\n" + - "The general elements in the json are the following:" -) -public abstract class FilterDescriptorImpl implements FilterDescriptor { - @ShipperConfigElementDescription( - path = "/filter/[]/filter", - type = "string", - description = "The type of the filter.", - examples = {"grok", "keyvalue", "json"} - ) - @Expose - private String filter; - - @ShipperConfigElementDescription( - path = "/filter/[]/conditions", - type = "json object", - description = "The conditions of which input to filter." - ) - @Expose - private ConditionsImpl conditions; - - @ShipperConfigElementDescription( - path = "/filter/[]/sort_order", - type = "integer", - description = "Describes the order in which the filters should be applied.", - examples = {"1", "3"} - ) - @Expose - @SerializedName("sort_order") - private Integer sortOrder; - - @ShipperConfigElementDescription( - path = "/filter/[]/source_field", - type = "integer", - description = "The source of the filter, must be set for keyvalue filters.", - examples = {"field_further_to_filter"}, - defaultValue = "log_message" - ) - @Expose - @SerializedName("source_field") - private String sourceField; - - @ShipperConfigElementDescription( - path = "/filter/[]/remove_source_field", - type = "boolean", - description = "Remove the source field after the filter is applied.", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("remove_source_field") - private Boolean removeSourceField; - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values", - type = "dictionary string to list of json objects", - description = "Mappings done after the filtering provided it's result." 
- ) - @Expose - @SerializedName("post_map_values") - private Map<String, List<PostMapValues>> postMapValues; - - @ShipperConfigElementDescription( - path = "/filter/[]/is_enabled", - type = "boolean", - description = "A flag to show if the filter should be used.", - examples = {"true", "false"}, - defaultValue = "true" - ) - @Expose - @SerializedName("is_enabled") - private Boolean isEnabled; - - public String getFilter() { - return filter; - } - - public void setFilter(String filter) { - this.filter = filter; - } - - public ConditionsImpl getConditions() { - return conditions; - } - - public void setConditions(ConditionsImpl conditions) { - this.conditions = conditions; - } - - public Integer getSortOrder() { - return sortOrder; - } - - public void setSortOrder(Integer sortOrder) { - this.sortOrder = sortOrder; - } - - public String getSourceField() { - return sourceField; - } - - public void setSourceField(String sourceField) { - this.sourceField = sourceField; - } - - public Boolean isRemoveSourceField() { - return removeSourceField; - } - - public void setRemoveSourceField(Boolean removeSourceField) { - this.removeSourceField = removeSourceField; - } - - public Map<String, List<PostMapValues>> getPostMapValues() { - return postMapValues; - } - - public void setPostMapValues(Map<String, List<PostMapValues>> postMapValues) { - this.postMapValues = postMapValues; - } - - public Boolean isEnabled() { - return isEnabled; - } - - public void setIsEnabled(Boolean isEnabled) { - this.isEnabled = isEnabled; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterGrokDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterGrokDescriptorImpl.java deleted file mode 100644 index 520af97bff0..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterGrokDescriptorImpl.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.  See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.  The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.  You may obtain a copy of the License at - * - *   http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.  See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Grok Filter", - description = "Grok filters have the following additional parameters:" -) -public class FilterGrokDescriptorImpl extends FilterDescriptorImpl implements FilterGrokDescriptor { - @ShipperConfigElementDescription( - path = "/filter/[]/log4j_format", - type = "string", - description = "The log4j pattern of the log, not used, it is only there for documentation.", - examples = {"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n"} - ) - @Expose - @SerializedName("log4j_format") - private String log4jFormat; - - @ShipperConfigElementDescription( - path = "/filter/[]/multiline_pattern", - type = "string", - description = "The grok pattern that shows that the line is not a log line on it's own but the part of a multi line entry.", - examples = {"^(%{TIMESTAMP_ISO8601:logtime})"} - ) - @Expose - @SerializedName("multiline_pattern") - private String multilinePattern; - - @ShipperConfigElementDescription( - path = "/filter/[]/message_pattern", - type = "string", - description = "The grok pattern to use to parse the log entry.", - examples = {"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}"} - ) - @Expose - @SerializedName("message_pattern") - private String messagePattern; - - @ShipperConfigElementDescription( - path = "/filter/[]/skip_on_error", - type = "boolean", - description = "Skip filter if an error occurred during applying the grok filter.", - examples = {"true"} - ) - @Expose - @SerializedName("skip_on_error") - private boolean skipOnError; - - @ShipperConfigElementDescription( - path = "/filter/[]/deep_extract", - type = "boolean", - description = "", - examples = {""} - ) - @Expose - @SerializedName("deep_extract") - private boolean deepExtract; - - @Override - public String getLog4jFormat() { - return log4jFormat; - } - - public void setLog4jFormat(String log4jFormat) { - this.log4jFormat = log4jFormat; - } - - @Override - public String getMultilinePattern() { - return multilinePattern; - } - - @Override - public void setMultilinePattern(String multilinePattern) { - this.multilinePattern = multilinePattern; - } - - @Override - public boolean isSkipOnError() { - return this.skipOnError; - } - - public void setSkipOnError(boolean skipOnError) { - this.skipOnError = skipOnError; - } - - @Override - public boolean isDeepExtract() { - return deepExtract; - } - - public void setDeepExtract(boolean deepExtract) { - this.deepExtract = deepExtract; - } - - @Override - public String getMessagePattern() { - return messagePattern; - } - - public void setMessagePattern(String messagePattern) { - this.messagePattern = messagePattern; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterJsonDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterJsonDescriptorImpl.java deleted file mode 100644 index 2dc8439ba97..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterJsonDescriptorImpl.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterJsonDescriptor; - -public class FilterJsonDescriptorImpl extends FilterDescriptorImpl implements FilterJsonDescriptor { -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterKeyValueDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterKeyValueDescriptorImpl.java deleted file mode 100644 index b45f2a002c3..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/FilterKeyValueDescriptorImpl.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Key-value Filter", - description = "value_borders is only used if it is specified, and value_split is not.\n" + - "\n" + - "Key-value filters have the following additional parameters:" -) -public class FilterKeyValueDescriptorImpl extends FilterDescriptorImpl implements FilterKeyValueDescriptor { - @ShipperConfigElementDescription( - path = "/filter/[]/field_split", - type = "string", - description = "The string that splits the key-value pairs.", - examples = {" ", ","}, - defaultValue = "\\t" - ) - @Expose - @SerializedName("field_split") - private String fieldSplit; - - @ShipperConfigElementDescription( - path = "/filter/[]/value_split", - type = "string", - description = "The string that separates keys from values.", - examples = {":", "->"}, - defaultValue = "=" - ) - @Expose - @SerializedName("value_split") - private String valueSplit; - - @ShipperConfigElementDescription( - path = "/filter/[]/value_borders", - type = "string", - description = "The borders around the value, must be 2 characters long, first before it, second after it.", - examples = {"()", "[]", "{}"} - ) - @Expose - @SerializedName("value_borders") - private String valueBorders; - - public String getFieldSplit() { - return fieldSplit; - } - - public void setFieldSplit(String fieldSplit) { - this.fieldSplit = fieldSplit; - } - - public String getValueSplit() { - return valueSplit; - } - - public void setValueSplit(String valueSplit) { - this.valueSplit = valueSplit; - } - - public String getValueBorders() { - return valueBorders; - } - - public void setValueBorders(String valueBorders) { - this.valueBorders = valueBorders; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputAdapter.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputAdapter.java deleted file mode 100644 index d0a40929633..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputAdapter.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.lang.reflect.Type; - -import com.google.gson.JsonArray; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; -import com.google.gson.JsonElement; - -public class InputAdapter implements JsonDeserializer { - private static JsonArray globalConfigs; - public static void setGlobalConfigs(JsonArray globalConfigs_) { - globalConfigs = globalConfigs_; - } - - @Override - public InputDescriptorImpl deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) { - String source = null; - if (json.getAsJsonObject().has("source")) { - source = json.getAsJsonObject().get("source").getAsString(); - } else { - for (JsonElement e : globalConfigs) { - if (e.getAsJsonObject().has("source")) { - source = e.getAsJsonObject().get("source").getAsString(); - break; - } - } - } - - switch (source) { - case "file": - return (InputDescriptorImpl)context.deserialize(json, InputFileDescriptorImpl.class); - case "s3_file": - return (InputDescriptorImpl)context.deserialize(json, InputS3FileDescriptorImpl.class); - case "socket": - return (InputDescriptorImpl)context.deserialize(json, InputSocketDescriptorImpl.class); - case "custom": - return (InputDescriptorImpl)context.deserialize(json, InputCustomDescriptorImpl.class); - default: - throw new IllegalArgumentException("Unknown input type: " + json.getAsJsonObject().get("source").getAsString()); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputConfigGson.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputConfigGson.java deleted file mode 100644 index 78d3db324c8..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputConfigGson.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.lang.reflect.Type; -import java.util.List; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.reflect.TypeToken; - -/** - * Helper class to convert between json string and InputConfig class. 
- */ -public class InputConfigGson { - public static Gson gson; - static { - Type inputType = new TypeToken<InputDescriptorImpl>() {}.getType(); - Type filterType = new TypeToken<FilterDescriptorImpl>() {}.getType(); - Type postMapValuesType = new TypeToken<List<PostMapValues>>() {}.getType(); - gson = new GsonBuilder() - .registerTypeAdapter(inputType, new InputAdapter()) - .registerTypeAdapter(filterType, new FilterAdapter()) - .registerTypeAdapter(postMapValuesType, new PostMapValuesAdapter()) - .setPrettyPrinting() - .excludeFieldsWithoutExposeAnnotation() - .create(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputConfigImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputConfigImpl.java deleted file mode 100644 index 56660af3ff8..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputConfigImpl.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.  See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.  The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.  You may obtain a copy of the License at - * - *   http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.  See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.util.List; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; - -import com.google.gson.annotations.Expose; - -@ShipperConfigTypeDescription( - name = "Input Config", - description = "The input configurations are stored in json files.
Each of them are describing the processing of the log files of a service.\n" + - "\n" + - "The json contains two elements:" -) -public class InputConfigImpl implements InputConfig { - @ShipperConfigElementDescription( - path = "/input", - type = "list of json objects", - description = "A list of input descriptions" - ) - @Expose - private List input; - - @ShipperConfigElementDescription( - path = "/filter", - type = "list of json objects", - description = "A list of filter descriptions" - ) - @Expose - private List filter; - - @Override - public List getInput() { - return input; - } - - public void setInput(List input) { - this.input = input; - } - - @Override - public List getFilter() { - return filter; - } - - public void setFilter(List filter) { - this.filter = filter; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputCustomDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputCustomDescriptorImpl.java deleted file mode 100644 index cc9a36fc049..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputCustomDescriptorImpl.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
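Putting the pieces above together, a minimal sketch of parsing a shipper config with the removed InputConfigGson helper, assuming the deleted config-json classes are on the classpath; the JSON fragment is invented and deliberately sets an explicit "source" so that InputAdapter's global-config fallback is not exercised:

import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson;
import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl;

public class InputConfigParseSketch {
  public static void main(String[] args) {
    // A made-up, minimal shipper config: one file input and one grok filter.
    String json = "{"
        + "\"input\":[{\"type\":\"zookeeper\",\"rowtype\":\"service\",\"source\":\"file\","
        + "\"path\":\"/var/log/zookeeper/zookeeper*.log\"}],"
        + "\"filter\":[{\"filter\":\"grok\",\"conditions\":{\"fields\":{\"type\":[\"zookeeper\"]}},"
        + "\"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{GREEDYDATA:log_message}\"}]"
        + "}";

    // InputAdapter and FilterAdapter pick the concrete descriptor classes from the
    // "source" and "filter" fields while Gson deserializes the two lists.
    InputConfigImpl config = InputConfigGson.gson.fromJson(json, InputConfigImpl.class);
    System.out.println(config.getInput().size() + " input(s), " + config.getFilter().size() + " filter(s)");
  }
}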
- */ -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputCustomDescriptor; - -import java.util.Map; - -public class InputCustomDescriptorImpl extends InputDescriptorImpl implements InputCustomDescriptor { - - @ShipperConfigElementDescription( - path = "/input/[]/properties", - type = "map", - description = "Custom key value pairs", - examples = {"{k1 : v1, k2: v2}"}, - defaultValue = "" - ) - @Expose - @SerializedName("properties") - private Map properties; - - @ShipperConfigElementDescription( - path = "/input/[]/class_name", - type = "string", - description = "Custom class which implements an input type", - examples = {"org.example.MyInputSource"}, - defaultValue = "" - ) - @Expose - @SerializedName("class") - private String mapperClassName; - - @Override - public Map getProperties() { - return this.properties; - } - - @Override - public String getMapperClassName() { - return this.mapperClassName; - } - - @Override - public void setProperties(Map properties) { - this.properties = properties; - } - - @Override - public void setMapperClassName(String className) { - this.mapperClassName = className; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputDescriptorImpl.java deleted file mode 100644 index d175c586253..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputDescriptorImpl.java +++ /dev/null @@ -1,352 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Input", - description = "The input element in the input configuration contains a list of input descriptions, each describing one source of input.\n" + - "\n" + - "The general elements in the json are the following:" -) -public abstract class InputDescriptorImpl implements InputDescriptor { - @ShipperConfigElementDescription( - path = "/input/[]/type", - type = "string", - description = "The log id for this source.", - examples = {"zookeeper", "ambari_server"} - ) - @Expose - private String type; - - @ShipperConfigElementDescription( - path = "/input/[]/rowtype", - type = "string", - description = "The type of the row.", - examples = {"service", "audit"} - ) - @Expose - private String rowtype; - - @ShipperConfigElementDescription( - path = "/input/[]/group", - type = "string", - description = "Group of the input type.", - examples = {"Ambari", "Yarn"} - ) - @Expose - private String group; - - @ShipperConfigElementDescription( - path = "/input/[]/path", - type = "string", - description = "The path of the source, may contain '*' characters too.", - examples = {"/var/log/ambari-logsearch-logfeeder/logsearch-logfeeder.json", "/var/log/zookeeper/zookeeper*.log"} - ) - @Expose - private String path; - - @ShipperConfigElementDescription( - path = "/input/[]/add_fields", - type = "dictionary", - description = "The element contains field_name: field_value pairs which will be added to each rows data.", - examples = {"\"cluster\":\"cluster_name\""} - ) - @Expose - @SerializedName("add_fields") - private Map addFields; - - @ShipperConfigElementDescription( - path = "/input/[]/source", - type = "dictionary", - description = "The type of the input source.", - examples = {"file", "s3_file"} - ) - @Expose - private String source; - - @ShipperConfigElementDescription( - path = "/input/[]/tail", - type = "boolean", - description = "The input should check for only the latest file matching the pattern, not all of them.", - examples = {"true", "false"}, - defaultValue = "true" - ) - @Expose - private Boolean tail; - - @ShipperConfigElementDescription( - path = "/input/[]/gen_event_md5", - type = "boolean", - description = "Generate an event_md5 field for each row by creating a hash of the row data.", - examples = {"true", "false"}, - defaultValue = "true" - ) - @Expose - @SerializedName("gen_event_md5") - private Boolean genEventMd5; - - @ShipperConfigElementDescription( - path = "/input/[]/use_event_md5_as_id", - type = "boolean", - description = "Generate an id for each row by creating a hash of the row data.", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("use_event_md5_as_id") - private Boolean useEventMd5AsId; - - @ShipperConfigElementDescription( - path = "/input/[]/cache_enabled", - type = "boolean", - description = "Allows the input to use a cache to filter out duplications.", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("cache_enabled") - private Boolean cacheEnabled; - - @ShipperConfigElementDescription( - path = 
"/input/[]/cache_key_field", - type = "string", - description = "Specifies the field for which to use the cache to find duplications of.", - examples = {"some_field_prone_to_repeating_value"}, - defaultValue = "log_message" - ) - @Expose - @SerializedName("cache_key_field") - private String cacheKeyField; - - @ShipperConfigElementDescription( - path = "/input/[]/cache_last_dedup_enabled", - type = "boolean", - description = "Allow to filter out entries which are same as the most recent one irrelevant of it's time.", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("cache_last_dedup_enabled") - private Boolean cacheLastDedupEnabled; - - @ShipperConfigElementDescription( - path = "/input/[]/cache_size", - type = "integer", - description = "The number of entries to store in the cache.", - examples = {"50"}, - defaultValue = "100" - ) - @Expose - @SerializedName("cache_size") - private Integer cacheSize; - - @ShipperConfigElementDescription( - path = "/input/[]/cache_dedup_interval", - type = "integer", - description = "The maximum interval in ms which may pass between two identical log messages to filter the latter out.", - examples = {"500"}, - defaultValue = "1000" - ) - @Expose - @SerializedName("cache_dedup_interval") - private Long cacheDedupInterval; - - @ShipperConfigElementDescription( - path = "/input/[]/is_enabled", - type = "boolean", - description = "A flag to show if the input should be used.", - examples = {"true", "false"}, - defaultValue = "true" - ) - @Expose - @SerializedName("is_enabled") - private Boolean isEnabled; - - - @ShipperConfigElementDescription( - path = "/input/[]/init_default_fields", - type = "boolean", - description = "Init default fields (ip, path etc.) before applying the filter.", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("init_default_fields") - private Boolean initDefaultFields; - - @ShipperConfigElementDescription( - path = "/input/[]/default_log_levels", - type = "list of strings", - description = "Use these as default log levels for the input - overrides the global default log levels.", - examples = {"default_log_levels: [\"INFO\", \"WARN\"]"} - ) - @Expose - @SerializedName("default_log_levels") - private List defaultLogLevels; - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getRowtype() { - return rowtype; - } - - public void setRowtype(String rowType) { - this.rowtype = rowType; - } - - public String getPath() { - return path; - } - - public void setPath(String path) { - this.path = path; - } - - public Map getAddFields() { - return addFields; - } - - public void setAddFields(Map addFields) { - this.addFields = addFields; - } - - public String getSource() { - return source; - } - - public void setSource(String source) { - this.source = source; - } - - public Boolean isTail() { - return tail; - } - - public void setTail(Boolean tail) { - this.tail = tail; - } - - public Boolean isGenEventMd5() { - return genEventMd5; - } - - public void setGenEventMd5(Boolean genEventMd5) { - this.genEventMd5 = genEventMd5; - } - - public Boolean isUseEventMd5AsId() { - return useEventMd5AsId; - } - - public void setUseEventMd5AsId(Boolean useEventMd5AsId) { - this.useEventMd5AsId = useEventMd5AsId; - } - - public Boolean isCacheEnabled() { - return cacheEnabled; - } - - public void setCacheEnabled(Boolean cacheEnabled) { - this.cacheEnabled = cacheEnabled; - } - - public String getCacheKeyField() { 
- return cacheKeyField; - } - - public void setCacheKeyField(String cacheKeyField) { - this.cacheKeyField = cacheKeyField; - } - - public Boolean getCacheLastDedupEnabled() { - return cacheLastDedupEnabled; - } - - public void setCacheLastDedupEnabled(Boolean cacheLastDedupEnabled) { - this.cacheLastDedupEnabled = cacheLastDedupEnabled; - } - - public Integer getCacheSize() { - return cacheSize; - } - - public void setCacheSize(Integer cacheSize) { - this.cacheSize = cacheSize; - } - - public Long getCacheDedupInterval() { - return cacheDedupInterval; - } - - public void setCacheDedupInterval(Long cacheDedupInterval) { - this.cacheDedupInterval = cacheDedupInterval; - } - - public Boolean isEnabled() { - return isEnabled; - } - - public void setIsEnabled(Boolean isEnabled) { - this.isEnabled = isEnabled; - } - - @Override - public String getGroup() { - return this.group; - } - - public void setGroup(String group) { - this.group = group; - } - - @Override - public Boolean isInitDefaultFields() { - return this.initDefaultFields; - } - - public void setInitDefaultFields(Boolean initDefaultFields) { - this.initDefaultFields = initDefaultFields; - } - - @Override - public List getDefaultLogLevels() { - return defaultLogLevels; - } - - public void setDefaultLogLevels(List defaultLogLevels) { - this.defaultLogLevels = defaultLogLevels; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputFileBaseDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputFileBaseDescriptorImpl.java deleted file mode 100644 index dca47f8f2f4..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputFileBaseDescriptorImpl.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "File Input", - description = "File inputs have some additional parameters:" -) -public class InputFileBaseDescriptorImpl extends InputDescriptorImpl implements InputFileBaseDescriptor { - @ShipperConfigElementDescription( - path = "/input/[]/checkpoint_interval_ms", - type = "integer", - description = "The time interval in ms when the checkpoint file should be updated.", - examples = {"10000"}, - defaultValue = "5000" - ) - @Expose - @SerializedName("checkpoint_interval_ms") - private Integer checkpointIntervalMs; - - @ShipperConfigElementDescription( - path = "/input/[]/process_file", - type = "boolean", - description = "Should the file be processed.", - examples = {"true", "false"}, - defaultValue = "true" - ) - @Expose - @SerializedName("process_file") - private Boolean processFile; - - @ShipperConfigElementDescription( - path = "/input/[]/copy_file", - type = "boolean", - description = "Should the file be copied (only if not processed).", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("copy_file") - private Boolean copyFile; - - @Override - public Boolean getProcessFile() { - return processFile; - } - - public void setProcessFile(Boolean processFile) { - this.processFile = processFile; - } - - @Override - public Boolean getCopyFile() { - return copyFile; - } - - public void setCopyFile(Boolean copyFile) { - this.copyFile = copyFile; - } - - @Override - public Integer getCheckpointIntervalMs() { - return checkpointIntervalMs; - } - - public void setCheckpointIntervalMs(Integer checkpointIntervalMs) { - this.checkpointIntervalMs = checkpointIntervalMs; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputFileDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputFileDescriptorImpl.java deleted file mode 100644 index a0613d8a9f3..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputFileDescriptorImpl.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileDescriptor; - -public class InputFileDescriptorImpl extends InputFileBaseDescriptorImpl implements InputFileDescriptor { - - @ShipperConfigElementDescription( - path = "/input/[]/detach_interval_min", - type = "integer", - description = "The period in minutes for checking which files are too old (default: 300)", - examples = {"60"}, - defaultValue = "1800" - ) - @Expose - @SerializedName("detach_interval_min") - private Integer detachIntervalMin; - - @ShipperConfigElementDescription( - path = "/input/[]/detach_time_min", - type = "integer", - description = "The period in minutes after which the application flags a file as too old (default: 2000)", - examples = {"60"}, - defaultValue = "2000" - ) - @Expose - @SerializedName("detach_time_min") - private Integer detachTimeMin; - - @ShipperConfigElementDescription( - path = "/input/[]/path_update_interval_min", - type = "integer", - description = "The period in minutes for checking new files (default: 5, based on detach values, it's possible that a new input won't be monitored)", - examples = {"5"}, - defaultValue = "5" - ) - @Expose - @SerializedName("path_update_interval_min") - private Integer pathUpdateIntervalMin; - - @ShipperConfigElementDescription( - path = "/input/[]/max_age_min", - type = "integer", - description = "If the file has not been modified for long (this time value in minutes), then the checkpoint file can be deleted.", - examples = {"2000"}, - defaultValue = "0" - ) - @Expose - @SerializedName("max_age_min") - private Integer maxAgeMin; - - @ShipperConfigElementDescription( - path = "/input/[]/docker", - type = "boolean", - description = "Input comes from a docker container.", - examples = {"true", "false"}, - defaultValue = "false" - ) - @Expose - @SerializedName("docker") - private Boolean dockerEnabled; - - @Override - public Integer getDetachIntervalMin() { - return this.detachIntervalMin; - } - - @Override - public Integer getDetachTimeMin() { - return this.detachTimeMin; - } - - @Override - public Integer getPathUpdateIntervalMin() { - return this.pathUpdateIntervalMin; - } - - @Override - public Integer getMaxAgeMin() { - return this.maxAgeMin; - } - - @Override - public Boolean getDockerEnabled() { - return dockerEnabled; - } - - public void setDetachIntervalMin(Integer detachIntervalMin) { - this.detachIntervalMin = detachIntervalMin; - } - - public void setDetachTimeMin(Integer detachTimeMin) { - this.detachTimeMin = detachTimeMin; - } - - public void setPathUpdateIntervalMin(Integer pathUpdateIntervalMin) { - this.pathUpdateIntervalMin = pathUpdateIntervalMin; - } - - public void setMaxAgeMin(Integer maxAgeMin) { - this.maxAgeMin = maxAgeMin; - } - - public void setDockerEnabled(Boolean dockerEnabled) { - this.dockerEnabled = dockerEnabled; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputS3FileDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputS3FileDescriptorImpl.java deleted file mode 100644 index 527dae81882..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputS3FileDescriptorImpl.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "S3 File Input", - description = "S3 file inputs have the following parameters in addition to the general file parameters:" -) -public class InputS3FileDescriptorImpl extends InputFileBaseDescriptorImpl implements InputS3FileDescriptor { - @ShipperConfigElementDescription( - path = "/input/[]/s3_access_key", - type = "string", - description = "The access key used for AWS credentials." - ) - @Expose - @SerializedName("s3_access_key") - private String s3AccessKey; - - @ShipperConfigElementDescription( - path = "/input/[]/s3_secret_key", - type = "string", - description = "The secret key used for AWS credentials." - ) - @Expose - @SerializedName("s3_secret_key") - private String s3SecretKey; - - @Override - public String getS3AccessKey() { - return s3AccessKey; - } - - public void setS3AccessKey(String s3AccessKey) { - this.s3AccessKey = s3AccessKey; - } - - @Override - public String getS3SecretKey() { - return s3SecretKey; - } - - public void setS3SecretKey(String s3SecretKey) { - this.s3SecretKey = s3SecretKey; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputSocketDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputSocketDescriptorImpl.java deleted file mode 100644 index 69b00684234..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/InputSocketDescriptorImpl.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputSocketDescriptor; - -@ShipperConfigTypeDescription( - name = "Socket Input", - description = "Socket (TCP/UDP) inputs have the following parameters in addition to the general parameters:" -) -public class InputSocketDescriptorImpl extends InputDescriptorImpl implements InputSocketDescriptor { - - @ShipperConfigElementDescription( - path = "/input/[]/port", - type = "int", - description = "Unique port for specific socket input", - examples = {"61999"} - ) - @Expose - @SerializedName("port") - private Integer port; - - @ShipperConfigElementDescription( - path = "/input/[]/protocol", - type = "int", - description = "Protocol type for socket server (tcp / udp - udp is not supported right now)", - examples = {"udp", "tcp"}, - defaultValue = "tcp" - ) - @Expose - @SerializedName("protocol") - private String protocol; - - @ShipperConfigElementDescription( - path = "/input/[]/secure", - type = "boolean", - description = "Use SSL", - examples = {"true"}, - defaultValue = "false" - ) - @Expose - @SerializedName("secure") - private Boolean secure; - - @ShipperConfigElementDescription( - path = "/input/[]/log4j", - type = "boolean", - description = "Use Log4j serialized objects (e.g.: SocketAppender)", - examples = {"true"}, - defaultValue = "false" - ) - @Expose - @SerializedName("log4j") - private Boolean log4j; - - @Override - public Integer getPort() { - return this.port; - } - - @Override - public String getProtocol() { - return this.protocol; - } - - @Override - public Boolean isSecure() { - return this.secure; - } - - @Override - public Boolean isLog4j() { - return this.log4j; - } - - public void setPort(Integer port) { - this.port = port; - } - - public void setProtocol(String protocol) { - this.protocol = protocol; - } - - public void setSecure(Boolean secure) { - this.secure = secure; - } - - public void setLog4j(Boolean log4j) { - this.log4j = log4j; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapAnonymizeDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapAnonymizeDescriptorImpl.java deleted file mode 100644 index 4b67d5f1dfb..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapAnonymizeDescriptorImpl.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapAnonymizeDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Map Anonymize", - description = "The name of the mapping element should be map_anonymize. The value json element should contain the following parameter:" -) -public class MapAnonymizeDescriptorImpl extends MapFieldDescriptorImpl implements MapAnonymizeDescriptor { - @Override - public String getJsonName() { - return "map_anonymize"; - } - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_anonymize/pattern", - type = "string", - description = "The pattern to use to identify parts to anonymize. The parts to hide should be marked with the \"<hide>\" string.", - examples = {"Some secret is here: <hide>, and another one is here: <hide>"} - ) - @Expose - private String pattern; - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_anonymize/hide_char", - type = "string", - description = "The character to hide with", - defaultValue = "*", - examples = {"X", "-"} - ) - @Expose - @SerializedName("hide_char") - private Character hideChar; - - @Override - public String getPattern() { - return pattern; - } - - public void setPattern(String pattern) { - this.pattern = pattern; - } - - @Override - public Character getHideChar() { - return hideChar; - } - - public void setHideChar(Character hideChar) { - this.hideChar = hideChar; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapCustomDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapCustomDescriptorImpl.java deleted file mode 100644 index 34abdb9804e..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapCustomDescriptorImpl.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapCustomDescriptor; - -import java.util.Map; - -@ShipperConfigTypeDescription( - name = "Map Custom", - description = "The name of the mapping element should be map_custom. The value json element may contain the following parameters:" -) -public class MapCustomDescriptorImpl implements MapCustomDescriptor { - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_custom/properties", - type = "map", - description = "Custom key value pairs", - examples = {"{k1 : v1, k2: v2}"}, - defaultValue = "" - ) - @Expose - @SerializedName("properties") - private Map properties; - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_custom/class_name", - type = "string", - description = "Custom class which implements a mapper type", - examples = {"org.example.MyMapper"}, - defaultValue = "" - ) - @Expose - @SerializedName("class") - private String mapperClassName; - - @Override - public Map getProperties() { - return this.properties; - } - - @Override - public String getMapperClassName() { - return this.mapperClassName; - } - - @Override - public String getJsonName() { - return "map_custom"; - } - - @Override - public void setProperties(Map properties) { - this.properties = properties; - } - - @Override - public void setMapperClassName(String className) { - this.mapperClassName = className; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapDateDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapDateDescriptorImpl.java deleted file mode 100644 index 13869cad83c..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapDateDescriptorImpl.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapDateDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Map Date", - description = "The name of the mapping element should be map_date. The value json element may contain the following parameters:" -) -public class MapDateDescriptorImpl extends MapFieldDescriptorImpl implements MapDateDescriptor { - @Override - public String getJsonName() { - return "map_date"; - } - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_date/src_date_pattern", - type = "string", - description = "If it is specified, then the mapper converts from this format to the target, and also adds the missing year", - examples = {"MMM dd HH:mm:ss"} - ) - @Expose - @SerializedName("src_date_pattern") - private String sourceDatePattern; - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_date/target_date_pattern", - type = "string", - description = "If 'epoch', then the field is parsed as seconds since 1970, otherwise the content is used as the pattern", - examples = {"yyyy-MM-dd HH:mm:ss,SSS", "epoch"} - ) - @Expose - @SerializedName("target_date_pattern") - private String targetDatePattern; - - @Override - public String getSourceDatePattern() { - return sourceDatePattern; - } - - public void setSourceDatePattern(String sourceDatePattern) { - this.sourceDatePattern = sourceDatePattern; - } - - @Override - public String getTargetDatePattern() { - return targetDatePattern; - } - - public void setTargetDatePattern(String targetDatePattern) { - this.targetDatePattern = targetDatePattern; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldCopyDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldCopyDescriptorImpl.java deleted file mode 100644 index fa4c43661b1..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldCopyDescriptorImpl.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldCopyDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Map Copy", - description = "The name of the mapping element should be map_copy. The value json element should contain the following parameter:" -) -public class MapFieldCopyDescriptorImpl extends MapFieldDescriptorImpl implements MapFieldCopyDescriptor { - @Override - public String getJsonName() { - return "map_field_copy"; - } - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_copy/copy_name", - type = "string", - description = "The name of the copied field", - examples = {"new_name"} - ) - @Expose - @SerializedName("copy_name") - private String copyName; - - @Override - public String getCopyName() { - return copyName; - } - - public void setCopyName(String copyName) { - this.copyName = copyName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldDescriptorImpl.java deleted file mode 100644 index 4581d625953..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldDescriptorImpl.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; - -@ShipperConfigTypeDescription( - name = "Post Map Values", - description = "The Post Map Values element in the [filter](filter.md) contains the field names as keys, the values are lists of sets of " + - "post map values, each describing one mapping applied to a field obtained after filtering.\n" + - "\n" + - "Currently the following kinds of mappings are supported:" - ) -public abstract class MapFieldDescriptorImpl implements MapFieldDescriptor { -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldNameDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldNameDescriptorImpl.java deleted file mode 100644 index 5c994c93b3a..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldNameDescriptorImpl.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldNameDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Map Field Name", - description = "The name of the mapping element should be map_field_name. 
The value json element should contain the following parameter:" -) -public class MapFieldNameDescriptorImpl extends MapFieldDescriptorImpl implements MapFieldNameDescriptor { - @Override - public String getJsonName() { - return "map_field_name"; - } - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_field_name/new_field_name", - type = "string", - description = "The name of the renamed field", - examples = {"new_name"} - ) - @Expose - @SerializedName("new_field_name") - private String newFieldName; - - @Override - public String getNewFieldName() { - return newFieldName; - } - - public void setNewFieldName(String newFieldName) { - this.newFieldName = newFieldName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldValueDescriptorImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldValueDescriptorImpl.java deleted file mode 100644 index 8e850f271e0..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/MapFieldValueDescriptorImpl.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.config.api.ShipperConfigTypeDescription; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldValueDescriptor; - -import com.google.gson.annotations.Expose; -import com.google.gson.annotations.SerializedName; - -@ShipperConfigTypeDescription( - name = "Map Field Value", - description = "The name of the mapping element should be map_field_value. 
The value json element should contain the following parameter:" -) -public class MapFieldValueDescriptorImpl extends MapFieldDescriptorImpl implements MapFieldValueDescriptor { - @Override - public String getJsonName() { - return "map_field_value"; - } - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_field_value/pre_value", - type = "string", - description = "The value that the field must match (ignoring case) to be mapped", - examples = {"old_value"} - ) - @Expose - @SerializedName("pre_value") - private String preValue; - - @ShipperConfigElementDescription( - path = "/filter/[]/post_map_values/{field_name}/[]/map_field_value/post_value", - type = "string", - description = "The value to which the field is modified to", - examples = {"new_value"} - ) - @Expose - @SerializedName("post_value") - private String postValue; - - @Override - public String getPreValue() { - return preValue; - } - - public void setPreValue(String preValue) { - this.preValue = preValue; - } - - @Override - public String getPostValue() { - return postValue; - } - - public void setPostValue(String postValue) { - this.postValue = postValue; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/PostMapValuesAdapter.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/PostMapValuesAdapter.java deleted file mode 100644 index 50ce358b227..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/PostMapValuesAdapter.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.lang.reflect.Type; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; - -import com.google.gson.JsonArray; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; -import com.google.gson.JsonSerializer; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonSerializationContext; - -public class PostMapValuesAdapter implements JsonDeserializer>, JsonSerializer> { - @Override - public List deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) { - List vals = new ArrayList<>(); - if (json.isJsonArray()) { - for (JsonElement e : json.getAsJsonArray()) { - vals.add(createPostMapValues(e, context)); - } - } else if (json.isJsonObject()) { - vals.add(createPostMapValues(json, context)); - } else { - throw new RuntimeException("Unexpected JSON type: " + json.getClass()); - } - return vals; - } - - private PostMapValuesImpl createPostMapValues(JsonElement e, JsonDeserializationContext context) { - List mappers = new ArrayList<>(); - for (Map.Entry m : e.getAsJsonObject().entrySet()) { - switch (m.getKey()) { - case "map_date": - mappers.add(context.deserialize(m.getValue(), MapDateDescriptorImpl.class)); - break; - case "map_field_copy": - mappers.add(context.deserialize(m.getValue(), MapFieldCopyDescriptorImpl.class)); - break; - case "map_field_name": - mappers.add(context.deserialize(m.getValue(), MapFieldNameDescriptorImpl.class)); - break; - case "map_field_value": - mappers.add(context.deserialize(m.getValue(), MapFieldValueDescriptorImpl.class)); - break; - case "map_anonymize": - mappers.add(context.deserialize(m.getValue(), MapAnonymizeDescriptorImpl.class)); - break; - case "map_custom": - mappers.add(context.deserialize(m.getValue(), MapCustomDescriptorImpl.class)); - default: - System.out.println("Unknown key: " + m.getKey()); - } - } - - PostMapValuesImpl postMapValues = new PostMapValuesImpl(); - postMapValues.setMappers(mappers); - return postMapValues; - } - - @Override - public JsonElement serialize(List src, Type typeOfSrc, JsonSerializationContext context) { - if (src.size() == 1) { - return createMapperObject(src.get(0), context); - } else { - JsonArray jsonArray = new JsonArray(); - for (PostMapValuesImpl postMapValues : src) { - jsonArray.add(createMapperObject(postMapValues, context)); - } - return jsonArray; - } - } - - private JsonElement createMapperObject(PostMapValuesImpl postMapValues, JsonSerializationContext context) { - JsonObject jsonObject = new JsonObject(); - for (MapFieldDescriptor m : postMapValues.getMappers()) { - jsonObject.add(m.getJsonName(), context.serialize(m)); - } - return jsonObject; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/PostMapValuesImpl.java b/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/PostMapValuesImpl.java deleted file mode 100644 index 6d6e29fd043..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-json/src/main/java/org/apache/ambari/logsearch/config/json/model/inputconfig/impl/PostMapValuesImpl.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.json.model.inputconfig.impl; - -import java.util.List; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues; - -import com.google.gson.annotations.Expose; - -public class PostMapValuesImpl implements PostMapValues { - @Expose - private List mappers; - - public List getMappers() { - return mappers; - } - - public void setMappers(List mappers) { - this.mappers = mappers; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-local/pom.xml b/ambari-logsearch/ambari-logsearch-config-local/pom.xml deleted file mode 100644 index 7e50f7c875b..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-local/pom.xml +++ /dev/null @@ -1,44 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-config-local - jar - Ambari Logsearch Config Local - http://maven.apache.org - - - UTF-8 - - - - - org.apache.ambari - ambari-logsearch-config-json - ${project.version} - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogLevelFilterManagerLocal.java b/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogLevelFilterManagerLocal.java deleted file mode 100644 index 5ca210d9169..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogLevelFilterManagerLocal.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.config.local; - -import com.google.gson.Gson; -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FilenameFilter; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Map; -import java.util.TreeMap; - -/** - * Local implementation of Log Level Filter manager - keep the data in [config-dir]/filters folder in [service_name]-filter.json files - */ -public class LogLevelFilterManagerLocal implements LogLevelFilterManager { - - private static final Logger LOG = LoggerFactory.getLogger(LogLevelFilterManagerLocal.class); - - private final String configDir; - private final Gson gson; - - private final FilenameFilter filterConfigFilenameFilter = (dir, name) -> name.endsWith("-filter.json"); - - public LogLevelFilterManagerLocal(String configDir, Gson gson) { - this.configDir = configDir; - this.gson = gson; - } - - @Override - public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception { - Path filterDirs = Paths.get(configDir, "filters"); - if (!filterDirs.toFile().exists()) { - Files.createDirectory(filterDirs); - } - String logLevelFilterJson = gson.toJson(filter); - Path filePath = Paths.get(filterDirs.toAbsolutePath().toString(), String.format("%s-filter.json", logId.toLowerCase())); - byte[] data = logLevelFilterJson.getBytes(StandardCharsets.UTF_8); - Files.write(filePath, data); - } - - @Override - public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception { - for (Map.Entry e : filters.getFilter().entrySet()) { - Path filterDirs = Paths.get(configDir, "filters"); - String logLevelFilterJson = gson.toJson(e.getValue()); - Path filePath = Paths.get(filterDirs.toAbsolutePath().toString(), String.format("%s-filter.json", e.getKey())); - if (filePath.toFile().exists()) { - String currentLogLevelFilterJson = new String(Files.readAllBytes(filePath)); - if (!logLevelFilterJson.equals(currentLogLevelFilterJson)) { - byte[] data = logLevelFilterJson.getBytes(StandardCharsets.UTF_8); - Files.write(filePath, data); - LOG.info("Set log level filter for the log " + e.getKey() + " for cluster " + clusterName); - } - } - } - } - - @Override - public LogLevelFilterMap getLogLevelFilters(String clusterName) { - TreeMap filters = new TreeMap<>(); - File filterDirs = Paths.get(configDir, "filters").toFile(); - if (filterDirs.exists()) { - File[] logLevelFilterFiles = filterDirs.listFiles(filterConfigFilenameFilter); - if (logLevelFilterFiles != null) { - for (File file : logLevelFilterFiles) { - try { - String serviceName = file.getName().replace("-filter.json", "").toLowerCase(); - String logLevelFilterStr = new String(Files.readAllBytes(file.toPath())); - LogLevelFilter logLevelFilter = gson.fromJson(logLevelFilterStr, LogLevelFilter.class); - filters.put(serviceName, logLevelFilter); - } catch (IOException e) { - // skip - } - } - } - } - LogLevelFilterMap logLevelFilters = new LogLevelFilterMap(); - logLevelFilters.setFilter(filters); - return logLevelFilters; - } -} diff --git 
a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLocal.java b/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLocal.java deleted file mode 100644 index 191b6f8c3be..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLocal.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.local; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.LogSearchConfig; - -import java.io.FilenameFilter; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.regex.Pattern; - -/** - * Abstract local config location handler - holds common operations for Log Search Server and Log Feeder local config handler - */ -public abstract class LogSearchConfigLocal implements LogSearchConfig { - - private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS"; - - protected Map properties; - protected final Map> clusterInputConfigMap = new ConcurrentHashMap<>(); - protected final Map inputFileContentsMap = new ConcurrentHashMap<>(); - protected Gson gson; - protected final FilenameFilter inputConfigFileFilter = (dir, name) -> name.startsWith("input.config-") && name.endsWith(".json"); - protected final Pattern serviceNamePattern = Pattern.compile("input.config-(.+).json"); - protected final ExecutorService executorService = Executors.newCachedThreadPool(); - protected LogLevelFilterManager logLevelFilterManager; - - public void init(Map properties) throws Exception { - this.properties = properties; - gson = new GsonBuilder().setDateFormat(DATE_FORMAT).setPrettyPrinting().create(); - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception { - Path filePath = Paths.get(getConfigDir(), String.format("input.config-%s.json", serviceName.toLowerCase())); - byte[] data = inputConfig.getBytes(StandardCharsets.UTF_8); - Files.write(filePath, data); - inputFileContentsMap.put(filePath.toAbsolutePath().toString(), inputConfig); - if (!clusterInputConfigMap.containsKey(clusterName)) { - clusterInputConfigMap.put(clusterName, inputFileContentsMap); - } - } - - @Override - public void close() throws IOException 
{ - } - - public abstract String getConfigDir(); - - public abstract void setConfigDir(String configDir); - - @Override - public LogLevelFilterManager getLogLevelFilterManager() { - return logLevelFilterManager; - } - - @Override - public void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager) { - this.logLevelFilterManager = logLevelFilterManager; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLocalUpdater.java b/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLocalUpdater.java deleted file mode 100644 index 76379ec95bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLocalUpdater.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.local; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; -import org.apache.ambari.logsearch.config.api.InputConfigMonitor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.ambari.logsearch.config.json.JsonHelper; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; - -import java.io.File; -import java.io.IOException; -import java.nio.file.ClosedWatchServiceException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.WatchEvent; -import java.nio.file.WatchKey; -import java.nio.file.WatchService; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE; -import static java.nio.file.StandardWatchEventKinds.ENTRY_DELETE; -import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY; - -/** - * Watch specific config folder, and check against input.config-*.json file changes (create/update/remove), - * a change can trigger an input config monitor (which should start to monitor input files with the new or updated settings) - */ -public class LogSearchConfigLocalUpdater implements Runnable { - - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigLocalUpdater.class); - - private final Path path; - private final WatchService watchService; - private final InputConfigMonitor inputConfigMonitor; - private final Map inputFileContentsMap; - private final JsonParser parser; - private final JsonArray globalConfigNode; - private final 
Pattern serviceNamePattern; - - public LogSearchConfigLocalUpdater(final Path path, final WatchService watchService, - final InputConfigMonitor inputConfigMonitor, final Map inputFileContentsMap, - final JsonParser parser, final JsonArray globalConfigNode, final Pattern serviceNamePattern) { - this.path = path; - this.watchService = watchService; - this.inputConfigMonitor = inputConfigMonitor; - this.inputFileContentsMap = inputFileContentsMap; - this.parser = parser; - this.globalConfigNode = globalConfigNode; - this.serviceNamePattern = serviceNamePattern; - } - - @Override - public void run() { - final Map keys = new ConcurrentHashMap<>(); - try { - register(this.path, keys, watchService); - } catch (IOException e) { - LOG.error("{}", e); - throw new RuntimeException(e); - } - while (!Thread.interrupted()) { - WatchKey key; - try { - key = watchService.poll(10, TimeUnit.SECONDS); - } catch (InterruptedException | ClosedWatchServiceException e) { - break; - } - if (key != null) { - Path path = keys.get(key); - for (WatchEvent ev : key.pollEvents()) { - WatchEvent event = cast(ev); - WatchEvent.Kind kind = event.kind(); - Path name = event.context(); - Path monitoredInput = path.resolve(name); - File file = monitoredInput.toFile(); - String absPath = monitoredInput.toAbsolutePath().toString(); - if (file.getName().startsWith("input.config-") && file.getName().endsWith(".json")) { - Matcher m = serviceNamePattern.matcher(file.getName()); - m.find(); - String serviceName = m.group(1); - try { - if (kind == ENTRY_CREATE) { - LOG.info("New input config entry found: {}", absPath); - String inputConfig = new String(Files.readAllBytes(monitoredInput)); - JsonElement inputConfigJson = JsonHelper.mergeGlobalConfigWithInputConfig(parser, inputConfig, globalConfigNode); - inputConfigMonitor.loadInputConfigs(serviceName, InputConfigGson.gson.fromJson(inputConfigJson, InputConfigImpl.class)); - inputFileContentsMap.put(absPath, inputConfig); - } else if (kind == ENTRY_MODIFY) { - LOG.info("Input config entry modified: {}", absPath); - if (inputFileContentsMap.containsKey(absPath)) { - String oldContent = inputFileContentsMap.get(absPath); - String inputConfig = new String(Files.readAllBytes(monitoredInput)); - if (!inputConfig.equals(oldContent)) { - inputConfigMonitor.removeInputs(serviceName); - inputFileContentsMap.remove(absPath); - JsonElement inputConfigJson = JsonHelper.mergeGlobalConfigWithInputConfig(parser, inputConfig, globalConfigNode); - inputConfigMonitor.loadInputConfigs(serviceName, InputConfigGson.gson.fromJson(inputConfigJson, InputConfigImpl.class)); - inputFileContentsMap.put(absPath, inputConfig); - } - } - } else if (kind == ENTRY_DELETE) { - LOG.info("Input config deleted: {}", absPath); - if (inputFileContentsMap.containsKey(absPath)) { - inputConfigMonitor.removeInputs(serviceName); - inputFileContentsMap.remove(absPath); - } - } - } catch (Exception e) { - LOG.error("{}", e); - break; - } - } - } - if (!key.reset()) { - LOG.info("{} is invalid", key); - keys.remove(key); - if (keys.isEmpty()) { - break; - } - } - } - } - } - - private void register(Path dir, Map keys, WatchService watchService) - throws IOException { - WatchKey key = dir.register(watchService, ENTRY_CREATE, ENTRY_DELETE, - ENTRY_MODIFY); - keys.put(key, dir); - } - - @SuppressWarnings("unchecked") - private WatchEvent cast(WatchEvent event) { - return (WatchEvent) event; - } -} diff --git 
a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLogFeederLocal.java b/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLogFeederLocal.java deleted file mode 100644 index 2a02cb57013..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigLogFeederLocal.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.local; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; -import org.apache.ambari.logsearch.config.api.InputConfigMonitor; -import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.ambari.logsearch.config.json.JsonHelper; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.WatchService; -import java.util.Map; -import java.util.regex.Matcher; - -/** - * Handle input.config-*.json files from local sourse (filesystem) - * After the first file check in the configuration folder, it starts to watch the specified about changes (create/update/delete files) - */ -public class LogSearchConfigLogFeederLocal extends LogSearchConfigLocal implements LogSearchConfigLogFeeder { - - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigLogFeederLocal.class); - - private String configDir; - - @Override - public void init(Map properties, String clusterName) throws Exception { - super.init(properties); - setConfigDir(properties.getOrDefault("logfeeder.config.dir", "/usr/lib/ambari-logsearch-logfeeder/conf")); - boolean localConfig = Boolean.valueOf(properties.getOrDefault("logfeeder.config.filter.local", "false")); - if (localConfig) { - setLogLevelFilterManager(new LogLevelFilterManagerLocal(getConfigDir(), gson)); - } - } - - @Override - public boolean inputConfigExists(String serviceName) throws Exception { - Path filePath = Paths.get(getConfigDir(), String.format("input.config-%s.json", serviceName.toLowerCase())); - return inputFileContentsMap.containsKey(filePath.toAbsolutePath().toString()); - } - - @Override - public void 
monitorInputConfigChanges(final InputConfigMonitor inputConfigMonitor, final LogLevelFilterMonitor logLevelFilterMonitor, String clusterName) throws Exception { - final JsonParser parser = new JsonParser(); - final JsonArray globalConfigNode = new JsonArray(); - for (String globalConfigJsonString : inputConfigMonitor.getGlobalConfigJsons()) { - JsonElement globalConfigJson = parser.parse(globalConfigJsonString); - globalConfigNode.add(globalConfigJson.getAsJsonObject().get("global")); - Path filePath = Paths.get(configDir, "global.config.json"); - String strData = InputConfigGson.gson.toJson(globalConfigJson); - byte[] data = strData.getBytes(StandardCharsets.UTF_8); - Files.write(filePath, data); - } - - File[] inputConfigFiles = new File(configDir).listFiles(inputConfigFileFilter); - if (inputConfigFiles != null) { - for (File inputConfigFile : inputConfigFiles) { - String inputConfig = new String(Files.readAllBytes(inputConfigFile.toPath())); - Matcher m = serviceNamePattern.matcher(inputConfigFile.getName()); - m.find(); - String serviceName = m.group(1); - JsonElement inputConfigJson = JsonHelper.mergeGlobalConfigWithInputConfig(parser, inputConfig, globalConfigNode); - inputConfigMonitor.loadInputConfigs(serviceName, InputConfigGson.gson.fromJson(inputConfigJson, InputConfigImpl.class)); - } - } - final FileSystem fs = FileSystems.getDefault(); - final WatchService ws = fs.newWatchService(); - Path configPath = Paths.get(configDir); - LogSearchConfigLocalUpdater updater = new LogSearchConfigLocalUpdater(configPath, ws, inputConfigMonitor, inputFileContentsMap, - parser, globalConfigNode, serviceNamePattern); - executorService.submit(updater); - } - - @Override - public void close() throws IOException { - } - - @Override - public String getConfigDir() { - return this.configDir; - } - - @Override - public void setConfigDir(String configDir) { - this.configDir = configDir; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigServerLocal.java b/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigServerLocal.java deleted file mode 100644 index 13aacd04176..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-local/src/main/java/org/apache/ambari/logsearch/config/local/LogSearchConfigServerLocal.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.config.local; - -import org.apache.ambari.logsearch.config.api.LogSearchConfigServer; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; - -import java.io.File; -import java.io.IOException; -import java.util.List; -import java.util.Map; - -// TODO: implement every method of this, although that can be useful only for local 1-node deployments -public class LogSearchConfigServerLocal extends LogSearchConfigLocal implements LogSearchConfigServer { - - private String configDir; - - @Override - public void init(Map properties) throws Exception { - super.init(properties); - setConfigDir(properties.getOrDefault("logsearch.logfeeder.config.dir", "/usr/lib/ambari-logsearch-portal/conf/input-configs")); - File confDirFile = new File(configDir); - if (!confDirFile.exists()) { - confDirFile.mkdir(); - } - boolean localConfig = Boolean.valueOf(properties.getOrDefault("logsearch.logfeeder.config.filter.local", "false")); - if (localConfig) { - setLogLevelFilterManager(new LogLevelFilterManagerLocal(getConfigDir(), gson)); - } - } - - @Override - public List getServices(String clusterName) { - return null; - } - - @Override - public boolean inputConfigExists(String clusterName, String serviceName) throws Exception { - return false; - } - - @Override - public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception { - } - - @Override - public String getGlobalConfigs(String clusterName) { - return null; - } - - @Override - public InputConfig getInputConfig(String clusterName, String serviceName) { - return null; - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception { - } - - @Override - public void close() throws IOException { - } - - @Override - public String getConfigDir() { - return this.configDir; - } - - @Override - public void setConfigDir(String configDir) { - this.configDir = configDir; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-solr/pom.xml b/ambari-logsearch/ambari-logsearch-config-solr/pom.xml deleted file mode 100644 index f47fd75fa98..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-solr/pom.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-config-solr - jar - Ambari Logsearch Config Solr - http://maven.apache.org - - - UTF-8 - - - - - org.apache.ambari - ambari-logsearch-config-api - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-json - ${project.version} - - - org.apache.solr - solr-solrj - ${solr.version} - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-config-solr/src/main/java/org/apache/ambari/logsearch/config/solr/LogLevelFilterManagerSolr.java b/ambari-logsearch/ambari-logsearch-config-solr/src/main/java/org/apache/ambari/logsearch/config/solr/LogLevelFilterManagerSolr.java deleted file mode 100644 index 0eabead0429..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-solr/src/main/java/org/apache/ambari/logsearch/config/solr/LogLevelFilterManagerSolr.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.solr; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.SolrPingResponse; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; -import org.apache.solr.common.SolrInputDocument; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; -import java.util.TreeMap; - -/** - * Gather and store log level filters from/in a Solr collection. - */ -public class LogLevelFilterManagerSolr implements LogLevelFilterManager { - - private static final Logger LOG = LoggerFactory.getLogger(LogLevelFilterManagerSolr.class); - - private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS"; - private final SolrClient solrClient; - private Gson gson; - private boolean useClusterParam = true; - - public LogLevelFilterManagerSolr(SolrClient solrClient) { - this.solrClient = solrClient; - waitForSolr(solrClient); - gson = new GsonBuilder().setDateFormat(DATE_FORMAT).setPrettyPrinting().create(); - } - - @Override - public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception { - final SolrInputDocument doc = new SolrInputDocument(); - int hashCode = useClusterParam ? 
(clusterName + logId).hashCode() : logId.hashCode(); - doc.addField("id", String.valueOf(hashCode)); - if (useClusterParam) { - doc.addField("cluster_string", clusterName); - } - doc.addField("filtername", logId); - doc.addField("rowtype", "log_level_filter"); - doc.addField("jsons", gson.toJson(filter)); - doc.addField("username", "default"); - LOG.debug("Creating log level filter - logid: {}, cluster: {}", logId, clusterName); - solrClient.add(doc); - } - - @Override - public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception { - TreeMap logLevelFilterTreeMap = filters.getFilter(); - if (!logLevelFilterTreeMap.isEmpty()) { - LogLevelFilterMap actualFiltersMap = getLogLevelFilters(clusterName); - if (actualFiltersMap.getFilter().isEmpty()) { - if (!filters.getFilter().isEmpty()) { - for (Map.Entry entry : filters.getFilter().entrySet()) { - createLogLevelFilter(clusterName, entry.getKey(), entry.getValue()); - } - } - } else { - TreeMap mapToSet = filters.getFilter(); - TreeMap finalMapToSet = new TreeMap<>(); - for (Map.Entry entry : actualFiltersMap.getFilter().entrySet()) { - if (mapToSet.containsKey(entry.getKey())) { - String actualValue = gson.toJson(entry.getValue()); - String newValue = gson.toJson(mapToSet.get(entry.getKey())); - if (!newValue.equals(actualValue)) { - finalMapToSet.put(entry.getKey(), mapToSet.get(entry.getKey())); - } - } else { - finalMapToSet.put(entry.getKey(), mapToSet.get(entry.getKey())); - } - } - for (Map.Entry entry : finalMapToSet.entrySet()) { - createLogLevelFilter(clusterName, entry.getKey(), entry.getValue()); - } - } - } - } - - @Override - public LogLevelFilterMap getLogLevelFilters(String clusterName) { - LogLevelFilterMap logLevelFilterMap = new LogLevelFilterMap(); - TreeMap logLevelFilterTreeMap = new TreeMap<>(); - try { - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - if (useClusterParam) { - solrQuery.addFilterQuery("cluster_string:" + clusterName); - } - solrQuery.addFilterQuery("rowtype:log_level_filter"); - solrQuery.setFields("jsons", "filtername"); - - final QueryResponse response = solrClient.query(solrQuery); - if (response != null) { - final SolrDocumentList documents = response.getResults(); - if (documents != null && !documents.isEmpty()) { - for(SolrDocument document : documents) { - String jsons = (String) document.getFieldValue("jsons"); - String logId = (String) document.getFieldValue("filtername"); - if (jsons != null) { - LogLevelFilter logLevelFilter = gson.fromJson(jsons, LogLevelFilter.class); - logLevelFilterTreeMap.put(logId,logLevelFilter); - } - } - } - } - } catch (Exception e) { - LOG.error("Error during getting log level filters: {}", e.getMessage()); - } - logLevelFilterMap.setFilter(logLevelFilterTreeMap); - return logLevelFilterMap; - } - - public boolean isUseClusterParam() { - return useClusterParam; - } - - public void setUseClusterParam(boolean useClusterParam) { - this.useClusterParam = useClusterParam; - } - - public Gson getGson() { - return gson; - } - - private void waitForSolr(SolrClient solrClient) { - boolean solrAvailable = false; - while (!solrAvailable) { - try { - LOG.debug("Start solr ping for log level filter collection"); - SolrPingResponse pingResponse = solrClient.ping(); - if (pingResponse.getStatus() == 0) { - solrAvailable = true; - break; - } - } catch (Exception e) { - LOG.error("{}", e); - } - LOG.info("Solr (collection for log level filters) is not available yet. Sleeping 10 sec. 
Retrying..."); - try { - Thread.sleep(10000); - } catch (InterruptedException e) { - LOG.error("{}", e); - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-solr/src/main/java/org/apache/ambari/logsearch/config/solr/LogLevelFilterUpdaterSolr.java b/ambari-logsearch/ambari-logsearch-config-solr/src/main/java/org/apache/ambari/logsearch/config/solr/LogLevelFilterUpdaterSolr.java deleted file mode 100644 index 534a60cd2f8..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-solr/src/main/java/org/apache/ambari/logsearch/config/solr/LogLevelFilterUpdaterSolr.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.solr; - -import com.google.gson.Gson; -import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor; -import org.apache.ambari.logsearch.config.api.LogLevelFilterUpdater; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -/** - * Periodically checks log level filters in Solr, and send a notification about any change to a log level filter monitor. 
- */ -public class LogLevelFilterUpdaterSolr extends LogLevelFilterUpdater { - - private static final Logger LOG = LoggerFactory.getLogger(LogLevelFilterUpdaterSolr.class); - - private final LogLevelFilterManagerSolr logLevelFilterManagerSolr; - private final String cluster; - - public LogLevelFilterUpdaterSolr(String threadName, LogLevelFilterMonitor logLevelFilterMonitor, Integer interval, - LogLevelFilterManagerSolr logLevelFilterManagerSolr, String cluster) { - super(threadName, logLevelFilterMonitor, interval); - this.logLevelFilterManagerSolr = logLevelFilterManagerSolr; - this.cluster = cluster; - } - - @Override - protected void checkFilters(LogLevelFilterMonitor logLevelFilterMonitor) { - try { - LOG.debug("Start checking log level filters in Solr ..."); - LogLevelFilterMap logLevelFilterMap = logLevelFilterManagerSolr.getLogLevelFilters(cluster); - Map filters = logLevelFilterMap.getFilter(); - Map copiedStoredFilters = new ConcurrentHashMap<>(logLevelFilterMonitor.getLogLevelFilters()); - final Gson gson = logLevelFilterManagerSolr.getGson(); - for (Map.Entry logFilterEntry : filters.entrySet()){ - if (copiedStoredFilters.containsKey(logFilterEntry.getKey())) { - String remoteValue = gson.toJson(logFilterEntry.getValue()); - String storedValue = gson.toJson(copiedStoredFilters.get(logFilterEntry.getKey())); - if (!storedValue.equals(remoteValue)) { - LOG.info("Log level filter updated for {}", logFilterEntry.getKey()); - logLevelFilterMonitor.setLogLevelFilter(logFilterEntry.getKey(), logFilterEntry.getValue()); - } - } else { - LOG.info("New log level filter registered: {}", logFilterEntry.getKey()); - logLevelFilterMonitor.setLogLevelFilter(logFilterEntry.getKey(), logFilterEntry.getValue()); - } - } - for (Map.Entry storedLogFilterEntry : copiedStoredFilters.entrySet()) { - if (!filters.containsKey(storedLogFilterEntry.getKey())) { - LOG.info("Removing log level filter: {}", storedLogFilterEntry.getKey()); - logLevelFilterMonitor.removeLogLevelFilter(storedLogFilterEntry.getKey()); - } - } - } catch (Exception e) { - LOG.error("Error during filter Solr check: {}",e); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore b/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore deleted file mode 100644 index ae3c1726048..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/bin/ diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml b/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml deleted file mode 100644 index 343971e9890..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml +++ /dev/null @@ -1,63 +0,0 @@ - - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-config-zookeeper - jar - Ambari Logsearch Config Zookeeper - http://maven.apache.org - - - UTF-8 - - - - - org.apache.ambari - ambari-logsearch-config-json - ${project.version} - - - org.apache.zookeeper - zookeeper - - - org.apache.curator - curator-framework - 2.12.0 - - - org.apache.curator - curator-client - 2.12.0 - - - org.apache.curator - curator-recipes - 2.12.0 - - - diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogLevelFilterManagerZK.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogLevelFilterManagerZK.java deleted file mode 100644 index e62ec1b890b..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogLevelFilterManagerZK.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.zookeeper; - -import com.google.gson.Gson; -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.cache.ChildData; -import org.apache.curator.framework.recipes.cache.TreeCache; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.data.ACL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -public class LogLevelFilterManagerZK implements LogLevelFilterManager { - - private static final Logger LOG = LoggerFactory.getLogger(LogLevelFilterManagerZK.class); - - private final CuratorFramework client; - private final TreeCache serverCache; - private final Gson gson; - private final List aclList; - - public LogLevelFilterManagerZK(Map properties) throws Exception { - this.client = LogSearchConfigZKHelper.createZKClient(properties); - this.serverCache = new TreeCache(client, "/"); - this.aclList = LogSearchConfigZKHelper.getAcls(properties); - this.gson = LogSearchConfigZKHelper.createGson(); - this.serverCache.start(); - } - - public LogLevelFilterManagerZK(Map properties, CuratorFramework client) throws Exception { - this.client = client; - this.serverCache = new TreeCache(client, "/"); - this.aclList = LogSearchConfigZKHelper.getAcls(properties); - this.gson = LogSearchConfigZKHelper.createGson(); - this.serverCache.start(); - } - - public LogLevelFilterManagerZK(CuratorFramework client, TreeCache serverCache, List aclList, Gson gson) { - this.client = client; - this.serverCache = serverCache; - this.aclList = aclList; - this.gson = gson; - } - - @Override - public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception { - String nodePath = String.format("/%s/loglevelfilter/%s", clusterName, logId); - String logLevelFilterJson = gson.toJson(filter); - try { - client.create().creatingParentContainersIfNeeded().withACL(aclList).forPath(nodePath, logLevelFilterJson.getBytes()); - LOG.info("Uploaded log level filter for the log " + logId + " for cluster " + clusterName); - } catch (KeeperException.NodeExistsException e) { - LOG.debug("Did not upload log level filters for log " + logId + " as it was already uploaded by another Log Feeder"); - } - } - - 
@Override - public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception { - for (Map.Entry e : filters.getFilter().entrySet()) { - String nodePath = String.format("/%s/loglevelfilter/%s", clusterName, e.getKey()); - String logLevelFilterJson = gson.toJson(e.getValue()); - String currentLogLevelFilterJson = new String(serverCache.getCurrentData(nodePath).getData()); - if (!logLevelFilterJson.equals(currentLogLevelFilterJson)) { - client.setData().forPath(nodePath, logLevelFilterJson.getBytes()); - LOG.info("Set log level filter for the log " + e.getKey() + " for cluster " + clusterName); - } - } - } - - @Override - public LogLevelFilterMap getLogLevelFilters(String clusterName) { - String parentPath = String.format("/%s/loglevelfilter", clusterName); - TreeMap filters = new TreeMap<>(); - Map logLevelFilterNodes = serverCache.getCurrentChildren(parentPath); - if (logLevelFilterNodes != null && !logLevelFilterNodes.isEmpty()) { - for (Map.Entry e : logLevelFilterNodes.entrySet()) { - LogLevelFilter logLevelFilter = gson.fromJson(new String(e.getValue().getData()), LogLevelFilter.class); - filters.put(e.getKey(), logLevelFilter); - } - } - LogLevelFilterMap logLevelFilters = new LogLevelFilterMap(); - logLevelFilters.setFilter(filters); - return logLevelFilters; - } - - public CuratorFramework getClient() { - return client; - } - - public TreeCache getServerCache() { - return serverCache; - } - - public Gson getGson() { - return gson; - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigLogFeederZK.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigLogFeederZK.java deleted file mode 100644 index 0c565d316f4..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigLogFeederZK.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.zookeeper; - -import java.util.Map; -import java.util.Set; - -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.apache.ambari.logsearch.config.json.JsonHelper; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; -import org.apache.ambari.logsearch.config.api.InputConfigMonitor; -import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.cache.TreeCache; -import org.apache.curator.framework.recipes.cache.TreeCacheEvent; -import org.apache.curator.framework.recipes.cache.TreeCacheEvent.Type; -import org.apache.curator.framework.recipes.cache.TreeCacheListener; -import org.apache.curator.utils.ZKPaths; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.collect.ImmutableSet; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; - -public class LogSearchConfigLogFeederZK extends LogSearchConfigZK implements LogSearchConfigLogFeeder { - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigLogFeederZK.class); - - private TreeCache logFeederClusterCache; - - @Override - public void init(Map properties, String clusterName) throws Exception { - super.init(properties); - LogSearchConfigZKHelper.waitUntilRootAvailable(client); - logFeederClusterCache = LogSearchConfigZKHelper.createClusterCache(client, clusterName); - LogLevelFilterManager logLevelFilterManager = new LogLevelFilterManagerZK(client, null, LogSearchConfigZKHelper.getAcls(properties), gson); - setLogLevelFilterManager(logLevelFilterManager); - } - - @Override - public boolean inputConfigExists(String serviceName) throws Exception { - String nodePath = String.format("/input/%s", serviceName); - return logFeederClusterCache.getCurrentData(nodePath) != null; - } - - @Override - public void monitorInputConfigChanges(final InputConfigMonitor inputConfigMonitor, - final LogLevelFilterMonitor logLevelFilterMonitor, final String clusterName) throws Exception { - final JsonParser parser = new JsonParser(); - final JsonArray globalConfigNode = new JsonArray(); - for (String globalConfigJsonString : inputConfigMonitor.getGlobalConfigJsons()) { - JsonElement globalConfigJson = parser.parse(globalConfigJsonString); - globalConfigNode.add(globalConfigJson.getAsJsonObject().get("global")); - } - - createGlobalConfigNode(globalConfigNode, clusterName); - - TreeCacheListener listener = new TreeCacheListener() { - private final Set nodeEvents = ImmutableSet.of(Type.NODE_ADDED, Type.NODE_UPDATED, Type.NODE_REMOVED); - - public void childEvent(CuratorFramework client, TreeCacheEvent event) throws Exception { - if (!nodeEvents.contains(event.getType())) { - return; - } - - String nodeName = ZKPaths.getNodeFromPath(event.getData().getPath()); - String nodeData = new String(event.getData().getData()); - Type eventType = event.getType(); - - String configPathStab = String.format("/%s/", clusterName); - - if (event.getData().getPath().startsWith(configPathStab + "input/")) { - handleInputConfigChange(eventType, nodeName, nodeData); - } else if (event.getData().getPath().startsWith(configPathStab + "loglevelfilter/")) { - 
LogSearchConfigZKHelper.handleLogLevelFilterChange(eventType, nodeName, nodeData, gson, logLevelFilterMonitor); - } - } - - private void handleInputConfigChange(Type eventType, String nodeName, String nodeData) { - switch (eventType) { - case NODE_ADDED: - LOG.info("Node added under input ZK node: " + nodeName); - addInputs(nodeName, nodeData); - break; - case NODE_UPDATED: - LOG.info("Node updated under input ZK node: " + nodeName); - removeInputs(nodeName); - addInputs(nodeName, nodeData); - break; - case NODE_REMOVED: - LOG.info("Node removed from input ZK node: " + nodeName); - removeInputs(nodeName); - break; - default: - break; - } - } - - private void removeInputs(String serviceName) { - inputConfigMonitor.removeInputs(serviceName); - } - - private void addInputs(String serviceName, String inputConfig) { - try { - JsonElement inputConfigJson = parser.parse(inputConfig); - for (Map.Entry typeEntry : inputConfigJson.getAsJsonObject().entrySet()) { - for (JsonElement e : typeEntry.getValue().getAsJsonArray()) { - for (JsonElement globalConfig : globalConfigNode) { - JsonHelper.merge(globalConfig.getAsJsonObject(), e.getAsJsonObject()); - } - } - } - - inputConfigMonitor.loadInputConfigs(serviceName, InputConfigGson.gson.fromJson(inputConfigJson, InputConfigImpl.class)); - } catch (Exception e) { - LOG.error("Could not load input configuration for service " + serviceName + ":\n" + inputConfig, e); - } - } - }; - logFeederClusterCache.getListenable().addListener(listener); - logFeederClusterCache.start(); - } - - private void createGlobalConfigNode(JsonArray globalConfigNode, String clusterName) { - String globalConfigNodePath = String.format("/%s/global", clusterName); - String data = InputConfigGson.gson.toJson(globalConfigNode); - - try { - if (logFeederClusterCache.getCurrentData(globalConfigNodePath) != null) { - client.setData().forPath(globalConfigNodePath, data.getBytes()); - } else { - client.create().creatingParentContainersIfNeeded().withACL(LogSearchConfigZKHelper.getAcls(properties)).forPath(globalConfigNodePath, data.getBytes()); - } - } catch (Exception e) { - LOG.warn("Exception during global config node creation/update", e); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigServerZK.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigServerZK.java deleted file mode 100644 index 7380c6b449c..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigServerZK.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.config.zookeeper; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.LogSearchConfigServer; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputAdapter; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; -import org.apache.curator.framework.recipes.cache.ChildData; -import org.apache.curator.framework.recipes.cache.TreeCache; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.gson.JsonArray; -import com.google.gson.JsonParser; - -public class LogSearchConfigServerZK extends LogSearchConfigZK implements LogSearchConfigServer { - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigServerZK.class); - - private TreeCache serverCache; - - @Override - public void init(Map properties) throws Exception { - super.init(properties); - - if (client.checkExists().forPath("/") == null) { - client.create().creatingParentContainersIfNeeded().forPath("/"); - } - serverCache = new TreeCache(client, "/"); - serverCache.start(); - LogLevelFilterManager logLevelFilterManager = new LogLevelFilterManagerZK(client, serverCache, LogSearchConfigZKHelper.getAcls(properties), gson); - setLogLevelFilterManager(logLevelFilterManager); - } - - @Override - public boolean inputConfigExists(String clusterName, String serviceName) throws Exception { - String nodePath = String.format("/%s/input/%s", clusterName, serviceName); - return serverCache.getCurrentData(nodePath) != null; - } - - @Override - public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception { - String nodePath = String.format("/%s/input/%s", clusterName, serviceName); - client.setData().forPath(nodePath, inputConfig.getBytes()); - LOG.info("Set input config for the service " + serviceName + " for cluster " + clusterName); - } - - @Override - public List getServices(String clusterName) { - String parentPath = String.format("/%s/input", clusterName); - Map serviceNodes = serverCache.getCurrentChildren(parentPath); - return serviceNodes == null ? - new ArrayList<>() : - new ArrayList<>(serviceNodes.keySet()); - } - - @Override - public String getGlobalConfigs(String clusterName) { - String globalConfigNodePath = String.format("/%s/global", clusterName); - return new String(serverCache.getCurrentData(globalConfigNodePath).getData()); - } - - @Override - public InputConfig getInputConfig(String clusterName, String serviceName) { - String globalConfigData = getGlobalConfigs(clusterName); - JsonArray globalConfigs = (JsonArray) new JsonParser().parse(globalConfigData); - InputAdapter.setGlobalConfigs(globalConfigs); - - ChildData childData = serverCache.getCurrentData(String.format("/%s/input/%s", clusterName, serviceName)); - return childData == null ? 
null : InputConfigGson.gson.fromJson(new String(childData.getData()), InputConfigImpl.class); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java deleted file mode 100644 index dcbedd5b9ad..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.config.zookeeper; - -import java.util.Map; - -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.LogSearchConfig; -import org.apache.curator.framework.CuratorFramework; -import org.apache.zookeeper.KeeperException.NodeExistsException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.gson.Gson; - -public class LogSearchConfigZK implements LogSearchConfig { - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigZK.class); - - protected Map properties; - protected CuratorFramework client; - protected Gson gson; - protected LogLevelFilterManager logLevelFilterManager; - - public void init(Map properties) throws Exception { - this.properties = properties; - client = LogSearchConfigZKHelper.createZKClient(properties); - client.start(); - gson = LogSearchConfigZKHelper.createGson(); - } - - @Override - public void createInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception { - String nodePath = String.format("/%s/input/%s", clusterName, serviceName); - try { - client.create().creatingParentContainersIfNeeded().withACL(LogSearchConfigZKHelper.getAcls(properties)).forPath(nodePath, inputConfig.getBytes()); - LOG.info("Uploaded input config for the service " + serviceName + " for cluster " + clusterName); - } catch (NodeExistsException e) { - LOG.debug("Did not upload input config for service " + serviceName + " as it was already uploaded by another Log Feeder"); - } - } - - @Override - public LogLevelFilterManager getLogLevelFilterManager() { - return this.logLevelFilterManager; - } - - @Override - public void setLogLevelFilterManager(LogLevelFilterManager logLevelFilterManager) { - this.logLevelFilterManager = logLevelFilterManager; - } - - @Override - public void close() { - LOG.info("Closing ZooKeeper Connection"); - client.close(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZKHelper.java 
b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZKHelper.java deleted file mode 100644 index b26181dd7f9..00000000000 --- a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZKHelper.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.config.zookeeper; - -import com.google.common.base.Splitter; -import com.google.common.collect.ImmutableSet; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.curator.RetryPolicy; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.CuratorFrameworkFactory; -import org.apache.curator.framework.recipes.cache.TreeCache; -import org.apache.curator.framework.recipes.cache.TreeCacheEvent; -import org.apache.curator.framework.recipes.cache.TreeCacheListener; -import org.apache.curator.retry.RetryForever; -import org.apache.curator.retry.RetryUntilElapsed; -import org.apache.curator.utils.ZKPaths; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * Utility functions for handling ZK operation and monitor ZK data for Log Search configuration - */ -public class LogSearchConfigZKHelper { - - private static final Logger LOG = LoggerFactory.getLogger(LogSearchConfigZKHelper.class); - - private static final int DEFAULT_SESSION_TIMEOUT = 60000; - private static final int DEFAULT_CONNECTION_TIMEOUT = 30000; - private static final int RETRY_INTERVAL_MS = 10000; - private static final String DEFAULT_ZK_ROOT = "/logsearch"; - private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS"; - - @LogSearchPropertyDescription( - name = "logsearch.config.zk_connect_string", - description = "ZooKeeper connection string.", - examples = {"localhost1:2181,localhost2:2181/znode"}, - sources = {"logsearch.properties", "logfeeder.properties"} - ) - private static final String ZK_CONNECT_STRING_PROPERTY = "logsearch.config.zk_connect_string"; - - @LogSearchPropertyDescription( - name = "logsearch.config.zk_acls", - description = "ZooKeeper ACLs for handling configs. 
(read & write)", - examples = {"world:anyone:r,sasl:solr:cdrwa,sasl:logsearch:cdrwa"}, - sources = {"logsearch.properties", "logfeeder.properties"}, - defaultValue = "world:anyone:cdrwa" - ) - private static final String ZK_ACLS_PROPERTY = "logsearch.config.zk_acls"; - - @LogSearchPropertyDescription( - name = "logsearch.config.zk_root", - description = "ZooKeeper root node where the shippers are stored. (added to the connection string)", - examples = {"/logsearch"}, - sources = {"logsearch.properties", "logfeeder.properties"} - ) - private static final String ZK_ROOT_NODE_PROPERTY = "logsearch.config.zk_root"; - - @LogSearchPropertyDescription( - name = "logsearch.config.zk_session_time_out_ms", - description = "ZooKeeper session timeout in milliseconds", - examples = {"60000"}, - sources = {"logsearch.properties", "logfeeder.properties"} - ) - private static final String ZK_SESSION_TIMEOUT_PROPERTY = "logsearch.config.zk_session_time_out_ms"; - - @LogSearchPropertyDescription( - name = "logsearch.config.zk_connection_time_out_ms", - description = "ZooKeeper connection timeout in milliseconds", - examples = {"30000"}, - sources = {"logsearch.properties", "logfeeder.properties"} - ) - private static final String ZK_CONNECTION_TIMEOUT_PROPERTY = "logsearch.config.zk_connection_time_out_ms"; - - @LogSearchPropertyDescription( - name = "logsearch.config.zk_connection_retry_time_out_ms", - description = "The maximum elapsed time for connecting to ZooKeeper in milliseconds. 0 means retrying forever.", - examples = {"1200000"}, - sources = {"logsearch.properties", "logfeeder.properties"} - ) - private static final String ZK_CONNECTION_RETRY_TIMEOUT_PROPERTY = "logsearch.config.zk_connection_retry_time_out_ms"; - - private static final long WAIT_FOR_ROOT_SLEEP_SECONDS = 10; - - private LogSearchConfigZKHelper() { - } - - /** - * Create ZK curator client from a configuration (map holds the configs for that) - */ - public static CuratorFramework createZKClient(Map properties) { - String root = MapUtils.getString(properties, ZK_ROOT_NODE_PROPERTY, DEFAULT_ZK_ROOT); - LOG.info("Connecting to ZooKeeper at " + properties.get(ZK_CONNECT_STRING_PROPERTY) + root); - return CuratorFrameworkFactory.builder() - .connectString(properties.get(ZK_CONNECT_STRING_PROPERTY) + root) - .retryPolicy(getRetryPolicy(properties.get(ZK_CONNECTION_RETRY_TIMEOUT_PROPERTY))) - .connectionTimeoutMs(getIntProperty(properties, ZK_CONNECTION_TIMEOUT_PROPERTY, DEFAULT_CONNECTION_TIMEOUT)) - .sessionTimeoutMs(getIntProperty(properties, ZK_SESSION_TIMEOUT_PROPERTY, DEFAULT_SESSION_TIMEOUT)) - .build(); - } - - /** - * Get ACLs from a property (get the value then parse and transform it as ACL objects) - */ - public static List getAcls(Map properties) { - String aclStr = properties.get(ZK_ACLS_PROPERTY); - if (StringUtils.isBlank(aclStr)) { - return ZooDefs.Ids.OPEN_ACL_UNSAFE; - } - - List acls = new ArrayList<>(); - List aclStrList = Splitter.on(",").omitEmptyStrings().trimResults().splitToList(aclStr); - for (String unparcedAcl : aclStrList) { - String[] parts = unparcedAcl.split(":"); - if (parts.length == 3) { - acls.add(new ACL(parsePermission(parts[2]), new Id(parts[0], parts[1]))); - } - } - return acls; - } - - private static int getIntProperty(Map properties, String propertyKey, int defaultValue) { - if (properties.get(propertyKey) == null) - return defaultValue; - return Integer.parseInt(properties.get(propertyKey)); - } - - private static RetryPolicy getRetryPolicy(String zkConnectionRetryTimeoutValue) { - if 
(zkConnectionRetryTimeoutValue == null) - return new RetryForever(RETRY_INTERVAL_MS); - int maxElapsedTimeMs = Integer.parseInt(zkConnectionRetryTimeoutValue); - if (maxElapsedTimeMs == 0) - return new RetryForever(RETRY_INTERVAL_MS); - return new RetryUntilElapsed(maxElapsedTimeMs, RETRY_INTERVAL_MS); - } - - /** - * Create listener for znode of log level filters - can be used for Log Feeder as it can be useful if it's monitoring the log level changes - */ - public static TreeCacheListener createTreeCacheListener(String clusterName, Gson gson, LogLevelFilterMonitor logLevelFilterMonitor) { - return new TreeCacheListener() { - private final Set nodeEvents = ImmutableSet.of(TreeCacheEvent.Type.NODE_ADDED, TreeCacheEvent.Type.NODE_UPDATED, TreeCacheEvent.Type.NODE_REMOVED); - public void childEvent(CuratorFramework client, TreeCacheEvent event) throws Exception { - if (!nodeEvents.contains(event.getType())) { - return; - } - String nodeName = ZKPaths.getNodeFromPath(event.getData().getPath()); - String nodeData = new String(event.getData().getData()); - TreeCacheEvent.Type eventType = event.getType(); - - String configPathStab = String.format("/%s/", clusterName); - - if (event.getData().getPath().startsWith(configPathStab + "loglevelfilter/")) { - handleLogLevelFilterChange(eventType, nodeName, nodeData, gson, logLevelFilterMonitor); - } - } - }; - } - - /** - * Create root + cluster name znode cache - */ - public static TreeCache createClusterCache(CuratorFramework client, String clusterName) { - return new TreeCache(client, String.format("/%s", clusterName)); - } - - /** - * Assign listener to cluster cache and start to use that listener - */ - public static void addAndStartListenersOnCluster(TreeCache clusterCache, TreeCacheListener listener) throws Exception { - clusterCache.getListenable().addListener(listener); - clusterCache.start(); - } - - public static void waitUntilRootAvailable(CuratorFramework client) throws Exception { - while (client.checkExists().forPath("/") == null) { - LOG.info("Root node is not present yet, going to sleep for " + WAIT_FOR_ROOT_SLEEP_SECONDS + " seconds"); - Thread.sleep(WAIT_FOR_ROOT_SLEEP_SECONDS * 1000); - } - } - - /** - * Call log level filter monitor interface to handle node related operations (on update/remove) - */ - public static void handleLogLevelFilterChange(final TreeCacheEvent.Type eventType, final String nodeName, final String nodeData, - final Gson gson, final LogLevelFilterMonitor logLevelFilterMonitor) { - switch (eventType) { - case NODE_ADDED: - case NODE_UPDATED: - LOG.info("Node added/updated under loglevelfilter ZK node: " + nodeName); - LogLevelFilter logLevelFilter = gson.fromJson(nodeData, LogLevelFilter.class); - logLevelFilterMonitor.setLogLevelFilter(nodeName, logLevelFilter); - break; - case NODE_REMOVED: - LOG.info("Node removed loglevelfilter input ZK node: " + nodeName); - logLevelFilterMonitor.removeLogLevelFilter(nodeName); - break; - default: - break; - } - } - - /** - * Pares ZK ACL permission string and transform it to an integer - */ - public static Integer parsePermission(String permission) { - int permissionCode = 0; - for (char each : permission.toLowerCase().toCharArray()) { - switch (each) { - case 'r': - permissionCode |= ZooDefs.Perms.READ; - break; - case 'w': - permissionCode |= ZooDefs.Perms.WRITE; - break; - case 'c': - permissionCode |= ZooDefs.Perms.CREATE; - break; - case 'd': - permissionCode |= ZooDefs.Perms.DELETE; - break; - case 'a': - permissionCode |= ZooDefs.Perms.ADMIN; - break; - default: - 
throw new IllegalArgumentException("Unsupported permission: " + permission); - } - } - return permissionCode; - } - - public static Gson createGson() { - return new GsonBuilder().setDateFormat(DATE_FORMAT).create(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-it/log4j.dtd b/ambari-logsearch/ambari-logsearch-it/log4j.dtd deleted file mode 100644 index 89181016038..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/log4j.dtd +++ /dev/null @@ -1,230 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/pom.xml b/ambari-logsearch/ambari-logsearch-it/pom.xml deleted file mode 100644 index cf8970e74c3..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/pom.xml +++ /dev/null @@ -1,349 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-it - http://maven.apache.org - Ambari Logsearch Integration Test - jar - - - true - 4.0.5 - 3.5.5 - 2.23.1 - 2.9.4 - 2.20 - 1 - localhost - NONE - NONE - - - - - org.jbehave - jbehave-core - ${jbehave.version} - - - org.jbehave.web - jbehave-web-selenium - ${jbehave-selenium} - - - org.apache.solr - solr-solrj - ${solr.version} - - - junit - junit - - - commons-io - commons-io - 2.5 - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - ${jackson-jaxrs.version} - - - org.glassfish.jersey.core - jersey-client - ${jersey.version} - - - org.slf4j - slf4j-api - 1.7.20 - - - org.slf4j - slf4j-log4j12 - 1.7.20 - - - com.flipkart.zjsonpatch - zjsonpatch - 0.2.4 - - - com.google.guava - guava - - - jackson-core - com.fasterxml.jackson.core - - - - - org.apache.ambari - ambari-logsearch-server - ${project.version} - - - jackson-core - com.fasterxml.jackson.core - - - - - org.apache.ambari - ambari-logsearch-web - ${project.version} - - - org.apache.ambari - ambari-logsearch-logfeeder - ${project.version} - - - jackson-core - com.fasterxml.jackson.core - - - - - com.google.guava - guava - 16.0.1 - - - - com.hubspot.jinjava - jinjava - 2.2.0 - test - - - org.easymock - easymock - 3.4 - test - - - org.hamcrest - hamcrest-all - 1.3 - - - - - target/classes - - - src/test/java/ - - **/*.story - - - - src/test/resources - - - - - - - selenium-tests - - - selenium-tests - - - - - - org.apache.maven.plugins - maven-failsafe-plugin - ${failsafe-plugin.version} - - - run-integration-tests - integration-test - - integration-test - - - - **/*UIStories.java - - - file:${project.build.testOutputDirectory}/log4j.properties - ${docker.host} - ${ui.stories.location} - - - - - verify-integration-tests - verify - - verify - - - - - - - - - backend-tests - - - backend-tests - - - - - - org.apache.maven.plugins - maven-failsafe-plugin - ${failsafe-plugin.version} - - - run-integration-tests - integration-test - - integration-test - - - - **/*BackendStories.java - - - file:${project.build.testOutputDirectory}/log4j.properties - ${docker.host} - ${backend.stories.location} - - - - - verify-integration-tests - verify - - verify - - - - - - - - - input-config-tests - - - input-config-tests - - - - - - org.apache.maven.plugins - maven-failsafe-plugin - ${failsafe-plugin.version} - - - run-integration-tests - integration-test - - integration-test - - - - **/*IT.java - - - 
file:${project.build.testOutputDirectory}/log4j.properties - ${docker.host} - ${backend.stories.location} - - - - - verify-integration-tests - verify - - verify - - - - - - - - - all-tests - - - all-tests - - - - - - org.apache.maven.plugins - maven-failsafe-plugin - ${failsafe-plugin.version} - - - run-integration-tests - integration-test - - integration-test - - - - **/*Stories.java - - - file:${project.build.testOutputDirectory}/log4j.properties - ${docker.host} - ${backend.stories.location} - ${ui.stories.location} - - - - - verify-integration-tests - verify - - verify - - - - - - - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java deleted file mode 100644 index 92a659bf1ca..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/StoryDataRegistry.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
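A minimal usage sketch of the LogSearchConfigZKHelper API removed above, assuming a Map<String, String> of logsearch.properties values (the diff rendering stripped the generics). The logsearch.config.zk_connect_string key, the sample host:port, and the caller-supplied LogLevelFilterMonitor are illustrative assumptions; imports for the Log Search types themselves are omitted because their exact packages depend on the module layout.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.curator.framework.recipes.cache.TreeCacheListener;
import org.apache.zookeeper.data.ACL;

import com.google.gson.Gson;

public class ZkHelperUsageSketch {

  // monitor is an application-supplied LogLevelFilterMonitor implementation.
  public static void watchLogLevelFilters(LogLevelFilterMonitor monitor) throws Exception {
    Map<String, String> props = new HashMap<>();
    props.put("logsearch.config.zk_connect_string", "zk-host:2181");  // assumed key and sample value
    props.put("logsearch.config.zk_root", "/logsearch");
    props.put("logsearch.config.zk_acls", "world:anyone:r,sasl:solr:cdrwa,sasl:logsearch:cdrwa");

    // Each scheme:id:permission triple in the ACL string becomes a ZooKeeper ACL;
    // the list is applied when config znodes are created, not to the client itself.
    List<ACL> acls = LogSearchConfigZKHelper.getAcls(props);

    // Build the Curator client against <connect string> + <root>, start it, wait for the root znode.
    CuratorFramework client = LogSearchConfigZKHelper.createZKClient(props);
    client.start();
    LogSearchConfigZKHelper.waitUntilRootAvailable(client);

    // Cache the /<cluster> subtree and react to log level filter znode changes.
    Gson gson = LogSearchConfigZKHelper.createGson();
    TreeCache clusterCache = LogSearchConfigZKHelper.createClusterCache(client, "cl1");
    TreeCacheListener listener = LogSearchConfigZKHelper.createTreeCacheListener("cl1", gson, monitor);
    LogSearchConfigZKHelper.addAndStartListenersOnCluster(clusterCache, listener);
  }
}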
- */ -package org.apache.ambari.logsearch.domain; - -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputAdapter; -import org.apache.solr.client.solrj.SolrClient; -import org.jbehave.web.selenium.WebDriverProvider; - -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; - -public class StoryDataRegistry { - public static final StoryDataRegistry INSTANCE = new StoryDataRegistry(); - - public static final String CLUSTER = "cl1"; - public static final String LOGSEARCH_GLOBAL_CONFIG = "[\n" + - " {\n" + - " \"add_fields\": {\n" + - " \"cluster\": \""+ CLUSTER +"\"\n" + - " },\n" + - " \"source\": \"file\",\n" + - " \"tail\": \"true\",\n" + - " \"gen_event_md5\": \"true\"\n" + - " }\n" + - "]"; - - - private SolrClient solrClient; - private boolean logsearchContainerStarted = false; - private String dockerHost; - private String ambariFolder; - private String shellScriptLocation; - private String shellScriptFolder; - private final int solrPort = 8886; - private final int logsearchPort = 61888; - private final int zookeeperPort = 9983; - private final String serviceLogsCollection = "hadoop_logs"; - private final String auditLogsCollection = "audit_logs"; - private WebDriverProvider webDriverProvider; - - private StoryDataRegistry() { - JsonParser jsonParser = new JsonParser(); - JsonElement globalConfigJsonElement = jsonParser.parse(LOGSEARCH_GLOBAL_CONFIG); - InputAdapter.setGlobalConfigs(globalConfigJsonElement.getAsJsonArray()); - } - - public String getDockerHost() { - return dockerHost; - } - - public void setDockerHost(String dockerHost) { - this.dockerHost = dockerHost; - } - - public int getSolrPort() { - return solrPort; - } - - public int getLogsearchPort() { - return logsearchPort; - } - - public int getZookeeperPort() { - return zookeeperPort; - } - - public String getServiceLogsCollection() { - return serviceLogsCollection; - } - - public String getAuditLogsCollection() { - return auditLogsCollection; - } - - public SolrClient getSolrClient() { - return solrClient; - } - - public void setSolrClient(SolrClient solrClient) { - this.solrClient = solrClient; - } - - public String getAmbariFolder() { - return ambariFolder; - } - - public void setAmbariFolder(String ambariFolder) { - this.ambariFolder = ambariFolder; - } - - public String getShellScriptLocation() { - return shellScriptLocation; - } - - public void setShellScriptLocation(String shellScriptLocation) { - this.shellScriptLocation = shellScriptLocation; - } - - public boolean isLogsearchContainerStarted() { - return logsearchContainerStarted; - } - - public void setLogsearchContainerStarted(boolean logsearchContainerStarted) { - this.logsearchContainerStarted = logsearchContainerStarted; - } - - public WebDriverProvider getWebDriverProvider() { - return webDriverProvider; - } - - public void setWebDriverProvider(WebDriverProvider webDriverProvider) { - this.webDriverProvider = webDriverProvider; - } - - public String getShellScriptFolder() { - return shellScriptFolder; - } - - public void setShellScriptFolder(String shellScriptFolder) { - this.shellScriptFolder = shellScriptFolder; - } - - public WebClient logsearchClient() { - return new WebClient(dockerHost, logsearchPort); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/WebClient.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/WebClient.java deleted file mode 100644 index 1c14cd08ac1..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/domain/WebClient.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.domain; - -import javax.ws.rs.client.Entity; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.MediaType; - -import org.glassfish.jersey.client.JerseyClient; -import org.glassfish.jersey.client.JerseyClientBuilder; -import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class WebClient { - private static Logger LOG = LoggerFactory.getLogger(WebClient.class); - - private final String host; - private final int port; - - public WebClient(String host, int port) { - this.host = host; - this.port = port; - } - - public String get(String path) { - JerseyClient jerseyClient = JerseyClientBuilder.createClient(); - HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder() - .credentials("admin", "admin") - .build(); - jerseyClient.register(authFeature); - - String url = String.format("http://%s:%d%s", host, port, path); - - LOG.info("Url: {}", url); - - WebTarget target = jerseyClient.target(url); - Invocation.Builder invocationBuilder = target.request(MediaType.APPLICATION_JSON_TYPE); - return invocationBuilder.get().readEntity(String.class); - } - - public String put(String path, String requestBody) { - JerseyClient jerseyClient = JerseyClientBuilder.createClient(); - HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder() - .credentials("admin", "admin") - .build(); - jerseyClient.register(authFeature); - - String url = String.format("http://%s:%d%s", host, port, path); - - LOG.info("Url: {}", url); - - WebTarget target = jerseyClient.target(url); - Invocation.Builder invocationBuilder = target.request(MediaType.APPLICATION_JSON_TYPE); - String response = invocationBuilder.put(Entity.entity(requestBody, MediaType.APPLICATION_JSON_TYPE)).readEntity(String.class); - - LOG.info("Response: {}", response); - - return response; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AmbariInfraSolrLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AmbariInfraSolrLogPatternIT.java deleted file mode 100644 index 7b55716ccfd..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AmbariInfraSolrLogPatternIT.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
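The StoryDataRegistry and WebClient helpers above are meant to be used together from the story and pattern tests; a brief sketch follows. The basic admin/admin credentials and the 61888 port come from the deleted classes themselves, while the "/api/v1/info" path is only an illustrative placeholder.

// Sketch only; assumes it sits next to the test helpers shown above.
public class WebClientUsageSketch {
  public static void main(String[] args) {
    StoryDataRegistry.INSTANCE.setDockerHost("localhost");

    WebClient client = StoryDataRegistry.INSTANCE.logsearchClient();  // http://localhost:61888
    String getResponse = client.get("/api/v1/info");                  // placeholder path
    System.out.println(getResponse);

    // PUT follows the same pattern: basic auth plus a JSON request entity.
    String putResponse = client.put("/api/v1/info", "{}");
    System.out.println(putResponse);
  }
}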
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; - -import org.junit.Test; - -public class AmbariInfraSolrLogPatternIT extends PatternITBase { - // TODO: use hdp_ambari_definitions - @Test - public void testAmbariInfraSolrLogLayout() { - String layout = Log4jProperties.loadFrom(new File(AMBARI_STACK_DEFINITIONS, "AMBARI_INFRA_SOLR/0.1.0/properties/solr-log4j.properties.j2")).getLayout("file"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testAmbariInfraSolrGrokPatter() throws Exception { - String layout = Log4jProperties.loadFrom(new File(AMBARI_STACK_DEFINITIONS, "AMBARI_INFRA_SOLR/0.1.0/properties/solr-log4j.properties.j2")).getLayout("file"); - testServiceLog("infra_solr", layout, inputConfigTemplate( - new File(AMBARI_STACK_DEFINITIONS, "AMBARI_INFRA_SOLR/0.1.0/package/templates/input.config-ambari-infra.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AmbariLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AmbariLogPatternIT.java deleted file mode 100644 index 7397df08f31..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AmbariLogPatternIT.java +++ /dev/null @@ -1,255 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Paths; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.util.Date; -import java.util.Map; - -import org.junit.BeforeClass; -import org.junit.Test; - -public class AmbariLogPatternIT extends PatternITBase { - - public static File AMBARI_CONF; - - @BeforeClass - public static void setupAmbariConfig() throws Exception { - setupGlobal(); - AMBARI_CONF = new File(AMBARI_FOLDER, Paths.get("ambari-server", "conf", "unix").toString()); - } - - @Test - public void testAmbariAgentLogEntry() throws Exception { - // given - String logEntry = "INFO 2018-05-02 09:29:12,359 DataCleaner.py:39 - Data cleanup thread started"; - // when - Map result = testLogEntry(logEntry, "ambari_agent", ambariInputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("ambari_agent")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Data cleanup thread started")); - assertThat(result.get("file"), is("DataCleaner.py")); - assertThat(result.get("line_number"), is("39")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 29, 12, 359000000))); - } - - @Test - public void testAmbariAgentMultilineLogEntry() throws Exception { - // given - String logEntry = "INFO 2018-05-02 09:31:52,227 RecoveryManager.py:572 - RecoverConfig = {u'components': u'',\n" + - " u'maxCount': u'6',\n" + - " u'maxLifetimeCount': u'1024',\n" + - " u'retryGap': u'5',\n" + - " u'type': u'AUTO_START',\n" + - " u'windowInMinutes': u'60'}"; - // when - Map result = testLogEntry(logEntry, "ambari_agent", ambariInputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("ambari_agent")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("RecoverConfig = {u'components': u'',\n" + - " u'maxCount': u'6',\n" + - " u'maxLifetimeCount': u'1024',\n" + - " u'retryGap': u'5',\n" + - " u'type': u'AUTO_START',\n" + - " u'windowInMinutes': u'60'}")); - assertThat(result.get("file"), is("RecoveryManager.py")); - assertThat(result.get("line_number"), is("572")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 31, 52, 227000000))); - } 
- - @Test - public void testAmbariServerLogLayout() { - testAmbariServerLogLayout("file"); - } - - @Test - public void testAmbariAlertsLogLayout() { - testAmbariServerLogLayout("alerts"); - } - - @Test - public void testAmbariConfigChangesLogLayout() { - testAmbariServerLogLayout("configchange"); - } - - @Test - public void testAmbariDbCheckLogLayout() { - testAmbariServerLogLayout("dbcheckhelper"); - } - - public void testAmbariServerLogLayout(String appenderName) { - String layout = Log4jProperties.loadFrom(new File(AMBARI_CONF, "log4j.properties")).getLayout(appenderName); - assertThatDateIsISO8601(layout); - } - - @Test - public void testAmbariServerLog() throws Exception { - testAmbariServerLog("file", "ambari_server"); - } - - @Test - public void testAmbariConfigChangesLog() throws Exception { - testAmbariServerLog("configchange", "ambari_config_changes"); - } - - @Test - public void testAmbariDBCheckLog() throws Exception { - testAmbariServerLog("dbcheckhelper", "ambari_server_check_database"); - } - - public void testAmbariServerLog(String appenderName, String logId) throws Exception { - String layout = Log4jProperties.loadFrom(new File(AMBARI_CONF, "log4j.properties")).getLayout(appenderName); - testServiceLog(logId, layout, ambariInputConfigTemplate()); - } - - private String ambariInputConfigTemplate() throws IOException { - return inputConfigTemplate( - new File(AMBARI_STACK_DEFINITIONS, "LOGSEARCH/0.5.0/properties/input.config-ambari.json.j2")); - } - - @Test - public void testAmbariAlertsLog() throws Exception { - // given - String layout = Log4jProperties.loadFrom(new File(AMBARI_CONF, "log4j.properties")).getLayout("alerts"); - String logEntry = generateLogEntry(layout); - // when - Map result = testLogEntry(logEntry, "ambari_alerts", ambariInputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("ambari_alerts")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message").toString().contains("This is a test message"), is(true)); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime.toLocalDate(), is(LocalDate.now())); - } - - @Test - public void testAmbariEclipseLinkSevereEntry() throws Exception { - testAmbariEclipseLinkEntry("Severe", "ERROR"); - } - - @Test - public void testAmbariEclipseLinkWarningEntry() throws Exception { - testAmbariEclipseLinkEntry("Warning", "WARN"); - } - - @Test - public void testAmbariEclipseLinkInfoEntry() throws Exception { - testAmbariEclipseLinkEntry("Info", "INFO"); - } - - @Test - public void testAmbariEclipseLinkConfigEntry() throws Exception { - testAmbariEclipseLinkEntry("Config", "INFO"); - } - - private void testAmbariEclipseLinkEntry(String logLevel, String expectedLogLevel) throws Exception { - // given - String logEntry = "[EL " + logLevel + "]: 2018-05-02 09:27:17.79--ServerSession(1657512321)-- EclipseLink, version: Eclipse Persistence Services - 2.6.2.v20151217-774c696"; - // when - Map result = testLogEntry(logEntry, "ambari_eclipselink", 
ambariInputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("level"), is(expectedLogLevel)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("ambari_eclipselink")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("--ServerSession(1657512321)-- EclipseLink, version: Eclipse Persistence Services - 2.6.2.v20151217-774c696")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 27, 17, 79000000))); - - } - - @Test - public void testAmbariAuditLogEntry() throws Exception { - // given - String logEntry = "2018-05-02T09:28:10.302Z, User(null), RemoteIp(192.175.27.2), Operation(User login), Roles(\n" + - "), Status(Failed), Reason(Authentication required), Consecutive failures(UNKNOWN USER)\n" + - "2018-05-02T09:28:10.346Z, User(admin), RemoteIp(192.175.27.2), Operation(User login), Roles(\n" + - " Ambari: Ambari Administrator\n" + - "), Status(Success)"; - // when - Map result = testLogEntry(logEntry, "ambari_audit", ambariInputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("ambari_audit")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("User(null), RemoteIp(192.175.27.2), Operation(User login), Roles(\n" + - "), Status(Failed), Reason(Authentication required), Consecutive failures(UNKNOWN USER)\n" + - "2018-05-02T09:28:10.346Z, User(admin), RemoteIp(192.175.27.2), Operation(User login), Roles(\n" + - " Ambari: Ambari Administrator\n" + - "), Status(Success)")); - Date logTime = (Date) result.get("evtTime"); - ZonedDateTime localDateTime = ZonedDateTime.ofInstant(logTime.toInstant(), ZoneId.of("Z")); - assertThat(localDateTime, is(ZonedDateTime.parse("2018-05-02T09:28:10.302Z"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AtlasLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AtlasLogPatternIT.java deleted file mode 100644 index c1027d513a2..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/AtlasLogPatternIT.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
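The service pattern ITs that follow (Atlas, HBase, HDFS, Kafka, Knox, Metrics, and so on) all repeat one recipe: extract the appender layout from the stack definition, assert that its date format is ISO-8601, then run a generated or literal log line through testServiceLog/testLogEntry with the service's input.config template. A sketch of that recipe for a made-up service; the EXAMPLE paths, the RFA appender name, and the example_service log id are placeholders, not taken from the source.

import java.io.File;
import java.nio.file.Paths;

import org.junit.Test;

public class ExampleLogPatternIT extends PatternITBase {

  @Test
  public void testExampleLogLayout() {
    // Pull the ConversionPattern of the RFA appender out of the stack definition.
    String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
        "EXAMPLE", "configuration", "example-log4j.xml").toString())).getLayout("RFA");
    assertThatDateIsISO8601(layout);
  }

  @Test
  public void testExample() throws Exception {
    String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
        "EXAMPLE", "configuration", "example-log4j.xml").toString())).getLayout("RFA");
    // Generate a log line with that layout and verify the grok pattern in the input config parses it.
    testServiceLog("example_service", layout, inputConfigTemplate(
        new File(HDP_SERVICES_FOLDER, "EXAMPLE/package/templates/input.config-example.json.j2")));
  }
}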
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class AtlasLogPatternIT extends PatternITBase { - - @Test - public void testAtlasLogLayout() { - String layout = Log4jXml.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "ATLAS", "configuration", "atlas-log4j.xml").toString())).getLayout("FILE"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testAtlas() throws Exception { - String layout = Log4jXml.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "ATLAS", "configuration", "atlas-log4j.xml").toString())).getLayout("FILE"); - - testServiceLog("atlas_app", layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER,"ATLAS/package/templates/input.config-atlas.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HBaseLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HBaseLogPatternIT.java deleted file mode 100644 index 27119fef807..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HBaseLogPatternIT.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class HBaseLogPatternIT extends PatternITBase { - - @Test - public void testHBaseLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "HBASE", "configuration", "hbase-log4j.xml").toString())).getLayout("RFA"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testHBase() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "HBASE", "configuration", "hbase-log4j.xml").toString())).getLayout("RFA"); - - testServiceLog("hbase_master", layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "HBASE/package/templates/input.config-hbase.json.j2"))); - testServiceLog("hbase_regionserver", layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "HBASE/package/templates/input.config-hbase.json.j2"))); -// testServiceLog("hbase_phoenix_server", layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "HBASE/package/templates/input.config-hbase.json.j2"))); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HDFSLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HDFSLogPatternIT.java deleted file mode 100644 index 9d17da04203..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HDFSLogPatternIT.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class HDFSLogPatternIT extends PatternITBase { - - @Test - public void testHDFSLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "HDFS", "configuration", "hdfs-log4j.xml").toString())).getLayout("RFA"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testHDFS() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "HDFS", "configuration", "hdfs-log4j.xml").toString())).getLayout("RFA"); - - testServiceLog("hdfs_namenode", layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "HDFS/package/templates/input.config-hdfs.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HdfsAuditLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HdfsAuditLogPatternIT.java deleted file mode 100644 index 3c3cd9f589e..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HdfsAuditLogPatternIT.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.nio.file.Paths; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.Map; - -import org.apache.log4j.PatternLayout; -import org.junit.Test; - -public class HdfsAuditLogPatternIT extends PatternITBase { - - @Test - public void testHDFSAudit() throws Exception { - // given - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "HDFS", "configuration", "hdfs-log4j.xml").toString())).getLayout("RFAS"); - listAppender.setLayout(new PatternLayout(layout)); - listAppender.activateOptions(); - - // when - LOG.info("allowed=true\tugi=hdfs (auth:SIMPLE)\tip=/192.168.73.101\tcmd=getfileinfo\tsrc=/user\tdst=null\tperm=null\tproto=rpc"); - - // then - String logEntry = listAppender.getLogList().get(0); - Map result = testLogEntry(logEntry, "hdfs_audit", inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "HDFS/package/templates/input.config-hdfs.json.j2"))); - - assertAuditLog(result); - } - - private void assertAuditLog(Map resultEntry) { - assertThat(resultEntry.isEmpty(), is(false)); - assertThat(resultEntry.get("logType"), is("HDFSAudit")); - assertThat(resultEntry.get("cluster"), is(CLUSTER)); - assertThat(resultEntry.get("dst"), is("null")); - assertThat(resultEntry.get("perm"), is("null")); - assertThat(resultEntry.get("event_count"), is(1)); - assertThat(resultEntry.get("repo"), is("hdfs")); - assertThat(resultEntry.get("reqUser"), is("hdfs")); - assertThat(resultEntry.get("type"), is("hdfs_audit")); - assertThat(resultEntry.get("level"), is("INFO")); - assertThat(resultEntry.containsKey("seq_num"), is(true)); - assertThat(LOG.getName().contains(resultEntry.get("logger_name").toString()), is(true)); - assertThat(resultEntry.containsKey("id"), is(true)); - assertThat(resultEntry.get("authType"), is("SIMPLE")); - assertThat(resultEntry.get("action"), is("getfileinfo")); - assertThat(resultEntry.containsKey("message_md5"), is(true)); - assertThat(resultEntry.containsKey("event_md5"), is(true)); - assertThat(resultEntry.containsKey("ip"), is(true)); - assertThat(resultEntry.containsKey("host"), is(true)); - Date logTime = (Date) resultEntry.get("evtTime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime.toLocalDate(), is(LocalDate.now())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HiveLogPatterntIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HiveLogPatterntIT.java deleted file mode 100644 index 1b4f18dcaba..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/HiveLogPatterntIT.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
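The single audit line logged through the RFAS layout above carries every field the assertions then check. A rough sketch of how that tab-separated key=value line decomposes; the mapping shown is inferred from the assertions, not read from the input.config template itself.

import java.util.LinkedHashMap;
import java.util.Map;

public class HdfsAuditLineAnatomySketch {
  public static void main(String[] args) {
    // The raw message logged in the test above (the layout prepends date, level and logger).
    String line = "allowed=true\tugi=hdfs (auth:SIMPLE)\tip=/192.168.73.101\tcmd=getfileinfo"
        + "\tsrc=/user\tdst=null\tperm=null\tproto=rpc";

    // Field values the test expects to come out of the parsed entry (mapping is an assumption).
    Map<String, String> expected = new LinkedHashMap<>();
    expected.put("reqUser", "hdfs");        // from ugi=
    expected.put("authType", "SIMPLE");     // from (auth:SIMPLE)
    expected.put("action", "getfileinfo");  // from cmd=
    expected.put("dst", "null");
    expected.put("perm", "null");
    expected.put("repo", "hdfs");

    System.out.println(line + " -> " + expected);
  }
}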
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -import java.io.File; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.Map; - -import org.hamcrest.MatcherAssert; -import org.junit.Test; - -public class HiveLogPatterntIT extends PatternITBase { - - @Test - public void testHiveServerLogEntry() throws Exception { - String logEntry = "2018-05-11T07:46:01,087 WARN [main]: metastore.HiveMetaStoreClient (:()) - Failed to connect to the MetaStore Server..."; - Map result = testLogEntry(logEntry,"hive_server", inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "HIVE/package/templates/input.config-hive.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("WARN")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("hive_server")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Failed to connect to the MetaStore Server...")); - assertThat(result.get("logger_name"), is("metastore.HiveMetaStoreClient ")); - assertThat(result.get("host"), is("HW13201.local")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 11, 7, 46, 1, 87000000))); - } - - @Test - public void testHiveServerInteractiveLogEntry() throws Exception { - String logEntry = "2018-05-11T08:48:02,973 WARN [main]: conf.HiveConf (HiveConf.java:initialize(5193)) - HiveConf of name hive.hook.proto.base-directory does not exist"; - Map result = testLogEntry(logEntry,"hive_server_interactive", inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "HIVE/package/templates/input.config-hive.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("WARN")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("hive_server_interactive")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("HiveConf of name hive.hook.proto.base-directory does not exist")); - assertThat(result.get("logger_name"), is("conf.HiveConf ")); - assertThat(result.get("host"), is("HW13201.local")); - assertThat(result.get("file"), is("HiveConf.java")); - assertThat(result.get("method"), 
is("initialize")); - assertThat(result.get("line_number"), is("5193")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 11, 8, 48, 2, 973000000))); - } - - @Test - public void testHiveMetastoreLogEntry() throws Exception { - String logEntry = "2018-05-11T09:13:14,706 INFO [pool-7-thread-6]: txn.TxnHandler (TxnHandler.java:performWriteSetGC(1588)) - Deleted 0 obsolete rows from WRTIE_SET"; - Map result = testLogEntry(logEntry,"hive_metastore", inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "HIVE/package/templates/input.config-hive.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("hive_metastore")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Deleted 0 obsolete rows from WRTIE_SET")); - assertThat(result.get("logger_name"), is("txn.TxnHandler ")); - assertThat(result.get("host"), is("HW13201.local")); - assertThat(result.get("line_number"), is("1588")); - assertThat(result.get("file"), is("TxnHandler.java")); - assertThat(result.get("method"), is("performWriteSetGC")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 11, 9, 13, 14, 706000000))); - } -} - diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/JinjaFunctions.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/JinjaFunctions.java deleted file mode 100644 index db000b64779..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/JinjaFunctions.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -public class JinjaFunctions { - public static Object defaultFunc(Object value, Object defaultValue) { - if (value == null) - return defaultValue; - return value; - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/KafkaLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/KafkaLogPatternIT.java deleted file mode 100644 index bdba4737bf5..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/KafkaLogPatternIT.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class KafkaLogPatternIT extends PatternITBase { - - @Test - public void testKafkaRequestAppenderLayout() { - testKafkaAppenderLayout("requestAppender"); - } - - @Test - public void testKafkaControllerAppenderLayout() { - testKafkaAppenderLayout("controllerAppender"); - } - - @Test - public void testKafkaLogCleanerAppenderLayout() { - testKafkaAppenderLayout("cleanerAppender"); - } - - @Test - public void testKafkaStateChangeAppenderLayout() { - testKafkaAppenderLayout("stateChangeAppender"); - } - - @Test - public void testKafkaServerAppenderLayout() { - testKafkaAppenderLayout("kafkaAppender"); - } - - private void testKafkaAppenderLayout(String appender) { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "KAFKA", "configuration", "kafka-log4j.xml").toString())).getLayout(appender); - assertThatDateIsISO8601(layout); - } - - @Test - public void testKafkaRequestAppender() throws Exception { - testKafka("requestAppender", "kafka_request"); - } - - @Test - public void testKafkaControllerAppender() throws Exception { - testKafka("controllerAppender", "kafka_controller"); - } - - @Test - public void testKafkaLogCleanerAppender() throws Exception { - testKafka("cleanerAppender", "kafka_logcleaner"); - } - - @Test - public void testKafkaStateChangeAppender() throws Exception { - testKafka("stateChangeAppender", "kafka_statechange"); - } - - @Test - public void testKafkaServerAppender() throws Exception { - testKafka("kafkaAppender", "kafka_server"); - } - - private void testKafka(String appender, String logId) throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "KAFKA", "configuration", "kafka-log4j.xml").toString())).getLayout(appender); - - testServiceLog(logId, layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "KAFKA/package/templates/input.config-kafka.json.j2"))); - } -} diff --git 
a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/KnoxLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/KnoxLogPatternIT.java deleted file mode 100644 index c1c2cef952a..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/KnoxLogPatternIT.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class KnoxLogPatternIT extends PatternITBase { - - @Test - public void testKnoxGatewayAppenderLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "KNOX", "configuration", "gateway-log4j.xml").toString())).getLayout("drfa"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testKnoxLdapAppenderLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "KNOX", "configuration", "ldap-log4j.xml").toString())).getLayout("drfa"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testKnoxGateway() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "KNOX", "configuration", "gateway-log4j.xml").toString())).getLayout("drfa"); - - testServiceLog("knox_gateway", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "KNOX/package/templates/input.config-knox.json.j2"))); - } - - @Test - public void testKnoxLdap() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "KNOX", "configuration", "ldap-log4j.xml").toString())).getLayout("drfa"); - - testServiceLog("knox_ldap", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "KNOX/package/templates/input.config-knox.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/LayoutQuery.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/LayoutQuery.java deleted file mode 100644 index 0400a6060dc..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/LayoutQuery.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -public interface LayoutQuery { - String query(String parameterName); -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ListAppender.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ListAppender.java deleted file mode 100644 index 658b8f6940c..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ListAppender.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.StringWriter; -import java.util.ArrayList; -import java.util.List; - -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.Layout; -import org.apache.log4j.WriterAppender; -import org.apache.log4j.spi.LoggingEvent; - -public class ListAppender extends AppenderSkeleton { - - private final List logList; - - public ListAppender() { - logList = new ArrayList<>(); - } - - @Override - protected void append(LoggingEvent event) { - StringWriter stringWriter = new StringWriter(); - WriterAppender writerAppender = new WriterAppender(layout, stringWriter); - writerAppender.append(event); - logList.add(stringWriter.toString()); - } - - @Override - public void close() { - - } - - @Override - public boolean requiresLayout() { - return true; - } - - public List getLogList() { - return logList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jContent.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jContent.java deleted file mode 100644 index fabe11ea5ba..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jContent.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
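The ListAppender above is how the pattern tests render a log event with a given ConversionPattern and capture the formatted line in memory, exactly as the HDFS audit test earlier in this diff does. A small log4j 1.x sketch; the layout string is the ISO-8601 pattern quoted elsewhere in these tests, and getLogList() is assumed to return List<String> (the diff rendering stripped the generics).

import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class ListAppenderUsageSketch {
  public static void main(String[] args) {
    ListAppender listAppender = new ListAppender();
    listAppender.setLayout(new PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
    listAppender.activateOptions();

    Logger log = Logger.getLogger(ListAppenderUsageSketch.class);
    log.addAppender(listAppender);
    log.info("This is a test message");

    // Every appended event is rendered through the layout and collected as a string.
    String renderedLine = listAppender.getLogList().get(0);
    System.out.println(renderedLine);
  }
}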
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; - -public interface Log4jContent { - String loadContent(); -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jProperties.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jProperties.java deleted file mode 100644 index d031f8d54db..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jProperties.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.io.IOException; -import java.io.StringReader; -import java.io.UncheckedIOException; -import java.nio.charset.Charset; -import java.util.Properties; - -import org.apache.commons.io.FileUtils; - -public class Log4jProperties { - public static Log4jProperties loadFrom(File file) { - return new Log4jProperties(() -> { - try { - return FileUtils.readFileToString(file, Charset.defaultCharset()); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - } - - public static Log4jProperties unwrapFrom(File file) { - return new Log4jProperties(new StackDefContent(file, "content")); - } - - public static Log4jProperties unwrapFrom(File file, String propertyName) { - return new Log4jProperties(new StackDefContent(file, propertyName)); - } - - private final Log4jContent content; - - public Log4jProperties(Log4jContent content) { - this.content = content; - } - - public String getLayout(String appenderName) { - Properties properties = new Properties(); - try (StringReader reader = new StringReader(content.loadContent())) { - properties.load(reader); - return properties.getProperty("log4j.appender." 
+ appenderName + ".layout.ConversionPattern"); - } - catch (IOException ex) { - throw new UncheckedIOException(ex); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jXml.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jXml.java deleted file mode 100644 index 4b0b9e23257..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jXml.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import static org.apache.ambari.logsearch.patterns.StackDefContent.DOCUMENT_BUILDER_FACTORY; -import static org.apache.ambari.logsearch.patterns.StackDefContent.X_PATH_FACTORY; - -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.InputStream; -import java.nio.charset.Charset; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathExpression; - -import org.w3c.dom.Document; - -public class Log4jXml { - public static Log4jXml unwrapFrom(File file) { - return unwrapFrom(file, "content"); - } - - public static Log4jXml unwrapFrom(File file, String propertyName) { - return new Log4jXml( - new StackDefContent(file, propertyName), - (appenderName) -> "/configuration/appender[@name='" + appenderName + "']/layout/param[@name='ConversionPattern']/@value"); - } - - private final Log4jContent content; - private final LayoutQuery layoutQuery; - - public Log4jXml(Log4jContent content, LayoutQuery layoutQuery) { - this.content = content; - this.layoutQuery = layoutQuery; - } - - public String getLayout(String appenderName) { - return getLayout(content, layoutQuery, appenderName); - } - - public static String getLayout(Log4jContent content, LayoutQuery layoutQuery, String parameterName) { - try { - DocumentBuilder builder = DOCUMENT_BUILDER_FACTORY.newDocumentBuilder(); - Document doc; - try (InputStream stringReader = new ByteArrayInputStream(content.loadContent().getBytes(Charset.defaultCharset()))) { - doc = builder.parse(stringReader); - } - XPath xpath = X_PATH_FACTORY.newXPath(); - XPathExpression expr = xpath.compile(layoutQuery.query(parameterName)); - return (String) expr.evaluate(doc, XPathConstants.STRING); - } - catch (Exception ex) { - throw new RuntimeException(ex); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jXmlProperties.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jXmlProperties.java deleted file mode 100644 index ada5f2acd6f..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Log4jXmlProperties.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; - -public class Log4jXmlProperties { - public static Log4jXmlProperties unwrapFrom(File file) { - return unwrapFrom(file, "content"); - } - - public static Log4jXmlProperties unwrapFrom(File file, String contentPropertyName) { - return new Log4jXmlProperties( - new StackDefContent(file, contentPropertyName), - (xmlPropertyName) -> "/configuration/properties/property[@name='" + xmlPropertyName + "']/text()"); - } - - public Log4jXmlProperties(Log4jContent content, LayoutQuery layoutQuery) { - this.content = content; - this.layoutQuery = layoutQuery; - } - - private final Log4jContent content; - private final LayoutQuery layoutQuery; - - public String getLayout(String propertyName) { - return Log4jXml.getLayout(content, layoutQuery, propertyName); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/MetricsLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/MetricsLogPatternIT.java deleted file mode 100644 index 18b35ca8941..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/MetricsLogPatternIT.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class MetricsLogPatternIT extends PatternITBase { - - @Test - public void testMetricsLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "AMBARI_METRICS", "configuration", "ams-log4j.xml").toString())).getLayout("file"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testMetrics() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "AMBARI_METRICS", "configuration", "ams-log4j.xml").toString())).getLayout("file"); - - testServiceLog("ams_collector", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "AMBARI_METRICS/package/templates/input.config-ambari-metrics.json.j2"))); -// testServiceLog("ams_monitor", layout, Paths.get("AMBARI_METRICS", "package", "templates", "input.config-ambari-metrics.json.j2")); - } - -// @Test -// public void testMetricsGrafana() throws Exception { -// testServiceLog("ams_grafana", "%d{ISO8601} %-5p [%t] %c{2}: %m%n", Paths.get("AMBARI_METRICS", "package", "templates", "input.config-ambari-metrics.json.j2")); -// } - - @Test - public void testMetricsHBaseLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "AMBARI_METRICS", "configuration", "ams-hbase-log4j.xml").toString())).getLayout("DRFA"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testMetricsHBase() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "AMBARI_METRICS", "configuration", "ams-hbase-log4j.xml").toString())).getLayout("DRFA"); - - testServiceLog("ams_hbase_master", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "AMBARI_METRICS/package/templates/input.config-ambari-metrics.json.j2"))); - testServiceLog("ams_hbase_regionserver", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "AMBARI_METRICS/package/templates/input.config-ambari-metrics.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/PatternITBase.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/PatternITBase.java deleted file mode 100644 index 25e49d53d27..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/PatternITBase.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; -import static org.junit.Assume.assumeTrue; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.nio.charset.Charset; -import java.nio.file.Paths; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.logfeeder.common.LogEntryParseTester; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputAdapter; -import org.apache.commons.io.FileUtils; -import org.apache.log4j.Logger; -import org.apache.log4j.PatternLayout; -import org.junit.Before; -import org.junit.BeforeClass; - -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; -import com.hubspot.jinjava.Jinjava; -import com.hubspot.jinjava.lib.fn.ELFunctionDefinition; - -public class PatternITBase { - protected final static Logger LOG = Logger.getLogger(PatternITBase.class); - - public static File HDP_AMBARI_DEFINITIONS; - public static File AMBARI_STACK_DEFINITIONS; - public static File AMBARI_FOLDER; - public static File HDP_SERVICES_FOLDER; - public static final String CLUSTER = "cl1"; - public static final String GLOBAL_CONFIG = "[\n" + - " {\n" + - " \"add_fields\": {\n" + - " \"cluster\": \""+ CLUSTER +"\"\n" + - " },\n" + - " \"source\": \"file\",\n" + - " \"tail\": \"true\",\n" + - " \"gen_event_md5\": \"true\"\n" + - " }\n" + - "]"; - - private Jinjava jinjava = new Jinjava(); - protected ListAppender listAppender; - - - @BeforeClass - public static void setupGlobal() throws Exception { - String hdpAmbariDefinitionsPath = System.getProperty("hdp.ambari.definitions.path"); - if (isNotBlank(hdpAmbariDefinitionsPath)) { - HDP_AMBARI_DEFINITIONS = new File(hdpAmbariDefinitionsPath); - HDP_SERVICES_FOLDER = new File(HDP_AMBARI_DEFINITIONS, Paths.get( "src", "main", "resources", "stacks", "HDP", "3.0", "services").toString()); - } - - assumeTrue(HDP_SERVICES_FOLDER != null && HDP_SERVICES_FOLDER.exists()); - - URL location = PatternITBase.class.getProtectionDomain().getCodeSource().getLocation(); - - AMBARI_FOLDER = new File(new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent()); - AMBARI_STACK_DEFINITIONS = new File(AMBARI_FOLDER, Paths.get("ambari-server", "src", "main", "resources", "common-services").toString()); - } - - @Before - public void setUp() throws Exception { - JsonParser jsonParser = new JsonParser(); - JsonElement globalConfigJsonElement = jsonParser.parse(GLOBAL_CONFIG); - - InputAdapter.setGlobalConfigs(globalConfigJsonElement.getAsJsonArray()); - jinjava.getGlobalContext().registerFunction(new ELFunctionDefinition("", "default", JinjaFunctions.class, "defaultFunc", Object.class, Object.class)); - - listAppender = new ListAppender(); - LOG.addAppender(listAppender); - } - - protected String inputConfigTemplate(File templateFile) throws IOException { - return FileUtils.readFileToString(templateFile, Charset.defaultCharset()); - } - - protected void testServiceLog(String logId, String log4jLayout, String inputConfigTemplate) throws Exception { - String logEntry = generateLogEntry(log4jLayout); - Map resultEntry = testLogEntry(logEntry, logId, inputConfigTemplate); - assertServiceLog(logId, resultEntry); - } - - protected String generateLogEntry(String log4jLayout) { - return 
generateLogEntry(log4jLayout, "This is a test message"); - } - - protected String generateLogEntry(String log4jLayout, String message) { - listAppender.setLayout(new PatternLayout(log4jLayout)); - listAppender.activateOptions(); - LOG.error(message, new Exception("TEST")); - return listAppender.getLogList().get(0); - } - - protected Map testLogEntry(String logEntry, String logId, String inputConfigTemplate) throws Exception { - String grokFilter = jinjava.render(inputConfigTemplate, new HashMap<>()); - - LogEntryParseTester tester = new LogEntryParseTester(logEntry, grokFilter, GLOBAL_CONFIG, logId); - return tester.parse(); - } - - private void assertServiceLog(String logId, Map resultEntry) { - assertThat(resultEntry.isEmpty(), is(false)); - assertThat(resultEntry.get("cluster"), is(CLUSTER)); - assertThat(resultEntry.get("level"), is("ERROR")); - assertThat(resultEntry.get("event_count"), is(1)); - assertThat(resultEntry.get("type"), is(logId)); - assertThat(resultEntry.containsKey("seq_num"), is(true)); -// assertThat(LOG.getName().contains(resultEntry.get("logger_name").toString()), is(true)); - assertThat(resultEntry.containsKey("id"), is(true)); - assertThat(resultEntry.containsKey("message_md5"), is(true)); - assertThat(resultEntry.containsKey("event_md5"), is(true)); - assertThat(resultEntry.containsKey("ip"), is(true)); - assertThat(resultEntry.containsKey("host"), is(true)); - assertThat(resultEntry.get("log_message").toString().contains("This is a test message"), is(true)); - assertThat(resultEntry.get("log_message").toString().contains("java.lang.Exception: TEST"), is(true)); - Date logTime = (Date) resultEntry.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime.toLocalDate(), is(LocalDate.now())); - } - - protected void assertThatDateIsISO8601(String layout) { - assertThat(layout.toLowerCase().contains("%d{iso8601}"), is(true)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/RangerLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/RangerLogPatternIT.java deleted file mode 100644 index 11d3a2a0782..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/RangerLogPatternIT.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class RangerLogPatternIT extends PatternITBase { - - @Test - public void testRangerAdminLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "RANGER", "configuration", "admin-log4j.xml").toString())).getLayout("xa_log_appender"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testRangerUserSynchLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "RANGER", "configuration", "usersync-log4j.xml").toString())).getLayout("logFile"); - assertThat(layout.contains("%d{dd MMM yyyy HH:mm:ss}"), is(true)); - } - - @Test - public void testRangerAdminLog() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "RANGER", "configuration", "admin-log4j.xml").toString())).getLayout("xa_log_appender"); - - testServiceLog("ranger_admin", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "RANGER/package/templates/input.config-ranger.json.j2"))); - } - - @Test - public void testRangerUserSynchLog() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "RANGER", "configuration", "usersync-log4j.xml").toString())).getLayout("logFile"); - - testServiceLog("ranger_usersync", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "RANGER/package/templates/input.config-ranger.json.j2"))); - } - - @Test - public void testRangerKMSLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "RANGER_KMS", "configuration", "kms-log4j.xml").toString())).getLayout("kms"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testRangerKMSLog() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "RANGER_KMS", "configuration", "kms-log4j.xml").toString())).getLayout("kms"); - - testServiceLog("ranger_kms", layout, inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "RANGER_KMS/package/templates/input.config-ranger-kms.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/SmartSenseLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/SmartSenseLogPatternIT.java deleted file mode 100644 index 8ca129439f8..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/SmartSenseLogPatternIT.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.IOException; -import java.nio.charset.Charset; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.Map; - -import org.apache.commons.io.IOUtils; -import org.junit.Test; - -public class SmartSenseLogPatternIT extends PatternITBase { - - // TODO: read input config from hdp-ambari-definitions when available - - @Test - public void testHSTServerLogEntry() throws Exception { - //given - String logEntry = "2018-05-02 09:40:14,740 INFO [main] SupportToolServer:143 - Starting HST Server."; - // when - Map result = testLogEntry(logEntry, "hst_server", inputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("hst_server")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("logger_name"), is("SupportToolServer")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Starting HST Server.")); - assertThat(result.get("line_number"), is("143")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 40, 14, 740000000))); - } - - private String inputConfigTemplate() throws IOException { - return IOUtils.toString(getClass().getClassLoader().getResourceAsStream("test-input-config/input.config-smartsense.json.j2"), Charset.defaultCharset()); - } - - @Test - public void testHSTAgentLogEntry() throws Exception { - // given - String logEntry = "INFO 2018-05-02 09:32:47,197 security.py:177 - Server certificate not exists, downloading"; - // when - Map result = testLogEntry(logEntry, "hst_agent", inputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("hst_agent")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("file"), is("security.py")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Server certificate not exists, downloading")); - assertThat(result.get("line_number"), is("177")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 32, 47, 197000000))); - } - - @Test - public void testActivityAnalyserLogEntry() throws Exception { - // given - String logEntry = "2018-05-02 10:23:49,592 INFO [main] ActivityUtil:410 
- Could not find valid SmartSense ID. Will recheck every 5 minutes for next 5 minutes."; - // when - Map result = testLogEntry(logEntry, "activity_analyser", inputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("activity_analyser")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("logger_name"), is("ActivityUtil")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Could not find valid SmartSense ID. Will recheck every 5 minutes for next 5 minutes.")); - assertThat(result.get("line_number"), is("410")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 10, 23, 49, 592000000))); - } - - @Test - public void testActivityExplorerLogEntry() throws Exception { - // given - String logEntry = "2018-05-02 09:44:26,883 INFO [main] FileSystemConfigStorage:74 - Creating filesystem: org.apache.hadoop.fs.RawLocalFileSystem"; - // when - Map result = testLogEntry(logEntry, "activity_explorer", inputConfigTemplate()); - // then - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("activity_explorer")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("logger_name"), is("FileSystemConfigStorage")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Creating filesystem: org.apache.hadoop.fs.RawLocalFileSystem")); - assertThat(result.get("line_number"), is("74")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 44, 26, 883000000))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Spark2LogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Spark2LogPatternIT.java deleted file mode 100644 index 99fcc84e9ad..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/Spark2LogPatternIT.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class Spark2LogPatternIT extends PatternITBase { - - @Test - public void testSpark2LogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "SPARK2", "configuration", "spark2-log4j-properties.xml").toString())).getLayout("console"); - assertThat(layout.contains("%d{yy/MM/dd HH:mm:ss}"), is(true)); - } - - @Test - public void testSpark2Livy2LogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "SPARK2", "configuration", "livy2-log4j-properties.xml").toString())).getLayout("console"); - assertThat(layout.contains("%d{yy/MM/dd HH:mm:ss}"), is(true)); - } - - @Test - public void testSpark2Log() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "SPARK2", "configuration", "spark2-log4j-properties.xml").toString())).getLayout("console"); - - testServiceLog("spark2_jobhistory_server", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "SPARK2/package/templates/input.config-spark2.json.j2"))); - } - - @Test - public void testSpark2Livy2Log() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "SPARK2", "configuration", "livy2-log4j-properties.xml").toString())).getLayout("console"); - - testServiceLog("livy2_server", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "SPARK2/package/templates/input.config-spark2.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/StackDefContent.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/StackDefContent.java deleted file mode 100644 index 5ae33f8034c..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/StackDefContent.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.io.FileInputStream; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathExpression; -import javax.xml.xpath.XPathFactory; - -import org.w3c.dom.Document; - -public class StackDefContent implements Log4jContent { - public static final XPathFactory X_PATH_FACTORY = XPathFactory.newInstance(); - public static final DocumentBuilderFactory DOCUMENT_BUILDER_FACTORY = DocumentBuilderFactory.newInstance(); - - private final File file; - private final String propertyName; - - public StackDefContent(File file, String propertyName) { - this.file = file; - this.propertyName = propertyName; - } - - @Override - public String loadContent() { - try { - DocumentBuilder builder = DOCUMENT_BUILDER_FACTORY.newDocumentBuilder(); - Document doc; - try (FileInputStream fileInputStream = new FileInputStream(file)) { - doc = builder.parse(fileInputStream); - } - XPath xpath = X_PATH_FACTORY.newXPath(); - XPathExpression expr = xpath.compile("/configuration/property[name/text()='" + propertyName + "']/value/text()"); - return expr.evaluate(doc); - } - catch (Exception ex) { - throw new RuntimeException(ex); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/StormLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/StormLogPatternIT.java deleted file mode 100644 index f1e65df260e..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/StormLogPatternIT.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.nio.file.Paths; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.Map; - -import org.hamcrest.MatcherAssert; -import org.junit.Test; - -public class StormLogPatternIT extends PatternITBase { - - @Test - public void testStormClusterLogLayout() { - String layout = Log4jXmlProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "STORM", "configuration", "storm-cluster-log4j.xml").toString())).getLayout("pattern"); - assertThat(layout.contains("yyyy-MM-dd HH:mm:ss.SSS"), is(true)); - } - - @Test - public void testStormWorkerLogLayout() { - String layout = Log4jXmlProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "STORM", "configuration", "storm-worker-log4j.xml").toString())).getLayout("pattern"); - assertThat(layout.contains("yyyy-MM-dd HH:mm:ss.SSS"), is(true)); - } - - @Test - public void testStormLog() throws Exception { - String layout = Log4jXmlProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "STORM", "configuration", "storm-cluster-log4j.xml").toString())).getLayout("pattern"); - - testServiceLog("storm_drpc", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "STORM/package/templates/input.config-storm.json.j2"))); - } - - @Test - public void testStormWorkerLog() throws Exception { - String layout = Log4jXmlProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "STORM", "configuration", "storm-worker-log4j.xml").toString())).getLayout("pattern"); - - testServiceLog("storm_worker", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "STORM/package/templates/input.config-storm.json.j2"))); - } - - @Test - public void testStormWorkerLogEntry() throws Exception { - String logEntry = "2018-05-04 05:10:00.120 o.a.s.d.executor main [INFO] Loaded executor tasks count:[5 5]"; - Map result = testLogEntry(logEntry, "storm_worker", inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "STORM/package/templates/input.config-storm.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("storm_worker")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Loaded executor tasks count:[5 5]")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 4, 5, 10, 0, 120000000))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/YarnLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/YarnLogPatternIT.java deleted file mode 100644 index 3a0f7668270..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/YarnLogPatternIT.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.patterns; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.core.Is.is; - -import java.io.File; -import java.nio.file.Paths; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Date; -import java.util.Map; - -import org.junit.Test; - -public class YarnLogPatternIT extends PatternITBase { - - @Test - public void testYarnJobSummaryLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "YARN", "configuration", "yarn-log4j.xml").toString())).getLayout("RMSUMMARY"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testYarnJobSummaryLog() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "YARN", "configuration", "yarn-log4j.xml").toString())).getLayout("RMSUMMARY"); - - testServiceLog("yarn_jobsummary", layout, - inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "YARN/package/templates/input.config-yarn.json.j2"))); - } - - @Test - public void testYarnNodemanagerLogEntry() throws Exception { - Map result = testLogEntry("2018-05-02 09:43:46,898 INFO zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:zookeeper.version=3.4.6-1173--1,\n" + - " built on 04/10/2018 11:42 GMT", - "yarn_nodemanager", - inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "YARN/package/templates/input.config-yarn.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("yarn_nodemanager")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("logger_name"), is("zookeeper.ZooKeeper ")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Client environment:zookeeper.version=3.4.6-1173--1,\n built on 04/10/2018 11:42 GMT")); - assertThat(result.get("line_number"), is("100")); - assertThat(result.get("file"), is("Environment.java")); - assertThat(result.get("method"), is("logEnv")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 43, 46, 898000000))); - } - - @Test - public void testYarnResourcemanagerLogEntry() throws Exception { - Map result = testLogEntry("2018-05-02 09:41:43,917 INFO 
placement.UserGroupMappingPlacementRule (UserGroupMappingPlacementRule.java:get(232)) - Initialized queue mappings, override: false", - "yarn_resourcemanager", - inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "YARN/package/templates/input.config-yarn.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("yarn_resourcemanager")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("logger_name"), is("placement.UserGroupMappingPlacementRule ")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Initialized queue mappings, override: false")); - assertThat(result.get("line_number"), is("232")); - assertThat(result.get("file"), is("UserGroupMappingPlacementRule.java")); - assertThat(result.get("method"), is("get")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 9, 41, 43, 917000000))); - } - - @Test - public void testYarnTimelineServerLogEntry() throws Exception { - Map result = testLogEntry("2018-05-02 10:36:27,868 INFO timeline.RollingLevelDB (RollingLevelDB.java:evictOldDBs(345)) - Evicting entity-ldb DBs scheduled for eviction", - "yarn_timelineserver", - inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "YARN/package/templates/input.config-yarn.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), is("yarn_timelineserver")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.get("logger_name"), is("timeline.RollingLevelDB ")); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Evicting entity-ldb DBs scheduled for eviction")); - assertThat(result.get("line_number"), is("345")); - assertThat(result.get("file"), is("RollingLevelDB.java")); - assertThat(result.get("method"), is("evictOldDBs")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 10, 36, 27, 868000000))); - } - - @Test - public void testYarnHistoryServerLogEntry() throws Exception { - Map result = testLogEntry("2018-05-02 10:02:54,215 INFO webapp.View (HsJobsBlock.java:render(74)) - Getting list of all Jobs.", - "mapred_historyserver", - inputConfigTemplate(new File(HDP_SERVICES_FOLDER, "YARN/package/templates/input.config-mapreduce2.json.j2"))); - - assertThat(result.isEmpty(), is(false)); - assertThat(result.get("cluster"), is(CLUSTER)); - assertThat(result.get("level"), is("INFO")); - assertThat(result.get("event_count"), is(1)); - assertThat(result.get("type"), 
is("mapred_historyserver")); - assertThat(result.get("logger_name"), is("webapp.View ")); - assertThat(result.containsKey("seq_num"), is(true)); - assertThat(result.containsKey("id"), is(true)); - assertThat(result.containsKey("message_md5"), is(true)); - assertThat(result.containsKey("event_md5"), is(true)); - assertThat(result.containsKey("ip"), is(true)); - assertThat(result.containsKey("host"), is(true)); - assertThat(result.get("log_message"), is("Getting list of all Jobs.")); - assertThat(result.get("line_number"), is("74")); - assertThat(result.get("file"), is("HsJobsBlock.java")); - assertThat(result.get("method"), is("render")); - Date logTime = (Date) result.get("logtime"); - LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault()); - assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 2, 10, 2, 54, 215000000))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ZeppelinLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ZeppelinLogPatternIT.java deleted file mode 100644 index d924ef96771..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ZeppelinLogPatternIT.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class ZeppelinLogPatternIT extends PatternITBase { - - @Test - public void testZeppelinLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "ZEPPELIN", "configuration", "zeppelin-log4j-properties.xml").toString()), - "log4j_properties_content").getLayout("dailyfile"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testZeppelinLog() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "ZEPPELIN", "configuration", "zeppelin-log4j-properties.xml").toString()), - "log4j_properties_content").getLayout("dailyfile"); - - testServiceLog("zeppelin", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "ZEPPELIN/package/templates/input.config-zeppelin.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ZookeeperLogPatternIT.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ZookeeperLogPatternIT.java deleted file mode 100644 index a68b7c4afeb..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/patterns/ZookeeperLogPatternIT.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.patterns; - -import java.io.File; -import java.nio.file.Paths; - -import org.junit.Test; - -public class ZookeeperLogPatternIT extends PatternITBase { - - @Test - public void testZookeeperLogLayout() { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "ZOOKEEPER", "configuration", "zookeeper-log4j.xml").toString())).getLayout("ROLLINGFILE"); - assertThatDateIsISO8601(layout); - } - - @Test - public void testZookeeperLog() throws Exception { - String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get( - "ZOOKEEPER", "configuration", "zookeeper-log4j.xml").toString())).getLayout("ROLLINGFILE"); - - testServiceLog("zookeeper", layout, inputConfigTemplate( - new File(HDP_SERVICES_FOLDER, "ZOOKEEPER/package/templates/input.config-zookeeper.json.j2"))); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/AbstractLogSearchSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/AbstractLogSearchSteps.java deleted file mode 100644 index e986f6fe67e..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/AbstractLogSearchSteps.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.steps; - -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStreamReader; -import java.net.InetSocketAddress; -import java.net.Socket; -import java.net.URL; - -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.apache.commons.lang3.StringUtils; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.impl.LBHttpSolrClient; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.SolrPingResponse; -import org.apache.solr.common.SolrDocumentList; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class AbstractLogSearchSteps { - - private static final Logger LOG = LoggerFactory.getLogger(AbstractLogSearchSteps.class); - - protected void initDockerContainer() throws Exception{ - boolean logsearchStarted = StoryDataRegistry.INSTANCE.isLogsearchContainerStarted(); - if (!logsearchStarted) { - LOG.info("Create new docker container for Log Search ..."); - URL location = LogSearchDockerSteps.class.getProtectionDomain().getCodeSource().getLocation(); - String ambariFolder = new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent(); - StoryDataRegistry.INSTANCE.setAmbariFolder(ambariFolder); - String scriptFolder = ambariFolder + "/ambari-logsearch/docker/"; - StoryDataRegistry.INSTANCE.setShellScriptFolder(scriptFolder); - String shellScriptLocation = scriptFolder + "logsearch-docker.sh"; - StoryDataRegistry.INSTANCE.setShellScriptLocation(shellScriptLocation); - String output = runCommand(scriptFolder, new String[]{StoryDataRegistry.INSTANCE.getShellScriptLocation(), "start"}); - LOG.info("Command output: {}", output); - StoryDataRegistry.INSTANCE.setLogsearchContainerStarted(true); - - String dockerHostFromUri = System.getProperty("docker.host") != null ? System.getProperty("docker.host") : "localhost"; - - StoryDataRegistry.INSTANCE.setDockerHost(dockerHostFromUri); - checkHostAndPortReachable(dockerHostFromUri, StoryDataRegistry.INSTANCE.getLogsearchPort(), "LogSearch"); - waitUntilSolrIsUp(); - waitUntilSolrHasAnyData(); - - LOG.info("Waiting for logfeeder to finish the test log parsings... (10 sec)"); - Thread.sleep(10000); - } - } - - private void waitUntilSolrIsUp() throws Exception { - int maxTries = 30; - boolean solrIsUp = false; - String lastExceptionMessage = null; - for (int tries = 1; tries < maxTries; tries++) { - try { - SolrClient solrClient = new LBHttpSolrClient.Builder() - .withBaseSolrUrl(String.format("http://%s:%d/solr/%s_shard1_replica_n1", - StoryDataRegistry.INSTANCE.getDockerHost(), - StoryDataRegistry.INSTANCE.getSolrPort(), - StoryDataRegistry.INSTANCE.getServiceLogsCollection())) - .build(); - StoryDataRegistry.INSTANCE.setSolrClient(solrClient); - SolrPingResponse pingResponse = solrClient.ping(); - if (pingResponse.getStatus() != 0) { - LOG.info("Solr is not up yet, Retrying... ({} tries)", tries); - Thread.sleep(2000); - } else { - solrIsUp = true; - LOG.info("Solr is up and running"); - break; - } - } catch (Exception e) { - LOG.info("Error occurred during pinging solr. Retrying... ({} tries)", tries); - lastExceptionMessage = e.getMessage(); - Thread.sleep(2000); - } - } - - if (!solrIsUp) { - throw new IllegalStateException(String.format("Solr is not up after %d tries. 
Exception: %s", maxTries, lastExceptionMessage)); - } - } - - protected void waitUntilSolrHasAnyData() throws InterruptedException { - boolean solrHasData = false; - int maxTries = 60; - String lastExceptionMessage = null; - - for (int tries = 1; tries < maxTries; tries++) { - try { - SolrClient solrClient = StoryDataRegistry.INSTANCE.getSolrClient(); - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - QueryResponse queryResponse = solrClient.query(solrQuery); - SolrDocumentList list = queryResponse.getResults(); - if (list.size() > 0) { - solrHasData = true; - break; - } else { - Thread.sleep(2000); - LOG.info("Solr has no data yet. Retrying... ({} tries)", tries); - } - } catch (Exception e) { - LOG.info("Error occurred during checking solr. Retrying... ({} tries)", tries); - lastExceptionMessage = e.getMessage(); - Thread.sleep(2000); - } - } - if (!solrHasData) { - throw new IllegalStateException(String.format("Solr has no data after %d tries. Exception: %s", maxTries, lastExceptionMessage)); - } - } - - - protected void checkHostAndPortReachable(String host, int port, String serviceName) throws InterruptedException { - boolean reachable = false; - int maxTries = 60; - for (int tries = 1; tries < maxTries; tries++ ) { - try (Socket socket = new Socket()) { - socket.connect(new InetSocketAddress(host, port), 1000); - reachable = true; - break; - } catch (IOException e) { - Thread.sleep(2000); - LOG.info("{} is not reachable yet. Retrying... ({} tries)", serviceName, tries); - } - } - if (!reachable) { - throw new IllegalStateException(String.format("%s is not reachable after %s tries", serviceName, maxTries)); - } - } - - - protected String runCommand(String location, String[] command) { - try { - LOG.info("Exec command: {}", StringUtils.join(command, " ")); - Process process = Runtime.getRuntime().exec(command, null, new File(location)); - BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); - return reader.readLine(); - } catch (Exception e) { - throw new RuntimeException("Error during execute shell command: ", e); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java deleted file mode 100644 index 9e94a4920cc..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.steps; - -import java.io.File; -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.jbehave.core.annotations.Named; -import org.jbehave.core.annotations.Then; -import org.jbehave.core.annotations.When; -import org.junit.Assert; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.flipkart.zjsonpatch.JsonDiff; -import com.google.common.io.Resources; - -public class LogSearchApiSteps { - - private static Logger LOG = LoggerFactory.getLogger(LogSearchApiSteps.class); - - private String response; - - @When("LogSearch api query sent: ") - public void sendApiQuery(@Named("apiQuery") String apiQuery) { - response = StoryDataRegistry.INSTANCE.logsearchClient().get(apiQuery); - } - - - @Then("The api query result is ") - public void verifyRestApiCall(@Named("jsonResult") String jsonResult) throws IOException, URISyntaxException { - ObjectMapper mapper = new ObjectMapper(); - Path jsonFilePath = new File(Resources.getResource("test-output/" + jsonResult).toURI()).toPath(); - String jsonExpected = new String(Files.readAllBytes(jsonFilePath)); - - JsonNode expected = mapper.readTree(jsonExpected); - JsonNode result = mapper.readTree(response); - JsonNode patch = JsonDiff.asJson(expected, result); - List diffObjects = mapper.convertValue(patch, List.class); - assertDiffs(diffObjects, expected); - - } - - @SuppressWarnings("unchecked") - private void assertDiffs(List diffObjects, JsonNode expected) { - for (Object diffObj : diffObjects) { - String path = ((Map) diffObj).get("path"); - Assert.assertTrue(expected.at(path).isMissingNode()); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchConfigApiSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchConfigApiSteps.java deleted file mode 100644 index 7c6a114f10e..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchConfigApiSteps.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.steps; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.not; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.hamcrest.Matchers; -import org.jbehave.core.annotations.Then; -import org.jbehave.core.annotations.When; - -import com.google.gson.Gson; - -public class LogSearchConfigApiSteps { - private String response; - private InputConfig inputConfig; - - @When("LogSearch api request sent: $url") - public String sendApiRequest(String url) { - response = StoryDataRegistry.INSTANCE.logsearchClient().get(url); - return response; - } - - @When("Update input config of $inputConfigType path to $logFilePath at $url") - public void changeAndPut(String inputConfigType, String logFilePath, String url) { - String putRequest = response.replace(inputConfig.getInput().get(0).getPath(), logFilePath); - String putResponse = StoryDataRegistry.INSTANCE.logsearchClient().put( - url, putRequest); - assertThat(putResponse, is("")); - - String getResponse = sendApiRequest(url); - checkInputConfig(getResponse, inputConfigType, logFilePath); - } - - @When("Update input config with data $jsonString at $url") - public void updateWithInvalidJson(String jsonString, String url) { - response = StoryDataRegistry.INSTANCE.logsearchClient().put(url, jsonString); - } - - @Then("Result is an input.config of $inputConfigType with log file path $logFilePath") - public void checkInputConfig(String inputConfigType, String logFilePath) { - checkInputConfig(response, inputConfigType, logFilePath); - } - - public void checkInputConfig(String result, String type, String path) { - inputConfig = InputConfigGson.gson.fromJson(response, InputConfigImpl.class); - assertThat(inputConfig.getInput(), is(not(Matchers.nullValue()))); - assertThat(inputConfig.getInput(), hasSize(1)); - assertThat(inputConfig.getInput().get(0).getType(), is(type)); - assertThat(inputConfig.getInput().get(0).getPath(), is(path)); - } - - @Then("Result is status code $statusCode") - public void checkStatus(int statusCode) { - System.out.println("************" + response); - StatusMessage statusMessage = new Gson().fromJson(response, StatusMessage.class); - assertThat(statusMessage.getStatusCode(), is(statusCode)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java deleted file mode 100644 index cbbeb2d4fb6..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchDockerSteps.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.steps; - -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.LBHttpSolrClient; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.SolrPingResponse; -import org.apache.solr.common.SolrDocumentList; -import org.jbehave.core.annotations.AfterStories; -import org.jbehave.core.annotations.BeforeStories; -import org.jbehave.core.annotations.Given; -import org.jbehave.core.annotations.When; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStreamReader; -import java.net.InetSocketAddress; -import java.net.Socket; -import java.net.URL; - -public class LogSearchDockerSteps extends AbstractLogSearchSteps { - - private static final Logger LOG = LoggerFactory.getLogger(LogSearchDockerSteps.class); - - @Given("logsearch docker container") - public void setupLogSearchContainer() throws Exception { - initDockerContainer(); - } - - @When("logfeeder started (parse logs & send data to solr)") - public void logfeederStarted() throws Exception { - // TODO: run ps aux to check LogFeeder process with docker exec - } - - @BeforeStories - public void initDocker() throws Exception { - // TODO: check docker is up - } - - @AfterStories - public void removeLogSearchContainer() { - runCommand(StoryDataRegistry.INSTANCE.getShellScriptFolder(), new String[]{StoryDataRegistry.INSTANCE.getShellScriptLocation(), "stop"}); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java deleted file mode 100644 index 1b7c63484a1..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.steps; - -import junit.framework.Assert; -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.apache.ambari.logsearch.web.Home; -import org.jbehave.core.annotations.AfterScenario; -import org.jbehave.core.annotations.AfterStory; -import org.jbehave.core.annotations.BeforeScenario; -import org.jbehave.core.annotations.BeforeStories; -import org.jbehave.core.annotations.Given; -import org.jbehave.core.annotations.Named; -import org.jbehave.core.annotations.Then; -import org.jbehave.core.annotations.When; -import org.jbehave.web.selenium.WebDriverProvider; -import org.openqa.selenium.By; -import org.openqa.selenium.NoSuchElementException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.TimeUnit; - -public class LogSearchUISteps extends AbstractLogSearchSteps { - - private static final Logger LOG = LoggerFactory.getLogger(LogSearchUISteps.class); - - private final WebDriverProvider driverProvider; - - private Home home; - - public LogSearchUISteps(WebDriverProvider driverProvider) { - this.driverProvider = driverProvider; - } - - @BeforeScenario - public void initHomePage() { - home = new Home(driverProvider); - LOG.info("Init home page: {}", home.getCurrentUrl()); - } - - @AfterScenario - public void deleteCookies() { - LOG.info("Delete all cookies..."); - home.manage().deleteAllCookies(); - } - - @BeforeStories - public void beforeStories() throws Exception { - initDockerContainer(); - LOG.info("Initialize web driver..."); - StoryDataRegistry.INSTANCE.getWebDriverProvider().initialize(); - LOG.info("Web driver details: {}", StoryDataRegistry.INSTANCE.getWebDriverProvider().get().toString()); - } - - @AfterStory - public void closePage() throws Exception { - LOG.info("Closing web driver"); - StoryDataRegistry.INSTANCE.getWebDriverProvider().end(); - } - - @Given("open logsearch home page") - public void initBrowser() { - LOG.info("Delete all cookies..."); - home.manage().deleteAllCookies(); - LOG.info("Open home page: {}", home.getCurrentUrl()); - home.open(); - } - - @When("login with $username / $password") - public void login(@Named("username") String userName, @Named("password") String password) { - LOG.info("Type username: {}", userName); - home.findElement(By.id("username")).sendKeys(userName); - LOG.info("Type password: {}", password); - home.findElement(By.id("password")).sendKeys(password); - LOG.info("Click on Sign In button."); - home.findElement(By.cssSelector("login-form > div > form > button")).click(); - closeTourPopup(); - } - - @Then("page contains text: '$text'") - public void contains(@Named("text") String text) { - LOG.info("Check page contains text: '{}'", text); - home.found(text); - } - - @Then("page does not contain text: '$text'") - public void notContains(@Named("text") String text) { - LOG.info("Check page does not contain text: '{}'", text); - home.notFound(text); - } - - @When("wait $seconds seconds") - public void waitSeconds(@Named("second") String second) { - LOG.info("Wait {} seconds...", second); - home.manage().timeouts().implicitlyWait(Integer.parseInt(second), TimeUnit.SECONDS); - } - - @When("click on element: $xpath (xpath)") - public void clickOnElementByXPath(@Named("xpath") String xPath) { - LOG.info("Click on element by xpath: '{}'", xPath); - driverProvider.get().findElement(By.xpath(xPath)).click(); - } - - @When("click on element: $id (id)") - public void clickOnElementById(@Named("id") String id) { - LOG.info("Click on element by id: '{}'", 
id); - driverProvider.get().findElement(By.id(id)).click(); - } - - @When("click on element: $css (css selector)") - public void clickOnElementByCssSelector(@Named("css") String cssSelector) { - LOG.info("Click on element by css selector: '{}'", cssSelector); - driverProvider.get().findElement(By.cssSelector(cssSelector)).click(); - } - - @Then("element exists with xpath: $xpath") - public void findByXPath(@Named("xpath") String xPath) { - LOG.info("Find element by xpath: '{}'", xPath); - Assert.assertNotNull(home.findElement(By.xpath(xPath))); - } - - @Then("element exists with id: $id") - public void findById(@Named("id") String id) { - LOG.info("Find element by id: '{}'", id); - Assert.assertNotNull(home.findElement(By.id(id))); - } - - @Then("element exists with css selector: $css") - public void findByCssSelector(@Named("css") String cssSelector) { - LOG.info("Find element by css selector: '{}'", cssSelector); - Assert.assertNotNull(home.findElement(By.cssSelector(cssSelector))); - } - - @Then("element text equals '$text', with xpath $xpath") - public void equalsByXPath(@Named("text") String text, @Named("xpath") String xPath) { - LOG.info("Check text of the element (xpath: '{}') equals with '{}'", xPath, text); - Assert.assertEquals(text, home.findElement(By.xpath(xPath)).getText()); - } - - @Then("element text equals '$text' with id $id") - public void equalsById(@Named("text") String text, @Named("id") String id) { - LOG.info("Check text of the element (id: '{}') equals with '{}'", id, text); - Assert.assertEquals(text, home.findElement(By.id(id)).getText()); - } - - @Then("element text equals '$text' with css selector $css") - public void equalsCssSelector(@Named("text") String text, @Named("css") String cssSelector) { - LOG.info("Check text of the element (css selector: '{}') equals with '{}'", cssSelector, text); - Assert.assertEquals(text, home.findElement(By.cssSelector(cssSelector)).getText()); - } - - @Then("element does not exist with xpath: $xpath") - public void doNotFindByXPath(@Named("xpath") String xPath) { - try { - LOG.info("Check that element does not exist with xpath: {}", xPath); - home.findElement(By.xpath(xPath)); - Assert.fail(String.format("Element is found. xPath: '%s'", xPath)); - } catch (NoSuchElementException e) { - // success - } - } - - @Then("element does not exist with id: $id") - public void doNotFindById(@Named("id") String id) { - try { - LOG.info("Check that element does not exist with id: {}", id); - home.findElement(By.id(id)); - Assert.fail(String.format("Element is found. id: '%s'", id)); - } catch (NoSuchElementException e) { - // success - } - } - - @Then("element does not exist with css selector: $css") - public void doNotFindByCssSelector(@Named("css") String cssSelector) { - try { - LOG.info("Check that element does not exist with css selector: {}", cssSelector); - home.findElement(By.cssSelector(cssSelector)); - Assert.fail(String.format("Element is found.
css selector: '%s'", cssSelector)); - } catch (NoSuchElementException e) { - // success - } - } - - private void closeTourPopup() { - LOG.info("Close Tour popup if needed."); - try { - home.findElement(By.cssSelector("div.modal-footer > button.btn.btn-default")).click(); - } catch (NoSuchElementException ex) { - // do nothing - no popup - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java deleted file mode 100644 index 44205409f62..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.steps; - -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrDocumentList; -import org.jbehave.core.annotations.Named; -import org.jbehave.core.annotations.Then; -import org.junit.Assert; - -import java.io.IOException; - -public class SolrSteps { - - @Then("the number of docs is: ") - public void numberOfDocsForComponent(@Named("component") String component, @Named("docSize") int docSize) - throws IOException, SolrServerException, InterruptedException { - SolrClient solrClient = StoryDataRegistry.INSTANCE.getSolrClient(); - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery(String.format("type:%s", component)); - solrQuery.setStart(0); - solrQuery.setRows(20); - QueryResponse queryResponse = solrClient.query(solrQuery); - SolrDocumentList list = queryResponse.getResults(); - Assert.assertEquals(docSize, list.size()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchBackendStories.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchBackendStories.java deleted file mode 100644 index 127b67a96d7..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchBackendStories.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.story; - -import java.util.List; - -import org.apache.ambari.logsearch.steps.LogSearchApiSteps; -import org.apache.ambari.logsearch.steps.LogSearchConfigApiSteps; -import org.apache.ambari.logsearch.steps.LogSearchDockerSteps; -import org.apache.ambari.logsearch.steps.SolrSteps; -import org.jbehave.core.configuration.Configuration; -import org.jbehave.core.configuration.MostUsefulConfiguration; -import org.jbehave.core.junit.JUnitStories; -import org.jbehave.core.reporters.Format; -import org.jbehave.core.reporters.StoryReporterBuilder; -import org.jbehave.core.steps.InjectableStepsFactory; -import org.jbehave.core.steps.InstanceStepsFactory; -import org.junit.Test; - -import com.google.common.base.Predicate; -import com.google.common.collect.Collections2; -import com.google.common.collect.Lists; - -public class LogSearchBackendStories extends JUnitStories { - - private static final String BACKEND_STORIES_LOCATION_PROPERTY = "backend.stories.location"; - private static final String STORY_SUFFIX = ".story"; - - @Override - public Configuration configuration() { - return new MostUsefulConfiguration() - .useStoryLoader(LogSearchStoryLocator.getStoryLoader(BACKEND_STORIES_LOCATION_PROPERTY, this.getClass())) - .useStoryReporterBuilder( - new StoryReporterBuilder().withFailureTrace(true).withDefaultFormats().withFormats(Format.CONSOLE, Format.TXT)); - } - - @Override - public InjectableStepsFactory stepsFactory() { - return new InstanceStepsFactory(configuration(), - new LogSearchDockerSteps(), - new SolrSteps(), - new LogSearchApiSteps(), - new LogSearchConfigApiSteps()); - } - - @Test - public void run() throws Throwable { - super.run(); - } - - @Override - protected List storyPaths() { - List backendStories = LogSearchStoryLocator.findStories(BACKEND_STORIES_LOCATION_PROPERTY, STORY_SUFFIX, this.getClass()); - return Lists.newArrayList(Collections2.filter(backendStories, new Predicate() { - @Override - public boolean apply(String storyFileName) { - return !storyFileName.endsWith("ui.story"); - } - })); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStoryLocator.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStoryLocator.java deleted file mode 100644 index bed799948f3..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStoryLocator.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.story; - -import com.google.common.collect.Lists; -import org.apache.commons.lang.StringUtils; -import org.jbehave.core.io.LoadFromClasspath; -import org.jbehave.core.io.LoadFromRelativeFile; -import org.jbehave.core.io.StoryFinder; -import org.jbehave.core.io.StoryLoader; - -import java.io.File; -import java.net.URL; -import java.util.Arrays; -import java.util.List; - -import static org.jbehave.core.io.CodeLocations.codeLocationFromClass; - -/** - * Helper class for loading story files from the classpath or externally - based on system properties - */ -public class LogSearchStoryLocator { - - private LogSearchStoryLocator() { - } - - /** - * Get the proper story loader based on story location property (if empty or NONE - use story loading from classpath) - * @param property Story location property (absolute path - folder) - * @param clazz Class of the *Stories object - */ - public static StoryLoader getStoryLoader(String property, Class clazz) { - boolean useExternalStoryLocation = useExternalStoryLocation(property); - if (useExternalStoryLocation) { - try { - return new LoadFromRelativeFile(new URL("file://" + System.getProperty(property))); - } catch (Exception e) { - throw new RuntimeException("Cannot load story files from url: file://" + System.getProperty(property)); - } - } else { - return new LoadFromClasspath(clazz); - } - } - - - /** - * Find stories based on story location property, if the property is not set or NONE, then the story files will be loaded from the classpath - * @param property Story location property (absolute path - folder) - * @param suffix Story suffix for specific stories - i.e. 
: .ui.story - * @param clazz Class of the *Stories object - */ - public static List findStories(String property, String suffix, Class clazz) { - List stories = null; - if (useExternalStoryLocation(property)) { - stories = findStoriesInFolder(System.getProperty(property), suffix); - } else { - stories = new StoryFinder() - .findPaths(codeLocationFromClass(clazz).getFile(), Arrays.asList(String.format("**/*%s", suffix)), null); - } - return stories; - } - - private static List findStoriesInFolder(String folderAbsolutePath, String suffix) { - List results = Lists.newArrayList(); - File folder = new File(folderAbsolutePath); - File[] listOfFiles = folder.listFiles(); - if (listOfFiles != null) { - for (File file : listOfFiles) { - if (file.getName().endsWith(suffix)) { - results.add(file.getName()); - } - } - } - return results; - } - - private static boolean useExternalStoryLocation(String property) { - String storyLocationProp = System.getProperty(property); - return StringUtils.isNotEmpty(storyLocationProp) && !"NONE".equals(storyLocationProp); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchUIStories.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchUIStories.java deleted file mode 100644 index 5417ab1289f..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchUIStories.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.story; - -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.apache.ambari.logsearch.steps.LogSearchDockerSteps; -import org.apache.ambari.logsearch.steps.LogSearchUISteps; -import org.jbehave.core.configuration.Configuration; -import org.jbehave.core.Embeddable; -import org.jbehave.core.embedder.executors.SameThreadExecutors; -import org.jbehave.core.junit.JUnitStories; -import org.jbehave.core.reporters.StoryReporterBuilder; -import org.jbehave.core.steps.InjectableStepsFactory; -import org.jbehave.core.steps.InstanceStepsFactory; -import org.jbehave.web.selenium.RemoteWebDriverProvider; -import org.jbehave.web.selenium.SeleniumConfiguration; -import org.jbehave.web.selenium.SeleniumContext; -import org.jbehave.web.selenium.WebDriverProvider; -import org.jbehave.web.selenium.WebDriverScreenshotOnFailure; -import org.openqa.selenium.Platform; -import org.openqa.selenium.remote.DesiredCapabilities; - -import java.util.Arrays; -import java.util.List; - -import static org.jbehave.core.io.CodeLocations.codeLocationFromClass; -import static org.jbehave.core.reporters.Format.CONSOLE; -import static org.jbehave.core.reporters.Format.HTML; -import static org.jbehave.core.reporters.Format.TXT; -import static org.jbehave.core.reporters.Format.XML; - -public class LogSearchUIStories extends JUnitStories { - - private WebDriverProvider driverProvider; - private SeleniumContext context; - - private static final String UI_STORIES_LOCATION_PROPERTY = "ui.stories.location"; - private static final String STORY_SUFFIX = ".ui.story"; - - public LogSearchUIStories() { - String dockerHost = System.getProperty("docker.host") != null ? System.getProperty("docker.host") : "localhost"; - System.setProperty("REMOTE_WEBDRIVER_URL", String.format("http://%s:4444/wd/hub", dockerHost)); - DesiredCapabilities capability = DesiredCapabilities.firefox(); - capability.setPlatform(Platform.LINUX); - capability.setVersion("45.8.0"); - driverProvider = new RemoteWebDriverProvider(capability); - StoryDataRegistry.INSTANCE.setWebDriverProvider(driverProvider); - context = new SeleniumContext(); - configuredEmbedder().useExecutorService(new SameThreadExecutors().create(configuredEmbedder().embedderControls())); - } - - @Override - public Configuration configuration() { - Class embeddableClass = this.getClass(); - return new SeleniumConfiguration() - .useSeleniumContext(context) - .useWebDriverProvider(driverProvider) - .useStoryLoader(LogSearchStoryLocator.getStoryLoader(UI_STORIES_LOCATION_PROPERTY, this.getClass())) - .useStoryReporterBuilder(new StoryReporterBuilder() - .withCodeLocation(codeLocationFromClass(embeddableClass)) - .withDefaultFormats() - .withFormats(CONSOLE, TXT, HTML, XML)); - } - - @Override - public InjectableStepsFactory stepsFactory() { - Configuration configuration = configuration(); - return new InstanceStepsFactory(configuration, new LogSearchDockerSteps(), new LogSearchUISteps(driverProvider), - new WebDriverScreenshotOnFailure(driverProvider, configuration.storyReporterBuilder())); - } - - @Override - protected List storyPaths() { - return LogSearchStoryLocator.findStories(UI_STORIES_LOCATION_PROPERTY, STORY_SUFFIX, this.getClass()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/AbstractPage.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/AbstractPage.java deleted file mode 100644 index b6d0a58813d..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/AbstractPage.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web; - -import org.jbehave.web.selenium.WebDriverPage; -import org.jbehave.web.selenium.WebDriverProvider; - -import java.util.List; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.fail; - -public abstract class AbstractPage extends WebDriverPage { - - public AbstractPage(WebDriverProvider driverProvider) { - super(driverProvider); - } - - public void found(String text) { - found(getPageSource(), text); - } - - public void found(String pageSource, String text) { - if (!pageSource.contains(escapeHtml(text))) { - fail("Text: '" + text + "' not found in page '" + pageSource + "'"); - } - } - - public void found(List<String> texts) { - for (String text : texts) { - found(text); - } - } - - public void notFound(String text) { - notFound(getPageSource(), text); - } - - public void notFound(String pageSource, String text) { - assertThat(pageSource.contains(escapeHtml(text)), is(false)); - } - - private String escapeHtml(String text) { - return text.replace("<", "&lt;").replace(">", "&gt;"); - } -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/Home.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/Home.java deleted file mode 100644 index 6c576d49130..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/web/Home.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ -package org.apache.ambari.logsearch.web; - -import org.apache.ambari.logsearch.domain.StoryDataRegistry; -import org.jbehave.web.selenium.WebDriverProvider; - -import java.util.concurrent.TimeUnit; - -public class Home extends AbstractPage { - - public Home(WebDriverProvider driverProvider) { - super(driverProvider); - } - - public void open() { - get(String.format("http://%s:%d/index.html", - StoryDataRegistry.INSTANCE.getDockerHost(), - StoryDataRegistry.INSTANCE.getLogsearchPort())); - manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/log4j.properties b/ambari-logsearch/ambari-logsearch-it/src/test/resources/log4j.properties deleted file mode 100644 index 956bc6364e9..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/log4j.properties +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=INFO, stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_api_tests.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_api_tests.story deleted file mode 100644 index 0af00f58f99..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_api_tests.story +++ /dev/null @@ -1,17 +0,0 @@ -Meta: - -Narrative: -As a user -I want to perform queries against Log Search api -So that I can validate the json outputs - -Scenario: Log Search API JSON responses - -Given logsearch docker container -When LogSearch api query sent: -Then The api query result is - -Examples: -|apiQuery|jsonResult| -|/api/v1/service/logs/schema/fields|service-log-schema.json| -|/api/v1/service/logs/levels/counts?page=0&pageSize=25&startIndex=0&q=*%3A*|service-log-level-counts-values.json| \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_cofig_api_tests.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_cofig_api_tests.story deleted file mode 100644 index eafe26871fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/log_search_cofig_api_tests.story +++ /dev/null @@ -1,17 +0,0 @@ -Scenario: scenario description - -Given logsearch docker container -When LogSearch api request sent: /api/v1/shipper/input/cl1/services/ambari -Then Result is an input.config of ambari_audit with log file path /root/test-logs/ambari-server/ambari-audit.log - -Given logsearch docker container -When Update input config of ambari_audit path to /root/test-logs/ambari-server/ambari-audit.log.1 at /api/v1/shipper/input/cl1/services/ambari -Then Result is an 
input.config of ambari_audit with log file path /root/test-logs/ambari-server/ambari-audit.log.1 - -Given logsearch docker container -When Update input config with data {"unknownField":[]} at /api/v1/shipper/input/cl1/services/ambari -Then Result is status code 400 - -Given logsearch docker container -When Update input config with data not_a_json at /api/v1/shipper/input/cl1/services/ambari -Then Result is status code 400 diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/logfeeder_parsing_tests.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/logfeeder_parsing_tests.story deleted file mode 100644 index 63e5d4cd0a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/backend/logfeeder_parsing_tests.story +++ /dev/null @@ -1,20 +0,0 @@ -Story Service logs are parsed and stored into Solr - -Narrative: -As a user -I want to start logsearch/logfeeder/solr components in a docker container with test logs -So that I can parse and store the logs into Solr - -Scenario: Number of logs for components - -Given logsearch docker container -When logfeeder started (parse logs & send data to solr) -Then the number of docs is: - -Examples: -|component|docSize| -|logsearch_app|1| -|zookeeper|3| -|hst_agent|4| -|secure_log|8| -|system_message|17| diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story deleted file mode 100644 index 4824335406b..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story +++ /dev/null @@ -1,20 +0,0 @@ -Meta: - -Narrative: -As a user -I want to start LogSearch services and login to the UI -So that I can validate the proper user - -Scenario: login with admin/admin - -Given logsearch docker container -And open logsearch home page -When login with admin / admin -Then page contains text: 'Refresh' - -Scenario: login with admin and wrong password - -Given logsearch docker container -And open logsearch home page -When login with admin / wrongpassword -Then page does not contain text: 'Refresh' \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-input-config/input.config-smartsense.json.j2 b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-input-config/input.config-smartsense.json.j2 deleted file mode 100644 index d2037a5c973..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-input-config/input.config-smartsense.json.j2 +++ /dev/null @@ -1,85 +0,0 @@ -{# - # Licensed to the Apache Software Foundation (ASF) under one - # or more contributor license agreements. See the NOTICE file - # distributed with this work for additional information - # regarding copyright ownership. The ASF licenses this file - # to you under the Apache License, Version 2.0 (the - # "License"); you may not use this file except in compliance - # with the License. You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- #} -{ - "input":[ - { - "type":"hst_agent", - "rowtype":"service", - "path":"{{default('/configurations/hst-log4j/hst.log.dir', '/var/log/hst')}}/hst-agent.log" - }, - { - "type":"hst_server", - "rowtype":"service", - "path":"{{default('/configurations/hst-log4j/hst.log.dir', '/var/log/hst')}}/hst-server.log" - }, - { - "type":"activity_analyser", - "rowtype":"service", - "path":"{{default('/configurations/activity-log4j/activity.log.dir', '/var/log/smartsense-activity')}}/activity-analyzer.log" - }, - { - "type":"activity_explorer", - "rowtype":"service", - "path":"{{default('/configurations/activity-log4j/activity.log.dir', '/var/log/smartsense-activity')}}/activity-explorer.log" - } - ], - "filter":[ - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "hst_server", - "activity_analyser", - "activity_explorer" - ] - } - }, - "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - } - } - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "hst_agent" - ] - } - }, - "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n", - "multiline_pattern":"^(%{LOGLEVEL:level}%{SPACE}%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{LOGLEVEL:level}%{SPACE}%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVAFILE:file}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - } - } - } - ] -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json deleted file mode 100644 index 7a6fcd896a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "vNameValues": [ - { - "name": "FATAL", - "value": "0" - }, - { - "name": "ERROR", - "value": "1" - }, - { - "name": "WARN", - "value": "7" - }, - { - "name": "INFO", - "value": "8" - }, - { - "name": "DEBUG", - "value": "1" - }, - { - "name": "TRACE", - "value": "0" - }, - { - "name": "UNKNOWN", - "value": "25" - } - ], - "listSize": 7 -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json deleted file mode 100644 index 2933123dfb7..00000000000 --- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "logfile_line_number": "tint", - "logtime": "tdate", - "text": "text_std_token_lower_case", - "host": "key_lower_case", - "seq_num": "tlong", - "logtype": "key_lower_case", - "rowtype": "key_lower_case", - "log_message": "text_std_token_lower_case", - "line_number": "tint", - "type": "key_lower_case", - "ip": "string", - "id": "string", - "thread_name": "key_lower_case", - "level": "key_lower_case", - "file": "key_lower_case", - "bundle_id": "key_lower_case", - "path": "key_lower_case", - "cluster": "key_lower_case", - 
"case_id": "key_lower_case", - "logger_name": "key_lower_case", - "method": "key_lower_case", - "event_count": "tlong" -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/pom.xml deleted file mode 100644 index bdcff62d31e..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/pom.xml +++ /dev/null @@ -1,85 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - jar - Ambari Logsearch Log Feeder Container Registry - ambari-logsearch-logfeeder-container-registry - - - junit - junit - test - - - org.slf4j - slf4j-api - 1.7.25 - - - org.slf4j - slf4j-log4j12 - 1.7.25 - - - com.fasterxml.jackson.core - jackson-databind - 2.9.4 - - - com.fasterxml.jackson.core - jackson-annotations - 2.9.4 - - - commons-lang - commons-lang - 2.6 - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.3 - - ${jdk.version} - ${jdk.version} - - - - org.apache.maven.plugins - maven-jar-plugin - 3.1.0 - - - **/log4j.properties - - - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/ContainerMetadata.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/ContainerMetadata.java deleted file mode 100644 index df3a80a5fef..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/ContainerMetadata.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder; - -/** - * Holds container related metadata - **/ -public interface ContainerMetadata { - - /** - * Id of the container, used for getting the right log path - * @return container id - */ - String getId(); - - /** - * Name of the container - * @return container name - */ - String getName(); - - /** - * Hostname of the container, can be container host itself or the actual hostname - * @return container host name - */ - String getHostName(); - - /** - * Log label - * @return log type label - */ - String getLogTypeLabel(); - - /** - * Log path of the container (should be json file) - * @return log path - */ - String getLogPath(); - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/ContainerRegistry.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/ContainerRegistry.java deleted file mode 100644 index 94f6a821075..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/ContainerRegistry.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder; - -import java.util.Map; - -/** - * Responsible of register or drop new / existing containers. - * @param type of metadata - could be docker or other container implementation - */ -public interface ContainerRegistry { - - /** - * Register process of running containers - */ - void register(); - - /** - * Holds container metadata per log component type and container id. - * @return container metadata - */ - Map> getContainerMetadataMap(); - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerContainerRegistry.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerContainerRegistry.java deleted file mode 100644 index c3e816e0ea0..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerContainerRegistry.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.docker; - -import org.apache.ambari.logfeeder.ContainerRegistry; -import org.apache.ambari.logfeeder.docker.command.DockerInspectContainerCommand; -import org.apache.ambari.logfeeder.docker.command.DockerListContainerCommand; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - -/** - * Register docker metadata from docker containers on the host (with listing containers and inspecting them) - */ -public final class DockerContainerRegistry implements ContainerRegistry { - - private static final String LOGFEEDER_CONTAINER_REGISTRY_DOCKER_INTERVAL = "logfeeder.container.registry.docker.interval"; - private static final Logger logger = LoggerFactory.getLogger(DockerContainerRegistry.class); - - private static DockerContainerRegistry INSTANCE = null; - private final Properties configs; - private Map> dockerMetadataMap = new ConcurrentHashMap<>(); - private int waitIntervalMin = 5; - - private DockerContainerRegistry(Properties configs) { - this.configs = configs; - init(configs); - } - - @Override - public synchronized void register() { - Map> actualDockerMetadataMap = renewMetadata(); - if (!actualDockerMetadataMap.isEmpty()) { - dockerMetadataMap.putAll(actualDockerMetadataMap); - dockerMetadataMap = dockerMetadataMap - .entrySet() - .stream() - .filter(e -> actualDockerMetadataMap.keySet().contains(e.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - for (Map.Entry> entry : dockerMetadataMap.entrySet()) { - for (Map.Entry metadataEntry : entry.getValue().entrySet()) { - logger.debug("Found container metadata: {}", entry.getValue().toString()); - } - } - } - } - - private Map> renewMetadata() { - final Map> actualDockerMetadataMap = new HashMap<>(); - final List containerIds = new DockerListContainerCommand().execute(null); - final Map params = new HashMap<>(); - - params.put("containerIds", StringUtils.join(containerIds, ",")); - List> containerDataList = new DockerInspectContainerCommand().execute(params); - - for (Map containerDataMap : containerDataList) { - String id = containerDataMap.get("Id").toString(); - String name = containerDataMap.get("Name").toString(); - String logPath = containerDataMap.get("LogPath").toString(); - Map dockerConfigMap = (HashMap) containerDataMap.get("Config"); - String hostname = dockerConfigMap.get("Hostname").toString(); - Map labels = (Map) dockerConfigMap.get("Labels"); - Map stateMap = (HashMap) containerDataMap.get("State"); - String componentType = labels.get("logfeeder.log.type"); - boolean running = (Boolean) stateMap.get("Running"); - long timestamp = running ? 
convertDateStrToLong((String)stateMap.get("StartedAt")) : convertDateStrToLong((String)stateMap.get("FinishedAt")); - - if (componentType != null) { - if (actualDockerMetadataMap.containsKey(componentType)) { - Map componentMetadataMap = actualDockerMetadataMap.get(componentType); - componentMetadataMap.put(id, new DockerMetadata(id, name, hostname, componentType, logPath, running, timestamp)); - actualDockerMetadataMap.put(componentType, componentMetadataMap); - } else { - Map componentMetadataMap = new HashMap<>(); - componentMetadataMap.put(id, new DockerMetadata(id, name, hostname, componentType, logPath, running, timestamp)); - actualDockerMetadataMap.put(componentType, componentMetadataMap); - } - } else { - logger.debug("Ignoring docker metadata from registry as container (id: {}, name: {}) as it has no 'logfeeder.log.type' label", id, name); - } - } - - return actualDockerMetadataMap; - } - - @Override - public synchronized Map> getContainerMetadataMap() { - return dockerMetadataMap; - } - - public void init(Properties configs) { - // init docker related data - String waitStr = configs.getProperty(LOGFEEDER_CONTAINER_REGISTRY_DOCKER_INTERVAL, "5"); - setWaitIntervalMin(Integer.parseInt(waitStr)); - // TODO: add docker authentication settings through this - } - - public static synchronized DockerContainerRegistry getInstance(Properties dockerConfig) { - if (INSTANCE == null) { - return new DockerContainerRegistry(dockerConfig); - } else { - return INSTANCE; - } - } - - public int getWaitIntervalMin() { - return waitIntervalMin; - } - - public void setWaitIntervalMin(int waitIntervalMin) { - this.waitIntervalMin = waitIntervalMin; - } - - private long convertDateStrToLong(String timestampStr) { - LocalDateTime localDateTime = LocalDateTime.parse(timestampStr, DateTimeFormatter.ISO_DATE_TIME); - return localDateTime.toInstant(ZoneOffset.ofTotalSeconds(0)).toEpochMilli(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerContainerRegistryMonitor.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerContainerRegistryMonitor.java deleted file mode 100644 index 30c328d5c08..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerContainerRegistryMonitor.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.docker; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Periodically re-register docker container metadata for {@link org.apache.ambari.logfeeder.docker.DockerContainerRegistry} - * based on a time interval in seconds (property: logfeeder.container.registry.docker.interval, default: 5) - */ -public class DockerContainerRegistryMonitor implements Runnable { - - private static final Logger logger = LoggerFactory.getLogger(DockerContainerRegistryMonitor.class); - - private final DockerContainerRegistry registry; - - public DockerContainerRegistryMonitor(DockerContainerRegistry registry) { - this.registry = registry; - } - - @Override - public void run() { - while (!Thread.currentThread().isInterrupted()) { - try { - logger.debug("Gather docker containers metadata ..."); - registry.register(); - Thread.sleep(1000 * registry.getWaitIntervalMin()); - } catch (Exception e) { - logger.error("Error during gather docker containers metadata.", e); - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerMetadata.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerMetadata.java deleted file mode 100644 index 65842b402cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/DockerMetadata.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.docker; - -import org.apache.ambari.logfeeder.ContainerMetadata; - -public class DockerMetadata implements ContainerMetadata { - - private final String id; - private final String name; - private final String logTypeLabel; - private final String logPath; - private final String hostName; - private final boolean running; - private final long timestamp; - - public DockerMetadata(String id, String name, String hostName, String logTypeLabel, String logPath, boolean running, long timestamp) { - this.id = id; - this.name = name; - this.hostName = hostName; - this.logTypeLabel = logTypeLabel; - this.logPath = logPath; - this.running = running; - this.timestamp = timestamp; - } - - public String getId() { - return id; - } - - public String getName() { - return name; - } - - public String getHostName() { - return hostName; - } - - public String getLogTypeLabel() { - return logTypeLabel; - } - - public String getLogPath() { - return logPath; - } - - public boolean isRunning() { - return running; - } - - public long getTimestamp() { - return timestamp; - } - - @Override - public String toString() { - return "DockerMetadata{" + - "id='" + id + '\'' + - ", name='" + name + '\'' + - ", logTypeLabel='" + logTypeLabel + '\'' + - ", logPath='" + logPath + '\'' + - ", hostName='" + hostName + '\'' + - '}'; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/CommandExecutionHelper.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/CommandExecutionHelper.java deleted file mode 100644 index aa65c600cc6..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/CommandExecutionHelper.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.docker.command; - -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public class CommandExecutionHelper { - - public static CommandResponse executeCommand(List commands, Map envMap) throws Exception { - ProcessBuilder processBuilder = new ProcessBuilder(commands); - Map env = processBuilder.environment(); - if (envMap != null) { - env.putAll(envMap); - } - Process shell = processBuilder.start(); - - BufferedReader stdInput = new BufferedReader(new InputStreamReader(shell.getInputStream())); - BufferedReader stdError = new BufferedReader(new InputStreamReader(shell.getErrorStream())); - List stdOutLines = new ArrayList<>(); - StringBuilder errOut = new StringBuilder(); - String s = null; - while ((s = stdInput.readLine()) != null) { - stdOutLines.add(s); - } - while ((s = stdError.readLine()) != null) { - errOut.append(s); - } - int exitCode = shell.waitFor(); - - return new CommandResponse(exitCode, stdOutLines, errOut.toString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/CommandResponse.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/CommandResponse.java deleted file mode 100644 index 7ead7915991..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/CommandResponse.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.docker.command; - -import java.util.List; - -/** - * Represent a bash command response (stdout as string list, stderr in string and an exit code) - */ -public class CommandResponse { - private final int exitCode; - private final List stdOut; - private final String stdErr; - - CommandResponse(int exitCode, List stdOut, String stdErr) { - this.exitCode = exitCode; - this.stdOut = stdOut; - this.stdErr = stdErr; - } - - public int getExitCode() { - return exitCode; - } - - public List getStdOut() { - return stdOut; - } - - public String getStdErr() { - return stdErr; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/ContainerCommand.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/ContainerCommand.java deleted file mode 100644 index db3de0198c8..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/ContainerCommand.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.docker.command; - -import java.util.Map; - -/** - * Responsible of execute container commands. (like listing or inspecting containers) - * @param - */ -public interface ContainerCommand { - - /** - * Execute a container command - * @param params extra parameters for the command - * @return return type of the execution - can be anything - */ - RESPONSE_TYPE execute(Map params); -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/DockerInspectContainerCommand.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/DockerInspectContainerCommand.java deleted file mode 100644 index d4fc182671d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/DockerInspectContainerCommand.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.docker.command; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -/** - * Run 'docker inspect' on container ids - and read response and convert it from json response to a map object - */ -public class DockerInspectContainerCommand implements ContainerCommand>> { - - private static final Logger logger = LoggerFactory.getLogger(DockerInspectContainerCommand.class); - - @Override - public List> execute(Map params) { - List containerIds = Arrays.asList(params.get("containerIds").split(",")); - CommandResponse commandResponse = null; - List> listResponse = new ArrayList<>(); - List commandList = new ArrayList<>(); - commandList.add("/usr/local/bin/docker"); - commandList.add("inspect"); - commandList.addAll(containerIds); - try { - commandResponse = CommandExecutionHelper.executeCommand(commandList, null); - if (commandResponse.getExitCode() != 0) { - logger.error("Error during inspect containers request: {} (exit code: {})", commandResponse.getStdErr(), commandResponse.getExitCode()); - } else { - String jsonResponse = StringUtils.join(commandResponse.getStdOut(), ""); - ObjectMapper mapper = new ObjectMapper(); - listResponse = mapper.readValue(jsonResponse, List.class); - } - } catch (Exception e) { - logger.error("Error during inspect containers request", e); - } - return listResponse; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/DockerListContainerCommand.java b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/DockerListContainerCommand.java deleted file mode 100644 index a0596caf41f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/java/org/apache/ambari/logfeeder/docker/command/DockerListContainerCommand.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.docker.command; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * Run 'docker ps -a -q' (+ logfeeder type filter) and save the response in a string list (container ids) - */ -public class DockerListContainerCommand implements ContainerCommand> { - - private static final Logger logger = LoggerFactory.getLogger(DockerListContainerCommand.class); - - @Override - public List execute(Map params) { - CommandResponse commandResponse = null; - List commandList = new ArrayList<>(); - commandList.add("/usr/local/bin/docker"); - commandList.add("ps"); - commandList.add("-a"); - commandList.add("-q"); - // TODO: add --filter="label=logfeeder.log.type" - try { - commandResponse = CommandExecutionHelper.executeCommand(commandList, null); - if (commandResponse.getExitCode() != 0) { - logger.error("Error during inspect containers request: {} (exit code: {})", commandResponse.getStdErr(), commandResponse.getExitCode()); - } - } catch (Exception e) { - logger.error("Error during inspect containers request", e); - } - return commandResponse != null ? commandResponse.getStdOut() : null; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/resources/log4j.properties b/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/resources/log4j.properties deleted file mode 100644 index 6380ac70e02..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-container-registry/src/main/resources/log4j.properties +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
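The two commands above are meant to be chained: list the container ids first, then inspect them. A hedged usage sketch follows (illustrative only; it relies on the `/usr/local/bin/docker` path hard-coded in the deleted commands and assumes the stripped generics were `Map<String, String>` parameters and a `List<Map<String, Object>>` inspect result).

```java
import org.apache.ambari.logfeeder.docker.command.DockerInspectContainerCommand;
import org.apache.ambari.logfeeder.docker.command.DockerListContainerCommand;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DockerCommandExample {

  public static void main(String[] args) {
    // Collect all container ids first.
    List<String> containerIds = new DockerListContainerCommand().execute(null);
    if (containerIds == null || containerIds.isEmpty()) {
      System.out.println("no containers found");
      return;
    }
    // Then inspect them in one call; the command expects a comma separated id list.
    Map<String, String> params = new HashMap<>();
    params.put("containerIds", String.join(",", containerIds));
    List<Map<String, Object>> details = new DockerInspectContainerCommand().execute(params);
    for (Map<String, Object> container : details) {
      System.out.println(container.get("Id") + " -> " + container.get("Name"));
    }
  }
}
```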
-log4j.rootLogger=DEBUG, stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd'T'HH:mm:ss.SSS} %-5p [%t] - %m%n \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/pom.xml deleted file mode 100644 index fa5f97eff6a..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/pom.xml +++ /dev/null @@ -1,76 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-logfeeder-plugin-api - jar - Ambari Logsearch Log Feeder Plugin Api - http://maven.apache.org - - - UTF-8 - - - - - org.apache.ambari - ambari-logsearch-config-api - ${project.version} - - - junit - junit - test - - - org.slf4j - slf4j-api - 1.7.20 - - - org.codehaus.jackson - jackson-core-asl - 1.9.13 - - - org.codehaus.jackson - jackson-mapper-asl - 1.9.13 - - - com.google.code.gson - gson - 2.2.2 - - - com.google.guava - guava - 25.0-jre - - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/AliasUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/AliasUtil.java deleted file mode 100644 index 521e0bdb1aa..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/AliasUtil.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.plugin.common; - -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.type.TypeReference; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; - -public class AliasUtil { - - private static final Logger LOG = LoggerFactory.getLogger(AliasUtil.class); - - private static final String ALIAS_CONFIG_JSON = "alias_config.json"; - private static HashMap aliasMap = null; - - static { - aliasMap = getJsonFileContentFromClassPath(ALIAS_CONFIG_JSON); - } - - public static enum AliasType { - INPUT, FILTER, MAPPER, OUTPUT - } - - private AliasUtil() { - throw new UnsupportedOperationException(); - } - - public static Object getClassInstance(String key, AliasType aliasType) { - String classFullName = getClassFullName(key, aliasType); - - Object instance = null; - try { - instance = Class.forName(classFullName).getConstructor().newInstance(); - } catch (Exception exception) { - LOG.error("Unsupported class = " + classFullName, exception.getCause()); - } - - if (instance != null) { - boolean isValid = false; - switch (aliasType) { - case FILTER: - isValid = Filter.class.isAssignableFrom(instance.getClass()); - break; - case INPUT: - isValid = Input.class.isAssignableFrom(instance.getClass()); - break; - case OUTPUT: - isValid = Output.class.isAssignableFrom(instance.getClass()); - break; - case MAPPER: - isValid = Mapper.class.isAssignableFrom(instance.getClass()); - break; - default: - LOG.warn("Unhandled aliasType: " + aliasType); - isValid = true; - } - if (!isValid) { - LOG.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name()); - } - } - return instance; - } - - private static String getClassFullName(String key, AliasType aliastype) { - String className = null;// key as a default value; - - HashMap aliasInfo = getAliasInfo(key, aliastype); - String value = aliasInfo.get("klass"); - if (value != null && !value.isEmpty()) { - className = value; - LOG.debug("Class name found for key :" + key + ", class name :" + className + " aliastype:" + aliastype.name()); - } else { - LOG.debug("Class name not found for key :" + key + " aliastype:" + aliastype.name()); - } - - return className; - } - - @SuppressWarnings("unchecked") - private static HashMap getAliasInfo(String key, AliasType aliastype) { - HashMap aliasInfo = new HashMap<>(); - - if (aliasMap != null) { - String typeKey = aliastype.name().toLowerCase(); - HashMap typeJson = (HashMap) aliasMap.get(typeKey); - if (typeJson != null) { - aliasInfo = (HashMap) typeJson.get(key); - } - } - - return aliasInfo; - } - - public static HashMap getJsonFileContentFromClassPath(String fileName) { - ObjectMapper mapper = new ObjectMapper(); - try (InputStream inputStream = AliasUtil.class.getClassLoader().getResourceAsStream(fileName)) { - return mapper.readValue(inputStream, new TypeReference>() {}); - } catch (IOException e) { - LOG.error("Error occurred during loading alias json file: {}", e); - } - return new HashMap(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/ConfigItem.java 
b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/ConfigItem.java deleted file mode 100644 index 5b50a7e4042..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/ConfigItem.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.plugin.common; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.reflect.TypeToken; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.Serializable; -import java.lang.reflect.Type; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public abstract class ConfigItem implements Cloneable, Serializable { - - private static final Logger LOG = LoggerFactory.getLogger(ConfigItem.class); - - private final static String GSON_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - private static Gson gson = new GsonBuilder().setDateFormat(GSON_DATE_FORMAT).create(); - - public static Gson getGson() { - return gson; - } - - private Map configs; - private Map contextFields = new HashMap<>(); - private boolean drain = false; - public MetricData statMetric = new MetricData(getStatMetricName(), false); - - public abstract void init(PROP_TYPE logFeederProperties) throws Exception; - - /** - * Used while logging. 
Keep it short and meaningful - */ - public abstract String getShortDescription(); - - public abstract String getStatMetricName(); - - public abstract boolean logConfigs(); - - public void loadConfig(Map map) { - configs = cloneObject(map); - - Map nvList = getNVList("add_fields"); - if (nvList != null) { - contextFields.putAll(nvList); - } - } - - @SuppressWarnings("unchecked") - public Map getNVList(String key) { - return (Map) configs.get(key); - } - - public Map getConfigs() { - return configs; - } - - public boolean isEnabled() { - return getBooleanValue("is_enabled", true); - } - - public void addMetricsContainers(List metricsList) { - metricsList.add(statMetric); - } - - public void incrementStat(int count) { - statMetric.value += count; - } - - public synchronized void logStat() { - logStatForMetric(statMetric, "Stat"); - } - - public void logStatForMetric(MetricData metric, String prefixStr) { - long currStat = metric.value; - long currMS = System.currentTimeMillis(); - String postFix = ", key=" + getShortDescription(); - if (currStat > metric.prevLogValue) { - LOG.info(prefixStr + ": total_count=" + metric.value + ", duration=" + (currMS - metric.prevLogTime) / 1000 + - " secs, count=" + (currStat - metric.prevLogValue) + postFix); - } - metric.prevLogValue = currStat; - metric.prevLogTime = currMS; - } - - public boolean isDrain() { - return drain; - } - - public void setDrain(boolean drain) { - this.drain = drain; - } - - public List getListValue(String key) { - return getListValue(key, null); - } - - public List getListValue(String key, List defaultValue) { - Object value = configs.get(key); - if (value != null) { - return (List)value; - } else { - return defaultValue; - } - } - - public String getStringValue(String property) { - return getStringValue(property, null); - } - - public String getStringValue(String property, String defaultValue) { - Object strValue = configs.getOrDefault(property, defaultValue); - if (strValue != null) { - return strValue.toString(); - } - return null; - } - - public Boolean getBooleanValue(String property) { - return getBooleanValue(property, false); - } - - public Boolean getBooleanValue(String property, Boolean defaultValue) { - Object booleanValue = configs.getOrDefault(property, defaultValue); - if (booleanValue != null) { - if (booleanValue.getClass().isAssignableFrom(Boolean.class)) { - return (Boolean) booleanValue; - } else { - String strValue = booleanValue.toString(); - return strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("yes"); - } - } - return false; - } - - public Long getLongValue(String property) { - return getLongValue(property, null); - } - - public Long getLongValue(String property, Long defaultValue) { - Object longValue = configs.getOrDefault(property, defaultValue); - if (longValue != null) { - if (longValue.getClass().isAssignableFrom(Long.class)) { - return (Long) longValue; - } else { - return Long.parseLong(longValue.toString()); - } - } - return null; - } - - public Integer getIntValue(String property) { - return getIntValue(property, null); - } - - public Integer getIntValue(String property, Integer defaultValue) { - Object intValue = configs.getOrDefault(property, defaultValue); - if (intValue != null) { - if (intValue.getClass().isAssignableFrom(Integer.class)) { - return (Integer) intValue; - } else { - return Integer.parseInt(intValue.toString()); - } - } - return null; - } - - private Map cloneObject(Map map) { - if (map == null) { - return null; - } - String jsonStr = gson.toJson(map); - Type type 
= new TypeToken>() {}.getType(); - return gson.fromJson(jsonStr, type); - } - - private Object getValue(String property) { - return configs.get(property); - } - - private Object getValue(String property, Object defaultValue) { - return configs.getOrDefault(property, defaultValue); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/LogFeederProperties.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/LogFeederProperties.java deleted file mode 100644 index 7fac01a96f0..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/LogFeederProperties.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.plugin.common; - -import java.io.Serializable; -import java.util.Properties; - -/** - * Static application level configuration interface for Log Feeder - */ -public interface LogFeederProperties extends Serializable { - - /** - * Get all key-value pairs from static application level Log Feeder configuration - */ - Properties getProperties(); -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/MetricData.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/MetricData.java deleted file mode 100644 index 54cdb7e7dec..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/common/MetricData.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
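The typed getters of ConfigItem above coerce raw JSON values (strings, booleans, numbers) into the requested type. The sketch below is not part of the deleted sources; it assumes the stripped generic parameter is bounded by LogFeederProperties and uses an arbitrary `max_lines` key purely for illustration.

```java
import org.apache.ambari.logfeeder.plugin.common.ConfigItem;
import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties;

import java.util.HashMap;
import java.util.Map;

public class ConfigItemExample {

  // Minimal concrete subclass, only here to exercise the inherited typed getters.
  static class DummyConfigItem extends ConfigItem<LogFeederProperties> {
    @Override public void init(LogFeederProperties properties) { }
    @Override public String getShortDescription() { return "dummy"; }
    @Override public String getStatMetricName() { return "dummy.stat"; }
    @Override public boolean logConfigs() { return false; }
  }

  public static void main(String[] args) {
    Map<String, Object> config = new HashMap<>();
    config.put("is_enabled", "yes");  // string coerced to true by getBooleanValue()
    config.put("max_lines", "10");    // arbitrary illustrative key, coerced to 10

    DummyConfigItem item = new DummyConfigItem();
    item.loadConfig(config);

    System.out.println(item.isEnabled());                    // true
    System.out.println(item.getIntValue("max_lines"));       // 10
    System.out.println(item.getStringValue("missing", "-")); // "-" (default)
  }
}
```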
- */ -package org.apache.ambari.logfeeder.plugin.common; - -import java.io.Serializable; - -public class MetricData implements Serializable { - public final String metricsName; - public final boolean isPointInTime; - - public MetricData(String metricsName, boolean isPointInTime) { - this.metricsName = metricsName; - this.isPointInTime = isPointInTime; - } - - public long value = 0; - public long prevPublishValue = 0; - - public long prevLogValue = 0; - public long prevLogTime = System.currentTimeMillis(); - - public int publishCount = 0; // Number of times the metric was published so far -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/filter/Filter.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/filter/Filter.java deleted file mode 100644 index f0982452891..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/filter/Filter.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.plugin.filter; - -import org.apache.ambari.logfeeder.plugin.common.AliasUtil; -import org.apache.ambari.logfeeder.plugin.common.ConfigItem; -import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public abstract class Filter extends ConfigItem { - - private static final Logger LOG = LoggerFactory.getLogger(Filter.class); - - private final Map> postFieldValueMappers = new HashMap<>(); - private FilterDescriptor filterDescriptor; - private PROP_TYPE logFeederProperties; - private Filter nextFilter = null; - private Input input; - private OutputManager outputManager; - - public void loadConfigs(FilterDescriptor filterDescriptor, PROP_TYPE logFeederProperties, OutputManager outputManager) { - this.filterDescriptor = filterDescriptor; - this.logFeederProperties = logFeederProperties; - this.outputManager = outputManager; - } - - public FilterDescriptor getFilterDescriptor() { - return filterDescriptor; - } - - public PROP_TYPE getLogFeederProperties() { - return logFeederProperties; - } - - @Override - public void init(PROP_TYPE logFeederProperties) throws Exception { - initializePostMapValues(); - if (nextFilter != null) { - nextFilter.init(logFeederProperties); - } - } - - private void initializePostMapValues() { - Map> postMapValues = filterDescriptor.getPostMapValues(); - if (postMapValues == null) { - return; - } - for (String fieldName : postMapValues.keySet()) { - List values = postMapValues.get(fieldName); - for (PostMapValues pmv : values) { - for (MapFieldDescriptor mapFieldDescriptor : pmv.getMappers()) { - String mapClassCode = mapFieldDescriptor.getJsonName(); - Mapper mapper = (Mapper) AliasUtil.getClassInstance(mapClassCode, AliasUtil.AliasType.MAPPER); - if (mapper == null) { - LOG.warn("Unknown mapper type: " + mapClassCode); - continue; - } - if (mapper.init(getInput().getShortDescription(), fieldName, mapClassCode, mapFieldDescriptor)) { - List fieldMapList = postFieldValueMappers.computeIfAbsent(fieldName, k -> new ArrayList<>()); - fieldMapList.add(mapper); - } - } - } - } - } - - /** - * Deriving classes should implement this at the minimum - */ - public void apply(String inputStr, InputMarker inputMarker) throws Exception { - // TODO: There is no transformation for string types. 
- if (nextFilter != null) { - nextFilter.apply(inputStr, inputMarker); - } else { - outputManager.write(inputStr, inputMarker); - } - } - - public void apply(Map jsonObj, InputMarker inputMarker) throws Exception { - for (String fieldName : postFieldValueMappers.keySet()) { - Object value = jsonObj.get(fieldName); - if (value != null) { - for (Mapper mapper : postFieldValueMappers.get(fieldName)) { - value = mapper.apply(jsonObj, value); - } - } - } - if (nextFilter != null) { - nextFilter.apply(jsonObj, inputMarker); - } else { - outputManager.write(jsonObj, inputMarker); - } - } - - public void loadConfig(FilterDescriptor filterDescriptor) { - this.filterDescriptor = filterDescriptor; - } - - public Filter getNextFilter() { - return nextFilter; - } - - public void setNextFilter(Filter nextFilter) { - this.nextFilter = nextFilter; - } - - public Input getInput() { - return input; - } - - public void setInput(Input input) { - this.input = input; - } - - public OutputManager getOutputManager() { - return outputManager; - } - - public void setOutputManager(OutputManager outputManager) { - this.outputManager = outputManager; - } - - public void flush() { - // empty - } - - public void close() { - if (nextFilter != null) { - nextFilter.close(); - } - } - - @Override - public boolean isEnabled() { - return filterDescriptor.isEnabled() != null ? filterDescriptor.isEnabled() : true; - } - - @Override - public void addMetricsContainers(List metricsList) { - super.addMetricsContainers(metricsList); - if (nextFilter != null) { - nextFilter.addMetricsContainers(metricsList); - } - } - - @Override - public boolean logConfigs() { - LOG.info("filter=" + getShortDescription()); - return true; - } - - @Override - public String getStatMetricName() { - // no metrics yet - return null; - } - - public Object clone() throws CloneNotSupportedException { - return super.clone(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/filter/mapper/Mapper.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/filter/mapper/Mapper.java deleted file mode 100644 index d52bc01677b..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/filter/mapper/Mapper.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.plugin.filter.mapper; - -import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; - -import java.util.Map; - -public abstract class Mapper { - - private MapFieldDescriptor mapFieldDescriptor; - private PROP_TYPE logFeederProperties; - - private String inputDesc; - private String fieldName; - private String mapClassCode; - - protected void init(String inputDesc, String fieldName, String mapClassCode) { - this.inputDesc = inputDesc; - this.fieldName = fieldName; - this.mapClassCode = mapClassCode; - } - - public void loadConfigs(MapFieldDescriptor mapFieldDescriptor, PROP_TYPE logFeederProperties) { - this.mapFieldDescriptor = mapFieldDescriptor; - this.logFeederProperties = logFeederProperties; - } - - public MapFieldDescriptor getMapFieldDescriptor() { - return mapFieldDescriptor; - } - - public PROP_TYPE getLogFeederProperties() { - return logFeederProperties; - } - - public abstract boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor); - - public abstract Object apply(Map jsonObj, Object value); - - public String getInputDesc() { - return inputDesc; - } - - public String getFieldName() { - return fieldName; - } - - public String getMapClassCode() { - return mapClassCode; - } - - @Override - public String toString() { - return "mapClass=" + mapClassCode + ", input=" + inputDesc + ", fieldName=" + fieldName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/Input.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/Input.java deleted file mode 100644 index 1b1fed5bb1e..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/Input.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
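Mapper is the extension point that the filters above invoke per configured field. Below is a hypothetical mapper (not one of the shipped implementations) showing the contract; the generic bound and the `Map<String, Object>` parameter are assumptions, since this diff stripped the angle-bracket generics.

```java
import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties;
import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper;
import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor;

import java.util.Map;

// Hypothetical mapper that upper-cases a single field value.
public class UpperCaseMapper extends Mapper<LogFeederProperties> {

  @Override
  public boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor) {
    init(inputDesc, fieldName, mapClassCode); // store the common metadata in the base class
    return true;                              // returning true registers the mapper for the field
  }

  @Override
  public Object apply(Map<String, Object> jsonObj, Object value) {
    String upper = String.valueOf(value).toUpperCase();
    jsonObj.put(getFieldName(), upper);
    return upper;
  }
}
```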
- */ -package org.apache.ambari.logfeeder.plugin.input; - -import org.apache.ambari.logfeeder.plugin.common.ConfigItem; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.cache.LRUCache; -import org.apache.ambari.logfeeder.plugin.manager.InputManager; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.apache.ambari.logsearch.config.api.model.inputconfig.Conditions; -import org.apache.ambari.logsearch.config.api.model.inputconfig.Fields; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public abstract class Input extends ConfigItem implements Runnable { - - private static final Logger LOG = LoggerFactory.getLogger(Input.class); - - private INPUT_DESC_TYPE inputDescriptor; - private PROP_TYPE logFeederProperties; - private LogSearchConfigLogFeeder logSearchConfig; - private InputManager inputManager; - private OutputManager outputManager; - private final List outputList = new ArrayList<>(); - private Filter firstFilter; - private boolean isClosed; - private String type; - private String logType; - private boolean useEventMD5 = false; - private boolean genEventMD5 = true; - private Thread thread; - private LRUCache cache; - private String cacheKeyField; - private boolean initDefaultFields; - protected MetricData readBytesMetric = new MetricData(getReadBytesMetricName(), false); - - public void loadConfigs(INPUT_DESC_TYPE inputDescriptor, PROP_TYPE logFeederProperties, - InputManager inputManager, OutputManager outputManager) { - this.inputDescriptor = inputDescriptor; - this.logFeederProperties = logFeederProperties; - this.inputManager = inputManager; - this.outputManager = outputManager; - } - - public void setLogSearchConfig(LogSearchConfigLogFeeder logSearchConfig) { - this.logSearchConfig = logSearchConfig; - } - - public LogSearchConfigLogFeeder getLogSearchConfig() { - return logSearchConfig; - } - - public abstract boolean monitor(); - - public abstract INPUT_MARKER getInputMarker(); - - public abstract boolean isReady(); - - public abstract void setReady(boolean isReady); - - public abstract void checkIn(INPUT_MARKER inputMarker); - - public abstract void lastCheckIn(); - - public abstract String getReadBytesMetricName(); - - public PROP_TYPE getLogFeederProperties() { - return logFeederProperties; - } - - public INPUT_DESC_TYPE getInputDescriptor() { - return inputDescriptor; - } - - public InputManager getInputManager() { - return inputManager; - } - - public OutputManager getOutputManager() { - return outputManager; - } - - public void setOutputManager(OutputManager outputManager) { - this.outputManager = outputManager; - } - - public void setInputManager(InputManager inputManager) { - this.inputManager = inputManager; - } - - public void addOutput(Output output) { - outputList.add(output); - } - - public void addFilter(Filter filter) { - if (firstFilter == null) { - firstFilter = filter; - } else { - Filter f = firstFilter; - while (f.getNextFilter() != null) { - f = f.getNextFilter(); - 
} - f.setNextFilter(filter); - } - } - - public boolean isFilterRequired(FilterDescriptor filterDescriptor) { - Conditions conditions = filterDescriptor.getConditions(); - Fields fields = conditions.getFields(); - return fields.getType().contains(inputDescriptor.getType()); - } - - @SuppressWarnings("unchecked") - public boolean isOutputRequired(Output output) { - Map conditions = (Map) output.getConfigs().get("conditions"); - if (conditions == null) { - return false; - } - - Map fields = (Map) conditions.get("fields"); - if (fields == null) { - return false; - } - - List types = (List) fields.get("rowtype"); - return types.contains(inputDescriptor.getRowtype()); - } - - @Override - public boolean isEnabled() { - return inputDescriptor.isEnabled() != null ? inputDescriptor.isEnabled() : true; - } - - @Override - public void init(PROP_TYPE logFeederProperties) throws Exception { - if (firstFilter != null) { - firstFilter.init(logFeederProperties); - } - } - - @Override - public void run() { - try { - LOG.info("Started to monitor. " + getShortDescription()); - start(); - } catch (Exception e) { - LOG.error("Error writing to output.", e); - } - LOG.info("Exiting thread. " + getShortDescription()); - } - - /** - * This method will be called from the thread spawned for the output. This - * method should only exit after all data are read from the source or the - * process is exiting - */ - public abstract void start() throws Exception; - - public void outputLine(String line, INPUT_MARKER marker) { - statMetric.value++; - readBytesMetric.value += (line.length()); - - if (firstFilter != null) { - try { - firstFilter.apply(line, marker); - } catch (Exception e) { - LOG.error("Error during filter apply: {}", e); - } - } else { - // TODO: For now, let's make filter mandatory, so that no one accidently forgets to write filter - // outputManager.write(line, this); - } - } - - public void close() { - LOG.info("Close called. 
" + getShortDescription()); - try { - if (firstFilter != null) { - firstFilter.close(); - } - } catch (Throwable t) { - // Ignore - } - } - - public void flush() { - if (firstFilter != null) { - firstFilter.flush(); - } - } - - public void loadConfig(INPUT_DESC_TYPE inputDescriptor) { - this.inputDescriptor = inputDescriptor; - } - - public void setClosed(boolean isClosed) { - this.isClosed = isClosed; - } - - public boolean isClosed() { - return isClosed; - } - - public String getNameForThread() { - return this.getClass().getSimpleName(); - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getLogType() { - return logType; - } - - public void setLogType(String logType) { - this.logType = logType; - } - - public boolean isUseEventMD5() { - return useEventMD5; - } - - public boolean isGenEventMD5() { - return genEventMD5; - } - - public Filter getFirstFilter() { - return this.firstFilter; - } - - public Thread getThread() { - return thread; - } - - public void setThread(Thread thread) { - this.thread = thread; - } - - public void setUseEventMD5(boolean useEventMD5) { - this.useEventMD5 = useEventMD5; - } - - public void setGenEventMD5(boolean genEventMD5) { - this.genEventMD5 = genEventMD5; - } - - public LRUCache getCache() { - return this.cache; - } - - public String getCacheKeyField() { - return this.cacheKeyField; - } - - public void setCache(LRUCache cache) { - this.cache = cache; - } - - public void setCacheKeyField(String cacheKeyField) { - this.cacheKeyField = cacheKeyField; - } - - public List getOutputList() { - return outputList; - } - - public void addMetricsContainers(List metricsList) { - super.logStat(); - logStatForMetric(readBytesMetric, "Stat: Bytes Read"); - - if (firstFilter != null) { - firstFilter.logStat(); - } - } - - public void logStat() { - super.logStat(); - logStatForMetric(readBytesMetric, "Stat: Bytes Read"); - - if (firstFilter != null) { - firstFilter.logStat(); - } - } - - public void initCache(boolean cacheEnabled, String cacheKeyField, int cacheSize, - boolean cacheLastDedupEnabled, String cacheDedupInterval, String fileName) { - boolean enabled = getInputDescriptor().isCacheEnabled() != null - ? getInputDescriptor().isCacheEnabled() - : cacheEnabled; - if (enabled) { - String keyField = getInputDescriptor().getCacheKeyField() != null - ? getInputDescriptor().getCacheKeyField() - : cacheKeyField; - - setCacheKeyField(keyField); - - int size = getInputDescriptor().getCacheSize() != null - ? getInputDescriptor().getCacheSize() - : cacheSize; - - boolean lastDedupEnabled = getInputDescriptor().getCacheLastDedupEnabled() != null - ? getInputDescriptor().getCacheLastDedupEnabled() - : cacheLastDedupEnabled; - - long dedupInterval = getInputDescriptor().getCacheDedupInterval() != null - ? 
getInputDescriptor().getCacheDedupInterval() - : Long.parseLong(cacheDedupInterval); - - setCache(new LRUCache(size, fileName, dedupInterval, lastDedupEnabled)); - } - } - - @Override - public String toString() { - return getShortDescription(); - } - - public void setFirstFilter(Filter firstFilter) { - this.firstFilter = firstFilter; - } - - public boolean isInitDefaultFields() { - return initDefaultFields; - } - - public void setInitDefaultFields(boolean initDefaultFields) { - this.initDefaultFields = initDefaultFields; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/InputMarker.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/InputMarker.java deleted file mode 100644 index aa54019ea57..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/InputMarker.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.plugin.input; - -import java.util.Map; - -public interface InputMarker { - - INPUT_TYPE getInput(); - - Map getAllProperties(); - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/cache/LRUCache.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/cache/LRUCache.java deleted file mode 100644 index 5e13811894b..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/input/cache/LRUCache.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.plugin.input.cache; - -import java.io.Serializable; -import java.util.LinkedHashMap; -import java.util.Map; - -/** - * LRU cache for handle de-duplications per input files. 
- * It won't put already existing entries into the cache map if de-duplication interval not higher then a specific value - * or if the new value is the most recently used one (in case of lastDedupEnabled is true) - */ -public class LRUCache implements Serializable { - private final LinkedHashMap keyValueMap; - private final String fileName; - private final long dedupInterval; - private final boolean lastDedupEnabled; - private final String[] mostRecentLogs; - - public LRUCache(final int limit, final String fileName, final long dedupInterval, boolean lastDedupEnabled) { - this.fileName = fileName; - this.dedupInterval = dedupInterval; - this.lastDedupEnabled = lastDedupEnabled; - this.mostRecentLogs = new String[1]; // for now, we will just store 1 mru entry TODO: use an MRU implementation - keyValueMap = new LinkedHashMap(16, 0.75f, true) { - @Override - protected boolean removeEldestEntry(final Map.Entry eldest) { - return size() > limit; - } - }; - } - - public boolean isEntryReplaceable(String key, Long value) { - boolean result = true; - Long existingValue = keyValueMap.get(key); - if (existingValue == null) { - result = true; - } else if (lastDedupEnabled && containsMRUKey(key)) { // TODO: get peek element if mostRecentLogs will contain more than 1 element - result = false; - } else if (Math.abs(value - existingValue) < dedupInterval) { - result = false; - } - addMRUKey(key); - return result; - } - - public void put(String key, Long value) { - if (isEntryReplaceable(key, value)) { - keyValueMap.put(key, value); - } - } - - public Long get(String key) { - addMRUKey(key); - return keyValueMap.get(key); - } - - public int size() { - return keyValueMap.size(); - } - - public long getDedupInterval() { - return dedupInterval; - } - - public boolean containsKey(String key) { - return keyValueMap.containsKey(key); - } - - public String getFileName() { - return this.fileName; - } - - public boolean isLastDedupEnabled() { - return lastDedupEnabled; - } - - public String getMRUKey() { - return mostRecentLogs[0]; - } - - private void addMRUKey(String key) { - mostRecentLogs[0] = key; - } - - private boolean containsMRUKey(String key) { - return key != null && key.equals(mostRecentLogs[0]); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/BlockManager.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/BlockManager.java deleted file mode 100644 index 674f51fae25..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/BlockManager.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
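The LRUCache above implements the de-duplication rules described in its class comment. A short sketch of how those rules play out (illustrative only; the constructor argument order is taken from the deleted class, the values are arbitrary):

```java
import org.apache.ambari.logfeeder.plugin.input.cache.LRUCache;

public class LRUCacheExample {

  public static void main(String[] args) {
    // limit=100 entries, 1000 ms de-duplication interval, MRU based dedup disabled
    LRUCache cache = new LRUCache(100, "/var/log/example.log", 1000L, false);

    cache.put("Connection refused", 10_000L);
    // Same key only 200 ms later: inside the dedup interval, so it is not replaceable.
    System.out.println(cache.isEntryReplaceable("Connection refused", 10_200L)); // false
    // Same key 5 seconds later: outside the interval, so it may be replaced again.
    System.out.println(cache.isEntryReplaceable("Connection refused", 15_000L)); // true
    // A key that is not cached yet is always replaceable.
    System.out.println(cache.isEntryReplaceable("Disk full", 15_100L));          // true
  }
}
```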
- */ -package org.apache.ambari.logfeeder.plugin.manager; - -import org.apache.ambari.logfeeder.plugin.common.MetricData; - -import java.util.List; - -public interface BlockManager { - - void init() throws Exception; - - void close(); - - void logStats(); - - void addMetricsContainers(List metricsList); - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/CheckpointManager.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/CheckpointManager.java deleted file mode 100644 index abf14659ad7..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/CheckpointManager.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.plugin.manager; - -import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; - -import java.io.IOException; - -public interface CheckpointManager { - - void init(P properties); - - void checkIn(I inputFile, IFM inputMarker); - - int resumeLineNumber(I input); - - void cleanupCheckpoints(); - - void printCheckpoints(String checkpointLocation, String logTypeFilter, - String fileKeyFilter) throws IOException; - - void cleanCheckpoint(String checkpointLocation, String logTypeFilter, - String fileKeyFilter, boolean all) throws IOException; - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/InputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/InputManager.java deleted file mode 100644 index 6dc14234f7b..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/InputManager.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.plugin.manager; - -import org.apache.ambari.logfeeder.plugin.input.Input; - -import java.io.File; -import java.util.List; - - -public abstract class InputManager implements BlockManager { - - public abstract void addToNotReady(Input input); - - public abstract void checkInAll(); - - public abstract List getInputList(String serviceName); - - public abstract void add(String serviceName, Input input); - - public abstract void removeInput(Input input); - - public abstract void removeInputsForService(String serviceName); - - public abstract void startInputs(String serviceName); - - public abstract CheckpointManager getCheckpointHandler(); -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/OutputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/OutputManager.java deleted file mode 100644 index 3a3c6018c95..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/manager/OutputManager.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.plugin.manager; - -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logsearch.config.api.OutputConfigMonitor; - -import java.io.File; -import java.util.List; -import java.util.Map; - -public abstract class OutputManager implements BlockManager { - - public abstract void write(Map jsonObj, InputMarker inputMarker); - - public abstract void write(String jsonBlock, InputMarker inputMarker); - - public abstract void copyFile(File file, InputMarker marker); - - public abstract void add(Output output); - - public abstract List getOutputs(); - - public abstract List getOutputsToMonitor(); - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/output/Output.java b/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/output/Output.java deleted file mode 100644 index 13e5ad86867..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder-plugin-api/src/main/java/org/apache/ambari/logfeeder/plugin/output/Output.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.plugin.output; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.apache.ambari.logfeeder.plugin.common.ConfigItem; -import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.apache.ambari.logsearch.config.api.OutputConfigMonitor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public abstract class Output extends ConfigItem implements OutputConfigMonitor { - - private static final Logger LOG = LoggerFactory.getLogger(Output.class); - - private final static String GSON_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - private static Gson gson = new GsonBuilder().setDateFormat(GSON_DATE_FORMAT).create(); - - private LogSearchConfigLogFeeder logSearchConfig; - private String destination = null; - private boolean isClosed; - protected MetricData writeBytesMetric = new MetricData(getWriteBytesMetricName(), false); - - public abstract String getOutputType(); - - public abstract void copyFile(File inputFile, InputMarker inputMarker) throws Exception; - - public abstract void write(String jsonStr, INPUT_MARKER inputMarker) throws Exception; - - public abstract Long getPendingCount(); - - public abstract String getWriteBytesMetricName(); - - public String getNameForThread() { - return this.getClass().getSimpleName(); - } - - public boolean monitorConfigChanges() { - return false; - }; - - public void setLogSearchConfig(LogSearchConfigLogFeeder logSearchConfig) { - this.logSearchConfig = logSearchConfig; - } - - public LogSearchConfigLogFeeder getLogSearchConfig() { - return logSearchConfig; - } - - public String getDestination() { - return destination; - } - - public void setDestination(String destination) { - this.destination = destination; - } - - /** - * Get the list of fields that will be used for ID generation of log entries. - */ - public List getIdFields() { - return new ArrayList<>(); - } - - public boolean isClosed() { - return isClosed; - } - - public void setClosed(boolean closed) { - isClosed = closed; - } - - public void write(Map jsonObj, INPUT_MARKER inputMarker) throws Exception { - write(gson.toJson(jsonObj), inputMarker); - } - - @Override - public void addMetricsContainers(List metricsList) { - super.addMetricsContainers(metricsList); - metricsList.add(writeBytesMetric); - } - - @Override - public synchronized void logStat() { - super.logStat(); - logStatForMetric(writeBytesMetric, "Stat: Bytes Written"); - } - - @Override - public boolean logConfigs() { - // TODO: log something about the configs - return true; - } - - public void trimStrValue(Map jsonObj) { - if (jsonObj != null) { - for (Map.Entry entry : jsonObj.entrySet()) { - String key = entry.getKey(); - Object value = entry.getValue(); - if (value != null && value instanceof String) { - String valueStr = value.toString().trim(); - jsonObj.put(key, valueStr); - } - } - } - } - - public void close() { - LOG.info("Calling base close()." 
+ getShortDescription()); - isClosed = true; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/.gitignore b/ambari-logsearch/ambari-logsearch-logfeeder/.gitignore deleted file mode 100644 index 7b00482b010..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.pid \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/README.md b/ambari-logsearch/ambari-logsearch-logfeeder/README.md deleted file mode 100644 index 48cb81b49b1..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/README.md +++ /dev/null @@ -1,49 +0,0 @@ - - -# Log Search - Log Feeder: - -Log Feeder is a component of the Log Search service that reads logs, parses them and stores them in Apache Solr for the purpose -of later analysis. - -## Start locally from maven / IDE - -First you need to start every required service (except logfeeder): go to the `ambari-logsearch/docker` folder and run: -```bash -docker-compose up -d zookeeper solr logsearch -``` - -Secondly, if you are planning to run Log Feeder from an IDE, you will need to set the working directory to `ambari/ambari-logsearch/ambari-logsearch-logfeeder` or set the `LOGFEEDER_RELATIVE_LOCATION` env variable in order to run the LogFeeder main method. -With Maven, you won't need these steps; just run this command from the ambari-logsearch-logfeeder folder: - -```bash -mvn clean package -DskipTests spring-boot:run -``` - -# Input Configuration - -The configuration for the log feeder contains -* description of the log files -* description of the filters that parse the data of the log entries -* description of the mappers that modify the parsed fields - -The element descriptions can be found [here](docs/inputConfig.md). - -All these data are stored in json files, which should be placed in the directory /etc/ambari-logsearch-logfeeder/conf, and the -name of the files should be input.config-<service_name>.json \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/build.xml b/ambari-logsearch/ambari-logsearch-logfeeder/build.xml deleted file mode 100644 index bde400f4eb5..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/build.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/docs/filter.md b/ambari-logsearch/ambari-logsearch-logfeeder/docs/filter.md deleted file mode 100644 index d82529072aa..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/docs/filter.md +++ /dev/null @@ -1,61 +0,0 @@ - - -# Filter - -The filter element in the [input configuration](inputConfig.md) contains a list of filter descriptions, each describing one filter -applied on an input.
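For orientation, a single filter description might look like the sketch below. The field names come from the tables that follow; the concrete values (the grok pattern and the nested contents of `conditions`) are illustrative assumptions rather than part of this documentation.

```json
{
  "filter": "grok",
  "conditions": {
    "fields": {
      "type": ["ambari_server"]
    }
  },
  "sort_order": 1,
  "source_field": "log_message",
  "remove_source_field": false,
  "is_enabled": true,
  "message_pattern": "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
  "multiline_pattern": "^(%{TIMESTAMP_ISO8601:logtime})"
}
```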
- -The general elements in the json are the following: - -| Field | Description | Default | |-----------------------|-------------------------------------------------------------------------------------------------|-------------| | filter | The type of the filter, currently grok, keyvalue and json are supported | - | | conditions | The conditions that determine which inputs the filter is applied to | - | | sort\_order | Describes the order in which the filters should be applied | - | | source\_field | The source field of the filter, must be set for keyvalue filters | log_message | | remove\_source\_field | Remove the source field after the filter is applied | false | | post\_map\_values | Mappings applied to the fields after the filter has produced its result, see [post map values](postMapValues.md) | - | | is\_enabled | A flag to show if the filter should be used | true | - - -## Grok Filter - -Grok filters have the following additional parameters: - -| Field | Description | Default | |--------------------|------------------------------------------------------------------------------------------------------------|---------| | log4j\_format | The log4j pattern of the log, not used, it is only there for documentation | - | | multiline\_pattern | The grok pattern that shows that the line is not a log line on its own but part of a multi-line entry | - | | message\_pattern | The grok pattern used to parse the log entry | - | - - -## Key-value Filter - -value\_borders is only used if it is specified, and value\_split is not. - -Key-value filters have the following additional parameters: - -| Field | Description | Default | |----------------|-------------------------------------------------------------------------------------------|---------| | field\_split | The string that splits the key-value pairs | "\t" | | value\_split | The string that separates keys from values | "=" | | value\_borders | The borders around the value, must be 2 characters long, the first put before the value, the second after it | - | - - diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/docs/input.md b/ambari-logsearch/ambari-logsearch-logfeeder/docs/input.md deleted file mode 100644 index 1a9ce8db91a..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/docs/input.md +++ /dev/null @@ -1,61 +0,0 @@ - - -# Input - -The input element in the [input configuration](inputConfig.md) contains a list of input descriptions, each describing one source -of input.
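As a quick sketch, a file input description could look like the example below; the path and the `add_fields` values are purely illustrative assumptions, and the available elements are listed in the tables that follow.

```json
{
  "type": "ambari_server",
  "rowtype": "service",
  "source": "file",
  "path": "/var/log/ambari-server/ambari-server.log",
  "add_fields": {
    "cluster": "cl1"
  },
  "tail": true,
  "cache_enabled": true
}
```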
The general elements in the json are the following: - -| Field | Description | Default | |-----------------------------|---------------------------------------------------------------------------------------------------------|--------------| | type | The log id for this source | - | | rowtype | The type of the row, can be service / audit | - | | path | The path of the source, may contain '*' characters too | - | | add\_fields | The element contains field\_name: field\_value pairs which will be added to each row's data | - | | source | The type of the input source, currently file and s3_file are supported | - | | tail | The input should check for only the latest file matching the pattern, not all of them | true | | gen\_event\_md5 | Generate an event\_md5 field for each row by creating a hash of the row data | true | | use\_event\_md5\_as\_id | Generate an id for each row by creating a hash of the row data | false | | cache\_enabled | Allows the input to use a cache to filter out duplications | true | | cache\_key\_field | The field on which the cache is used to find duplications | log\_message | | cache\_last\_dedup\_enabled | Allows filtering out entries that are the same as the most recent one, regardless of its timestamp | false | | cache\_size | The number of entries to store in the cache | 100 | | cache\_dedup\_interval | The maximum interval in ms which may pass between two identical log messages to filter the latter out | 1000 | | is\_enabled | A flag to show if the input should be used | true | - - -## File Input - -File inputs have some additional parameters: - -| Field | Description | Default | |--------------------------|--------------------------------------------------------------------|---------| | checkpoint\_interval\_ms | The time interval in ms when the checkpoint file should be updated | 5000 | | process\_file | Should the file be processed | true | | copy\_file | Should the file be copied \(only if not processed\) | false | - - -## S3 File Input - -S3 file inputs have the following parameters in addition to the general file parameters: - -| Field | Description | Default | |-----------------|-----------------------------------------|---------| | s3\_access\_key | The access key used for AWS credentials | - | | s3\_secret\_key | The secret key used for AWS credentials | - | diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/docs/inputConfig.md b/ambari-logsearch/ambari-logsearch-logfeeder/docs/inputConfig.md deleted file mode 100644 index a507bfd1070..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/docs/inputConfig.md +++ /dev/null @@ -1,29 +0,0 @@ - - -# Input Configuration - -The input configurations are stored in json files. Each of them describes the processing of the log files of a service.
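Putting the pieces together, a minimal input configuration file for a single service might look like the sketch below; the service name, path and grok pattern are illustrative assumptions only, and the two top-level elements it uses are described in the table that follows.

```json
{
  "input": [
    {
      "type": "ambari_server",
      "rowtype": "service",
      "path": "/var/log/ambari-server/ambari-server.log"
    }
  ],
  "filter": [
    {
      "filter": "grok",
      "conditions": {
        "fields": {
          "type": ["ambari_server"]
        }
      },
      "message_pattern": "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}"
    }
  ]
}
```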
- -The json contains two elements: - -| Field | Description | |--------|--------------------------------------------| | input | A list of [input descriptions](input.md) | | filter | A list of [filter descriptions](filter.md) | \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/docs/postMapValues.md b/ambari-logsearch/ambari-logsearch-logfeeder/docs/postMapValues.md deleted file mode 100644 index bc219df7897..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/docs/postMapValues.md +++ /dev/null @@ -1,70 +0,0 @@ - - -# Post Map Values - -The Post Map Values element in the [filter](filter.md) contains field names as keys; the values are lists of sets of post map values, each -describing one mapping applied to the named field after filtering. - -Currently five kinds of mappings are supported: - -## Map Date - -The name of the mapping element should be map\_date. The value json element may contain the following parameters: - -| Field | Description | |-----------------------|--------------------------------------------------------------------------------------------------------| | src\_date\_pattern | If it is specified then the mapper converts from this format to the target format, and also adds the missing year | | target\_date\_pattern | If 'epoch' then the field is parsed as seconds from 1970, otherwise the content is used as the date pattern | - - -## Map Copy - -The name of the mapping element should be map\_copy. The value json element should contain the following parameter: - -| Field | Description | |------------|-------------------------------| | copy\_name | The name of the copied field | - - -## Map Field Name - -The name of the mapping element should be map\_fieldname. The value json element should contain the following parameter: - -| Field | Description | |------------------|-------------------------------| | new\_field\_name | The name of the renamed field | - -## Map Field Value - -The name of the mapping element should be map\_fieldvalue. The value json element should contain the following parameter: - -| Field | Description | |-------------|--------------------------------------------------------------------| | pre\_value | The value that the field must match \(ignoring case\) to be mapped | | post\_value | The value to which the field is changed | - -## Map Anonymize - -The name of the mapping element should be map\_anonymize. The value json element should contain the following parameter: - -| Field | Description | |------------|-----------------------------------------------------------------------------------------------------------------| | pattern | The pattern used to identify the parts to anonymize. The parts to hide should be marked with the "<hide>" string. |
| -| hide\_char | The character to hide with, if it is not specified then the default is '*' | diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml deleted file mode 100644 index 1b116b3c9c6..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml +++ /dev/null @@ -1,334 +0,0 @@ - - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - - ambari-logsearch-logfeeder - jar - Ambari Logsearch Log Feeder - http://maven.apache.org - - - - UTF-8 - 4.3.17.RELEASE - 1.5.13.RELEASE - - - - - org.apache.ambari - ambari-logsearch-appender - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-json - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-zookeeper - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-local - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-solr - ${project.version} - - - org.apache.ambari - ambari-logsearch-logfeeder-plugin-api - ${project.version} - - - org.apache.ambari - ambari-logsearch-logfeeder-container-registry - ${project.version} - - - commons-cli - commons-cli - 1.3.1 - - - commons-codec - commons-codec - 1.8 - - - junit - junit - test - - - org.easymock - easymock - 3.4 - test - - - io.thekraken - grok - 0.1.4 - - - org.apache.solr - solr-solrj - ${solr.version} - - - org.codehaus.jackson - jackson-core-asl - 1.9.13 - - - org.codehaus.jackson - jackson-mapper-asl - 1.9.13 - - - com.google.guava - guava - 25.0-jre - - - org.apache.commons - commons-csv - 1.2 - - - org.apache.kafka - kafka-clients - 0.9.0.0 - - - org.codehaus.jackson - jackson-xc - 1.9.13 - - - org.apache.ambari - ambari-metrics-common - ${project.version} - - - com.amazonaws - aws-java-sdk-s3 - 1.11.5 - - - org.apache.commons - commons-compress - 1.16.1 - - - com.amazonaws - aws-java-sdk-iam - 1.11.5 - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - com.fasterxml.woodstox - woodstox-core - - - commons-httpclient - commons-httpclient - - - slf4j-log4j12 - org.slf4j - - - org.apache.curator - curator-framework - - - org.apache.curator - curator-client - - - org.apache.curator - curator-recipes - - - jdk.tools - jdk.tools - - - - - org.apache.hadoop - hadoop-hdfs-client - ${hadoop.version} - - - commons-io - commons-io - ${common.io.version} - - - javax.inject - javax.inject - 1 - - - org.apache.commons - commons-lang3 - 3.6 - - - org.springframework.boot - spring-boot-starter - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-logging - - - - - org.springframework.boot - spring-boot-starter-log4j - 1.3.8.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - ${spring-boot.version} - - - io.netty - netty-all - 4.0.37.Final - - - org.apache.ant - ant - 1.10.3 - - - - commons-beanutils - commons-beanutils - 1.7.0 - provided - - - commons-logging - commons-logging - 1.1.1 - provided - - - - - - - maven-compiler-plugin - 3.0 - - - maven-dependency-plugin - 2.8 - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.3 - - ${jdk.version} - ${jdk.version} - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - - - org.apache.maven.plugins - maven-dependency-plugin - 2.8 - - - copy-dependencies - package - - - copy-dependencies - - - - true - ${basedir}/target/libs - false - false - true - runtime - - - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - package - - - - - - - - - run - - - - - - - diff --git 
a/ambari-logsearch/ambari-logsearch-logfeeder/run.sh b/ambari-logsearch/ambari-logsearch-logfeeder/run.sh deleted file mode 100755 index 47a9e6cf383..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/run.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -mvn clean spring-boot:run diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java deleted file mode 100644 index 4025d3d747f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder; - -import org.springframework.boot.Banner; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.boot.system.ApplicationPidFileWriter; - -@SpringBootApplication( - scanBasePackages = {"org.apache.ambari.logfeeder"}, - exclude = { - SolrAutoConfiguration.class - } -) -public class LogFeeder { - - public static void main(String[] args) { - String pidFile = System.getenv("LOGFEEDER_PID_FILE") == null ? 
"logfeeder.pid" : System.getenv("LOGFEEDER_PID_FILE"); - new SpringApplicationBuilder(LogFeeder.class) - .bannerMode(Banner.Mode.OFF) - .listeners(new ApplicationPidFileWriter(pidFile)) - .run(args); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederCommandLine.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederCommandLine.java deleted file mode 100644 index 3812ed15879..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederCommandLine.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder; - -import com.google.gson.GsonBuilder; -import org.apache.ambari.logfeeder.common.LogEntryParseTester; -import org.apache.ambari.logfeeder.input.file.checkpoint.FileCheckpointManager; -import org.apache.ambari.logfeeder.plugin.manager.CheckpointManager; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.LogManager; - -import java.io.File; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -public class LogFeederCommandLine { - - private static final String TEST_COMMAND = "test"; - private static final String CHECKPOINTS_COMMAND = "checkpoints"; - private static final String CHECKPOINTS_FOLDER_OPTION = "checkpoints-folder"; - private static final String CHECKPOINTS_LIST_OPTION = "list"; - private static final String CHECKPOINTS_CLEAN_OPTION = "clean"; - private static final String CHECKPOINTS_ALL_OPTION = "all"; - private static final String CHECKPOINTS_FILE_KEY = "file-key"; - private static final String CHECKPOINTS_LOG_TYPE = "log-type"; - private static final String TEST_LOG_ENTRY_OPTION = "test-log-entry"; - private static final String TEST_SHIPPER_CONFIG_OPTION = "test-shipper-config"; - private static final String TEST_GLOBAL_CONFIG_OPTION = "test-global-config"; - private static final String TEST_LOG_ID_OPTION = "test-log-id"; - - private static final String COMMAND_LINE_SYNTAX = "java org.apache.ambari.logfeeder.LogFeederCommandLine [args]"; - - public static void main(String[] args) { - Options options = new Options(); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.setDescPadding(10); - helpFormatter.setWidth(200); - - Option helpOption = Option.builder("h") - 
.longOpt("help") - .desc("Print commands") - .build(); - - Option testOption = Option.builder("t") - .longOpt(TEST_COMMAND) - .desc("Test if log entry is parseable") - .build(); - - Option checkpointsOption = Option.builder("cp") - .longOpt(CHECKPOINTS_COMMAND) - .desc("Use checkpoint operations") - .build(); - - Option checkpointsListOption = Option.builder("l") - .longOpt(CHECKPOINTS_LIST_OPTION) - .desc("Print checkpoints") - .build(); - - Option checkpointsCleanOption = Option.builder("c") - .longOpt(CHECKPOINTS_CLEAN_OPTION) - .desc("Remove a checkpoint file (by key/log type or use on all)") - .build(); - - Option checkpointsFolderOption = Option.builder("cf") - .longOpt(CHECKPOINTS_FOLDER_OPTION) - .hasArg() - .desc("Checkpoints folder location") - .build(); - - Option checkpointsFileKeyOption = Option.builder("k") - .longOpt(CHECKPOINTS_FILE_KEY) - .hasArg() - .desc("Filter on file key (for list and clean)") - .build(); - - Option checkpointsLogTypeOption = Option.builder("lt") - .longOpt(CHECKPOINTS_LOG_TYPE) - .hasArg() - .desc("Filter on log type (for list and clean)") - .build(); - - Option checkpointAllOption = Option.builder("a") - .longOpt(CHECKPOINTS_ALL_OPTION) - .desc("") - .build(); - - Option testLogEntryOption = Option.builder("tle") - .longOpt(TEST_LOG_ENTRY_OPTION) - .hasArg() - .desc("Log entry to test if it's parseable") - .build(); - - Option testShipperConfOption = Option.builder("tsc") - .longOpt(TEST_SHIPPER_CONFIG_OPTION) - .hasArg() - .desc("Shipper configuration file for testing if log entry is parseable") - .build(); - - Option testGlobalConfOption = Option.builder("tgc") - .longOpt(TEST_GLOBAL_CONFIG_OPTION) - .hasArg() - .desc("Global configuration files (comma separated list) for testing if log entry is parseable") - .build(); - - Option testLogIdOption = Option.builder("tli") - .longOpt(TEST_LOG_ID_OPTION) - .hasArg() - .desc("The id of the log to test") - .build(); - - options.addOption(helpOption); - options.addOption(testOption); - options.addOption(testLogEntryOption); - options.addOption(testShipperConfOption); - options.addOption(testGlobalConfOption); - options.addOption(testLogIdOption); - options.addOption(checkpointsOption); - options.addOption(checkpointsListOption); - options.addOption(checkpointsCleanOption); - options.addOption(checkpointsFolderOption); - options.addOption(checkpointAllOption); - options.addOption(checkpointsFileKeyOption); - options.addOption(checkpointsLogTypeOption); - - try { - CommandLineParser cmdLineParser = new DefaultParser(); - CommandLine cli = cmdLineParser.parse(options, args); - - if (cli.hasOption('h')) { - helpFormatter.printHelp(COMMAND_LINE_SYNTAX, options); - System.exit(0); - } - String command = ""; - if (cli.hasOption("cp")) { - String checkpointLocation = ""; - if (cli.hasOption("cf")) { - checkpointLocation = cli.getOptionValue("cf"); - } else { - Properties prop = new Properties(); - prop.load(LogFeederCommandLine.class.getClassLoader().getResourceAsStream("logfeeder.properties")); - checkpointLocation = prop.getProperty("logfeeder.checkpoint.folder"); - } - boolean cleanCommand = cli.hasOption("c"); - boolean listCommand = cli.hasOption("l") || !cleanCommand; // Use list if clean is not used - boolean allOption = cli.hasOption("a"); - String logTypeFilter = cli.hasOption("lt") ? cli.getOptionValue("lt") : null; - String fileKeyFilter = cli.hasOption("k") ? 
cli.getOptionValue("k") : null; - - final CheckpointManager checkpointManager = new FileCheckpointManager(); - if (listCommand) { - checkpointManager.printCheckpoints(checkpointLocation, logTypeFilter, fileKeyFilter); - } else { - checkpointManager.cleanCheckpoint(checkpointLocation, logTypeFilter, fileKeyFilter, allOption); - } - - System.out.println("Checkpoint operation has finished successfully."); - return; - } - if (cli.hasOption("t")) { - command = TEST_COMMAND; - validateRequiredOptions(cli, command, testLogEntryOption, testShipperConfOption); - } - test(cli); - } catch (Exception e) { - e.printStackTrace(); - helpFormatter.printHelp(COMMAND_LINE_SYNTAX, options); - System.exit(1); - } - } - - private static void validateRequiredOptions(CommandLine cli, String command, Option... optionsToValidate) { - List requiredOptions = new ArrayList<>(); - for (Option opt : optionsToValidate) { - if (!cli.hasOption(opt.getOpt())) { - requiredOptions.add(opt.getOpt()); - } - } - if (!requiredOptions.isEmpty()) { - throw new IllegalArgumentException( - String.format("The following options required for '%s' : %s", command, StringUtils.join(requiredOptions, ","))); - } - } - - private static void test(CommandLine cli) { - try { - LogManager.shutdown(); - String testLogEntry = cli.getOptionValue("tle"); - String testShipperConfig = FileUtils.readFileToString(new File(cli.getOptionValue("tsc")), Charset.defaultCharset()); - List testGlobalConfigs = new ArrayList<>(); - for (String testGlobalConfigFile : cli.getOptionValue("tgc").split(",")) { - testGlobalConfigs.add(FileUtils.readFileToString(new File(testGlobalConfigFile), Charset.defaultCharset())); - } - String testLogId = cli.getOptionValue("tli"); - Map result = new LogEntryParseTester(testLogEntry, testShipperConfig, testGlobalConfigs, testLogId).parse(); - String parsedLogEntry = new GsonBuilder().setPrettyPrinting().create().toJson(result); - System.out.println("The result of the parsing is:\n" + parsedLogEntry); - } catch (Exception e) { - System.out.println("Exception occurred, could not test if log entry is parseable"); - e.printStackTrace(System.out); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java deleted file mode 100644 index 1ceef3bd6fd..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java +++ /dev/null @@ -1,451 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.common; - -import com.google.common.collect.Maps; -import com.google.gson.reflect.TypeToken; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputSimulate; -import org.apache.ambari.logfeeder.plugin.common.AliasUtil; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.manager.InputManager; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.InputConfigMonitor; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.FilterDescriptorImpl; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputDescriptorImpl; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.BooleanUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; -import org.springframework.core.io.ClassPathResource; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.inject.Inject; -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileNotFoundException; -import java.lang.reflect.Type; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class ConfigHandler implements InputConfigMonitor { - private static final Logger LOG = Logger.getLogger(org.apache.ambari.logfeeder.common.ConfigHandler.class); - - private final LogSearchConfigLogFeeder logSearchConfig; - - @Inject - private InputManager inputManager; - @Inject - private OutputManager outputManager; - @Inject - private LogFeederProps logFeederProps; - - private final Map globalConfigs = new HashMap<>(); - private final List globalConfigJsons = new ArrayList<>(); - - private final List inputConfigList = new ArrayList<>(); - private final List filterConfigList = new ArrayList<>(); - private final List> outputConfigList = new ArrayList<>(); - - private boolean simulateMode = false; - - public ConfigHandler(LogSearchConfigLogFeeder logSearchConfig) { - this.logSearchConfig = logSearchConfig; - } - - @PostConstruct - public void init() throws Exception { - loadConfigFiles(); - logSearchConfig.init(Maps.fromProperties(logFeederProps.getProperties()), logFeederProps.getClusterName()); - loadOutputs(); - simulateIfNeeded(); - - inputManager.init(); - outputManager.init(); - } - - private void loadConfigFiles() throws Exception { - List configFiles = getConfigFiles(); - for (String configFileName : configFiles) { - LOG.info("Going to load config file:" + configFileName); - configFileName = configFileName.replace("\\ ", "%20"); - File configFile = new File(configFileName); - if 
(configFile.exists() && configFile.isFile()) { - LOG.info("Config file exists in path." + configFile.getAbsolutePath()); - loadConfigsUsingFile(configFile); - } else { - LOG.info("Trying to load config file from classloader: " + configFileName); - loadConfigsUsingClassLoader(configFileName); - LOG.info("Loaded config file from classloader: " + configFileName); - } - } - } - - private List getConfigFiles() { - List configFiles = new ArrayList<>(); - - String logFeederConfigFilesProperty = logFeederProps.getConfigFiles(); - LOG.info("logfeeder.config.files=" + logFeederConfigFilesProperty); - if (logFeederConfigFilesProperty != null) { - configFiles.addAll(Arrays.asList(logFeederConfigFilesProperty.split(","))); - } - - return configFiles; - } - - private void loadConfigsUsingFile(File configFile) throws Exception { - try { - String configData = FileUtils.readFileToString(configFile, Charset.defaultCharset()); - loadConfigs(configData); - } catch (Exception t) { - LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath()); - throw t; - } - } - - private void loadConfigsUsingClassLoader(String configFileName) throws Exception { - try (BufferedInputStream fis = (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(configFileName)) { - ClassPathResource configFile = new ClassPathResource(configFileName); - if (!configFile.exists()) { - throw new FileNotFoundException(configFileName); - } - String configData = IOUtils.toString(fis, Charset.defaultCharset()); - loadConfigs(configData); - } - } - - @Override - public void loadInputConfigs(String serviceName, InputConfig inputConfig) throws Exception { - inputConfigList.clear(); - filterConfigList.clear(); - - inputConfigList.addAll(inputConfig.getInput()); - filterConfigList.addAll(inputConfig.getFilter()); - - if (simulateMode) { - InputSimulate.loadTypeToFilePath(inputConfigList); - } else { - loadInputs(serviceName); - loadFilters(serviceName); - assignOutputsToInputs(serviceName); - - inputManager.startInputs(serviceName); - } - } - - @Override - public void removeInputs(String serviceName) { - inputManager.removeInputsForService(serviceName); - } - - public Input getTestInput(InputConfig inputConfig, String logId) { - for (InputDescriptor inputDescriptor : inputConfig.getInput()) { - if (inputDescriptor.getType().equals(logId)) { - inputConfigList.add(inputDescriptor); - break; - } - } - if (inputConfigList.isEmpty()) { - throw new IllegalArgumentException("Log Id " + logId + " was not found in shipper configuriaton"); - } - - for (FilterDescriptor filterDescriptor : inputConfig.getFilter()) { -// if ("grok".equals(filterDescriptor.getFilter())) { -// // Thus ensure that the log entry passed will be parsed immediately -// ((FilterGrokDescriptor)filterDescriptor).setMultilinePattern(null); -// } - filterConfigList.add(filterDescriptor); - } - loadInputs("test"); - loadFilters("test"); - List inputList = inputManager.getInputList("test"); - - return inputList != null && inputList.size() == 1 ? 
inputList.get(0) : null; - } - - @SuppressWarnings("unchecked") - public void loadConfigs(String configData) throws Exception { - Type type = new TypeToken>() {}.getType(); - Map configMap = LogFeederUtil.getGson().fromJson(configData, type); - - // Get the globals - for (String key : configMap.keySet()) { - switch (key) { - case "global" : - globalConfigs.putAll((Map) configMap.get(key)); - globalConfigJsons.add(configData); - break; - case "output" : - List> outputConfig = (List>) configMap.get(key); - outputConfigList.addAll(outputConfig); - break; - default : - LOG.warn("Unknown config key: " + key); - } - } - } - - @Override - public List getGlobalConfigJsons() { - return globalConfigJsons; - } - - private void simulateIfNeeded() throws Exception { - int simulatedInputNumber = logFeederProps.getInputSimulateConfig().getSimulateInputNumber(); - if (simulatedInputNumber == 0) - return; - - InputConfigImpl simulateInputConfig = new InputConfigImpl(); - List inputConfigDescriptors = new ArrayList<>(); - simulateInputConfig.setInput(inputConfigDescriptors); - simulateInputConfig.setFilter(new ArrayList()); - for (int i = 0; i < simulatedInputNumber; i++) { - InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {}; - inputDescriptor.setSource("simulate"); - inputDescriptor.setRowtype("service"); - inputDescriptor.setAddFields(new HashMap()); - inputConfigDescriptors.add(inputDescriptor); - } - - loadInputConfigs("Simulation", simulateInputConfig); - - simulateMode = true; - } - - private void loadOutputs() { - for (Map map : outputConfigList) { - if (map == null) { - continue; - } - mergeBlocks(globalConfigs, map); - - String value = (String) map.get("destination"); - if (StringUtils.isEmpty(value)) { - LOG.error("Output block doesn't have destination element"); - continue; - } - Output output = (Output) AliasUtil.getClassInstance(value, AliasUtil.AliasType.OUTPUT); - if (output == null) { - LOG.error("Output object could not be found"); - continue; - } - output.setDestination(value); - output.loadConfig(map); - output.setLogSearchConfig(logSearchConfig); - - // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input - if (output.isEnabled()) { - output.logConfigs(); - outputManager.add(output); - } else { - LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription()); - } - } - } - - private void loadInputs(String serviceName) { - for (InputDescriptor inputDescriptor : inputConfigList) { - if (inputDescriptor == null) { - continue; - } - - String source = (String) inputDescriptor.getSource(); - if (StringUtils.isEmpty(source)) { - LOG.error("Input block doesn't have source element"); - continue; - } - Input input = (Input) AliasUtil.getClassInstance(source, AliasUtil.AliasType.INPUT); - if (input == null) { - LOG.error("Input object could not be found"); - continue; - } - input.setType(source); - input.setLogType(inputDescriptor.getType()); - input.loadConfig(inputDescriptor); - - if (input.isEnabled()) { - input.setOutputManager(outputManager); - input.setInputManager(inputManager); - inputManager.add(serviceName, input); - input.logConfigs(); - } else { - LOG.info("Input is disabled. So ignoring it. 
" + input.getShortDescription()); - } - } - } - - private void loadFilters(String serviceName) { - sortFilters(); - - List toRemoveInputList = new ArrayList(); - for (Input input : inputManager.getInputList(serviceName)) { - for (FilterDescriptor filterDescriptor : filterConfigList) { - if (filterDescriptor == null) { - continue; - } - if (BooleanUtils.isFalse(filterDescriptor.isEnabled())) { - LOG.debug("Ignoring filter " + filterDescriptor.getFilter() + " because it is disabled"); - continue; - } - if (!input.isFilterRequired(filterDescriptor)) { - LOG.debug("Ignoring filter " + filterDescriptor.getFilter() + " for input " + input.getShortDescription()); - continue; - } - - String value = filterDescriptor.getFilter(); - if (StringUtils.isEmpty(value)) { - LOG.error("Filter block doesn't have filter element"); - continue; - } - Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasUtil.AliasType.FILTER); - if (filter == null) { - LOG.error("Filter object could not be found"); - continue; - } - filter.loadConfig(filterDescriptor); - filter.setInput(input); - - filter.setOutputManager(outputManager); - input.addFilter(filter); - filter.logConfigs(); - } - - if (input.getFirstFilter() == null) { - toRemoveInputList.add(input); - } - } - - for (Input toRemoveInput : toRemoveInputList) { - LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription()); - inputManager.removeInput(toRemoveInput); - } - } - - private void sortFilters() { - Collections.sort(filterConfigList, (o1, o2) -> { - Integer o1Sort = o1.getSortOrder(); - Integer o2Sort = o2.getSortOrder(); - if (o1Sort == null || o2Sort == null) { - return 0; - } - - return o1Sort - o2Sort; - }); - } - - private void assignOutputsToInputs(String serviceName) { - Set usedOutputSet = new HashSet(); - for (Input input : inputManager.getInputList(serviceName)) { - for (Output output : outputManager.getOutputs()) { - if (input.isOutputRequired(output)) { - usedOutputSet.add(output); - input.addOutput(output); - } - } - } - - // In case of simulation copies of the output are added for each simulation instance, these must be added to the manager - for (Output output : InputSimulate.getSimulateOutputs()) { - output.setLogSearchConfig(logSearchConfig); - outputManager.add(output); - usedOutputSet.add(output); - } - } - - @SuppressWarnings("unchecked") - private void mergeBlocks(Map fromMap, Map toMap) { - for (String key : fromMap.keySet()) { - Object objValue = fromMap.get(key); - if (objValue == null) { - continue; - } - if (objValue instanceof Map) { - Map globalFields = LogFeederUtil.cloneObject((Map) objValue); - - Map localFields = (Map) toMap.get(key); - if (localFields == null) { - localFields = new HashMap(); - toMap.put(key, localFields); - } - - if (globalFields != null) { - for (String fieldKey : globalFields.keySet()) { - if (!localFields.containsKey(fieldKey)) { - localFields.put(fieldKey, globalFields.get(fieldKey)); - } - } - } - } - } - - // Let's add the rest of the top level fields if missing - for (String key : fromMap.keySet()) { - if (!toMap.containsKey(key)) { - toMap.put(key, fromMap.get(key)); - } - } - } - - public void cleanCheckPointFiles() { - inputManager.getCheckpointHandler().cleanupCheckpoints(); - } - - public void logStats() { - inputManager.logStats(); - outputManager.logStats(); - } - - public void addMetrics(List metricsList) { - inputManager.addMetricsContainers(metricsList); - outputManager.addMetricsContainers(metricsList); - } - - @PreDestroy - public void 
close() { - inputManager.close(); - outputManager.close(); - inputManager.checkInAll(); - } - - public void setInputManager(InputManager inputManager) { - this.inputManager = inputManager; - } - - public void setOutputManager(OutputManager outputManager) { - this.outputManager = outputManager; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/IdGeneratorHelper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/IdGeneratorHelper.java deleted file mode 100644 index 74305739095..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/IdGeneratorHelper.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.common; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; - -import java.util.List; -import java.util.Map; -import java.util.UUID; - -/** - * Helper class to generete UUID (random or based on specific fields) - */ -public class IdGeneratorHelper { - - private IdGeneratorHelper() { - } - - /** - * Generate UUID based on fields (or just randomly) - * @param data object map which can contain the field key-value pairs - * @param idFields field names that used for generating uuid - * @return generated UUID string - */ - public static String generateUUID(Map data, List idFields) { - String uuid = null; - if (CollectionUtils.isNotEmpty(idFields)) { - final StringBuilder sb = new StringBuilder(); - for (String idField : idFields) { - if (data.containsKey(idField)) { - sb.append(data.get(idField).toString()); - } - } - String concatId = sb.toString(); - if (StringUtils.isNotEmpty(concatId)) { - uuid = UUID.nameUUIDFromBytes(concatId.getBytes()).toString(); - } else { - uuid = UUID.randomUUID().toString(); - } - } else { - uuid = UUID.randomUUID().toString(); - } - return uuid; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogEntryParseTester.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogEntryParseTester.java deleted file mode 100644 index b000aed4cb8..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogEntryParseTester.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder.common; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logfeeder.conf.LogEntryCacheConfig; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.input.InputManagerImpl; -import org.apache.ambari.logfeeder.loglevelfilter.LogLevelFilterHandler; -import org.apache.ambari.logfeeder.output.OutputManagerImpl; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.json.JsonHelper; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; - -import com.google.common.collect.ImmutableMap; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; - -public class LogEntryParseTester { - - private final String logEntry; - private final String shipperConfig; - private final List globalConfigs; - private final String logId; - - public LogEntryParseTester(String logEntry, String shipperConfig, String globalConfigsJson, String logId) { - this.logEntry = logEntry; - this.shipperConfig = shipperConfig; - this.globalConfigs = new ArrayList<>(); - this.logId = logId; - - JsonParser jsonParser = new JsonParser(); - JsonArray globalConfigArray = jsonParser.parse(globalConfigsJson).getAsJsonArray(); - for (JsonElement e : globalConfigArray) { - globalConfigs.add(e.getAsJsonObject()); - } - } - - public LogEntryParseTester(String logEntry, String shipperConfig, List globalConfigJsons, String logId) { - this.logEntry = logEntry; - this.shipperConfig = shipperConfig; - this.globalConfigs = new ArrayList<>(); - this.logId = logId; - - JsonParser jsonParser = new JsonParser(); - for (String globalConfig : globalConfigJsons) { - JsonObject globalConfigObject = jsonParser.parse(globalConfig).getAsJsonObject(); - globalConfigs.add(globalConfigObject.get("global").getAsJsonObject()); - } - } - - public Map parse() throws Exception { - InputConfig inputConfig = getInputConfig(); - ConfigHandler configHandler = new ConfigHandler(null); - configHandler.setInputManager(new InputManagerImpl()); - OutputManagerImpl outputManager = new OutputManagerImpl(); - LogFeederProps logFeederProps = new LogFeederProps(); - LogEntryCacheConfig logEntryCacheConfig = new LogEntryCacheConfig(); - logEntryCacheConfig.setCacheEnabled(false); - logEntryCacheConfig.setCacheSize(0); - logFeederProps.setLogEntryCacheConfig(logEntryCacheConfig); - 
outputManager.setLogFeederProps(logFeederProps); - LogLevelFilterHandler logLevelFilterHandler = new LogLevelFilterHandler(null); - logLevelFilterHandler.setLogFeederProps(logFeederProps); - outputManager.setLogLevelFilterHandler(logLevelFilterHandler); - configHandler.setOutputManager(outputManager); - Input input = configHandler.getTestInput(inputConfig, logId); - input.init(logFeederProps); - final Map result = new HashMap<>(); - input.getFirstFilter().init(logFeederProps); - input.addOutput(new Output() { - @Override - public void init(LogFeederProps logFeederProperties) throws Exception { - } - - @Override - public String getShortDescription() { - return null; - } - - @Override - public String getStatMetricName() { - return null; - } - - @Override - public void write(String block, InputFileMarker inputMarker) throws Exception { - } - - @Override - public Long getPendingCount() { - return null; - } - - @Override - public String getWriteBytesMetricName() { - return null; - } - - @Override - public String getOutputType() { - return null; - } - - @Override - public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException { - } - - @Override - public void write(Map jsonObj, InputFileMarker inputMarker) { - result.putAll(jsonObj); - } - }); - input.outputLine(logEntry, new InputFileMarker(input, null, 0)); - input.outputLine(logEntry, new InputFileMarker(input, null, 0)); - - return result.isEmpty() ? - ImmutableMap.of("errorMessage", (Object)"Could not parse test log entry") : - result; - } - - private InputConfig getInputConfig() { - JsonParser jsonParser = new JsonParser(); - JsonElement shipperConfigJson = jsonParser.parse(shipperConfig); - for (JsonObject globalConfig : globalConfigs) { - for (Map.Entry typeEntry : shipperConfigJson.getAsJsonObject().entrySet()) { - for (JsonElement e : typeEntry.getValue().getAsJsonArray()) { - JsonHelper.merge(globalConfig, e.getAsJsonObject()); - } - } - } - return InputConfigGson.gson.fromJson(shipperConfigJson, InputConfigImpl.class); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java deleted file mode 100644 index 1d56924f06a..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.common; - -public class LogFeederConstants { - - public static final String ALL = "all"; - public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config"; - public static final String LOG_LEVEL_UNKNOWN = "UNKNOWN"; - - // solr fields - public static final String SOLR_LEVEL = "level"; - public static final String SOLR_COMPONENT = "type"; - public static final String SOLR_HOST = "host"; - - // Event History Constants History - public static final String VALUES = "jsons"; - public static final String ROW_TYPE = "rowtype"; - - // S3 Constants - public static final String S3_PATH_START_WITH = "s3://"; - public static final String S3_PATH_SEPARATOR = "/"; - - public static final String IN_MEMORY_TIMESTAMP = "in_memory_timestamp"; - - public static final String LOGFEEDER_PROPERTIES_FILE = "logfeeder.properties"; - public static final String CLUSTER_NAME_PROPERTY = "cluster.name"; - public static final String TMP_DIR_PROPERTY = "logfeeder.tmp.dir"; - - public static final String METRICS_COLLECTOR_PROTOCOL_PROPERTY = "logfeeder.metrics.collector.protocol"; - public static final String METRICS_COLLECTOR_PORT_PROPERTY = "logfeeder.metrics.collector.port"; - public static final String METRICS_COLLECTOR_HOSTS_PROPERTY = "logfeeder.metrics.collector.hosts"; - public static final String METRICS_COLLECTOR_PATH_PROPERTY = "logfeeder.metrics.collector.path"; - - public static final String LOG_FILTER_ENABLE_PROPERTY = "logfeeder.log.filter.enable"; - public static final String INCLUDE_DEFAULT_LEVEL_PROPERTY = "logfeeder.include.default.level"; - public static final String SOLR_IMPLICIT_ROUTING_PROPERTY = "logfeeder.solr.implicit.routing"; - - public static final String CONFIG_DIR_PROPERTY = "logfeeder.config.dir"; - public static final String CONFIG_FILES_PROPERTY = "logfeeder.config.files"; - - public static final String SIMULATE_INPUT_NUMBER_PROPERTY = "logfeeder.simulate.input_number"; - public static final int DEFAULT_SIMULATE_INPUT_NUMBER = 0; - public static final String SIMULATE_LOG_LEVEL_PROPERTY = "logfeeder.simulate.log_level"; - public static final String DEFAULT_SIMULATE_LOG_LEVEL = "WARN"; - public static final String SIMULATE_NUMBER_OF_WORDS_PROPERTY = "logfeeder.simulate.number_of_words"; - public static final int DEFAULT_SIMULATE_NUMBER_OF_WORDS = 1000; - public static final String SIMULATE_MIN_LOG_WORDS_PROPERTY = "logfeeder.simulate.min_log_words"; - public static final int DEFAULT_SIMULATE_MIN_LOG_WORDS = 5; - public static final String SIMULATE_MAX_LOG_WORDS_PROPERTY = "logfeeder.simulate.max_log_words"; - public static final int DEFAULT_SIMULATE_MAX_LOG_WORDS = 5; - public static final String SIMULATE_SLEEP_MILLISECONDS_PROPERTY = "logfeeder.simulate.sleep_milliseconds"; - public static final int DEFAULT_SIMULATE_SLEEP_MILLISECONDS = 10000; - public static final String SIMULATE_LOG_IDS_PROPERTY = "logfeeder.simulate.log_ids"; - - public static final String SOLR_KERBEROS_ENABLE_PROPERTY = "logfeeder.solr.kerberos.enable"; - public static final boolean DEFAULT_SOLR_KERBEROS_ENABLE = false; - public static final String DEFAULT_SOLR_JAAS_FILE = "/etc/security/keytabs/logsearch_solr.service.keytab"; - public static final String SOLR_JAAS_FILE_PROPERTY = "logfeeder.solr.jaas.file"; - - public static final String CACHE_ENABLED_PROPERTY = "logfeeder.cache.enabled"; - public static final boolean DEFAULT_CACHE_ENABLED = false; - public static final String CACHE_KEY_FIELD_PROPERTY = "logfeeder.cache.key.field"; - public static final String 
DEFAULT_CACHE_KEY_FIELD = "log_message"; - public static final String CACHE_SIZE_PROPERTY = "logfeeder.cache.size"; - public static final int DEFAULT_CACHE_SIZE = 100; - public static final String CACHE_LAST_DEDUP_ENABLED_PROPERTY = "logfeeder.cache.last.dedup.enabled"; - public static final boolean DEFAULT_CACHE_LAST_DEDUP_ENABLED = false; - public static final String CACHE_DEDUP_INTERVAL_PROPERTY = "logfeeder.cache.dedup.interval"; - public static final long DEFAULT_CACHE_DEDUP_INTERVAL = 1000; - - public static final String CHECKPOINT_FOLDER_PROPERTY = "logfeeder.checkpoint.folder"; - public static final String CHECKPOINT_EXTENSION_PROPERTY = "logfeeder.checkpoint.extension"; - public static final String DEFAULT_CHECKPOINT_EXTENSION = ".cp"; - - public static final String DOCKER_CONTAINER_REGISTRY_ENABLED_PROPERTY = "logfeeder.docker.registry.enabled"; - public static final boolean DOCKER_CONTAINER_REGISTRY_ENABLED_DEFAULT = false; - - public static final String USE_LOCAL_CONFIGS_PROPERTY = "logfeeder.configs.local.enabled"; - public static final boolean USE_LOCAL_CONFIGS_DEFAULT = false; - - public static final String USE_SOLR_FILTER_STORAGE_PROPERTY = "logfeeder.configs.filter.solr.enabled"; - public static final boolean USE_SOLR_FILTER_STORAGE_DEFAULT = false; - - public static final String USE_ZK_FILTER_STORAGE_PROPERTY = "logfeeder.configs.filter.zk.enabled"; - public static final boolean USE_ZK_FILTER_STORAGE_DEFAULT = false; - - public static final String MONITOR_SOLR_FILTER_STORAGE_PROPERTY = "logfeeder.configs.filter.solr.monitor.enabled"; - public static final boolean MONITOR_SOLR_FILTER_STORAGE_DEFAULT = true; - - public static final String SOLR_ZK_CONNECTION_STRING = "logfeeder.solr.zk_connect_string"; - public static final String SOLR_URLS = "logfeeder.solr.urls"; - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederException.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederException.java deleted file mode 100644 index 3653475ff16..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederException.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.common; - -public class LogFeederException extends Exception { - - public LogFeederException(String message, Throwable throwable) { - super(message, throwable); - } - - public LogFeederException(String message) { - super(message); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java deleted file mode 100644 index cf94fb545bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.common; - -import org.apache.commons.lang3.StringUtils; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.client.solrj.impl.LBHttpSolrClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class LogFeederSolrClientFactory { - - private static final Logger logger = LoggerFactory.getLogger(LogFeederSolrClientFactory.class); - - public SolrClient createSolrClient(String zkConnectionString, String[] solrUrls, String collection) { - logger.info("Creating solr client ..."); - logger.info("Using collection=" + collection); - if (solrUrls != null && solrUrls.length > 0) { - logger.info("Using lbHttpSolrClient with urls: {}", - StringUtils.join(appendTo("/" + collection, solrUrls), ",")); - LBHttpSolrClient.Builder builder = new LBHttpSolrClient.Builder(); - builder.withBaseSolrUrls(solrUrls); - return builder.build(); - } else { - logger.info("Using zookeepr. zkConnectString=" + zkConnectionString); - CloudSolrClient.Builder builder = new CloudSolrClient.Builder(); - builder.withZkHost(zkConnectionString); - CloudSolrClient solrClient = builder.build(); - solrClient.setDefaultCollection(collection); - return solrClient; - } - } - - private String[] appendTo(String toAppend, String... 
appendees) { - for (int i = 0; i < appendees.length; i++) { - appendees[i] = appendees[i] + toAppend; - } - return appendees; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java deleted file mode 100644 index 8c7e7d92256..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.conf; - -import com.google.common.collect.Maps; -import org.apache.ambari.logfeeder.common.LogFeederSolrClientFactory; -import org.apache.ambari.logfeeder.docker.DockerContainerRegistry; -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.input.InputConfigUploader; -import org.apache.ambari.logfeeder.input.InputManagerImpl; -import org.apache.ambari.logfeeder.plugin.manager.CheckpointManager; -import org.apache.ambari.logfeeder.input.file.checkpoint.FileCheckpointManager; -import org.apache.ambari.logfeeder.loglevelfilter.LogLevelFilterHandler; -import org.apache.ambari.logfeeder.common.ConfigHandler; -import org.apache.ambari.logfeeder.metrics.MetricsManager; -import org.apache.ambari.logfeeder.metrics.StatsLogger; -import org.apache.ambari.logfeeder.output.OutputManagerImpl; -import org.apache.ambari.logfeeder.plugin.manager.InputManager; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logsearch.config.api.LogLevelFilterManager; -import org.apache.ambari.logsearch.config.api.LogLevelFilterUpdater; -import org.apache.ambari.logsearch.config.api.LogSearchConfigFactory; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.apache.ambari.logsearch.config.local.LogSearchConfigLogFeederLocal; -import org.apache.ambari.logsearch.config.solr.LogLevelFilterManagerSolr; -import org.apache.ambari.logsearch.config.solr.LogLevelFilterUpdaterSolr; -import org.apache.ambari.logsearch.config.zookeeper.LogLevelFilterManagerZK; -import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigLogFeederZK; -import org.apache.solr.client.solrj.SolrClient; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.DependsOn; -import org.springframework.context.annotation.PropertySource; -import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; - -import javax.inject.Inject; -import java.util.HashMap; - -@Configuration -@PropertySource(value = { - 
"classpath:" + LogFeederConstants.LOGFEEDER_PROPERTIES_FILE -}) -public class ApplicationConfig { - - @Inject - private LogFeederProps logFeederProps; - - @Bean - public static PropertySourcesPlaceholderConfigurer propertyConfigurer() { - return new PropertySourcesPlaceholderConfigurer(); - } - - @Bean - public LogFeederSecurityConfig logFeederSecurityConfig() { - return new LogFeederSecurityConfig(); - } - - @Bean - @DependsOn({"logSearchConfigLogFeeder", "propertyConfigurer"}) - public ConfigHandler configHandler() throws Exception { - return new ConfigHandler(logSearchConfigLogFeeder()); - } - - @Bean - @DependsOn("logFeederSecurityConfig") - public LogSearchConfigLogFeeder logSearchConfigLogFeeder() throws Exception { - if (logFeederProps.isUseLocalConfigs()) { - LogSearchConfigLogFeeder logfeederConfig = LogSearchConfigFactory.createLogSearchConfigLogFeeder( - Maps.fromProperties(logFeederProps.getProperties()), - logFeederProps.getClusterName(), - LogSearchConfigLogFeederLocal.class, false); - logfeederConfig.setLogLevelFilterManager(logLevelFilterManager()); - return logfeederConfig; - } else { - return LogSearchConfigFactory.createLogSearchConfigLogFeeder( - Maps.fromProperties(logFeederProps.getProperties()), - logFeederProps.getClusterName(), - LogSearchConfigLogFeederZK.class, false); - } - } - - @Bean - public LogLevelFilterManager logLevelFilterManager() throws Exception { - if (logFeederProps.isSolrFilterStorage()) { - SolrClient solrClient = new LogFeederSolrClientFactory().createSolrClient( - logFeederProps.getSolrZkConnectString(), logFeederProps.getSolrUrls(), "history"); - return new LogLevelFilterManagerSolr(solrClient); - } else if (logFeederProps.isUseLocalConfigs() && logFeederProps.isZkFilterStorage()) { - final HashMap map = new HashMap<>(); - for (final String name : logFeederProps.getProperties().stringPropertyNames()) { - map.put(name, logFeederProps.getProperties().getProperty(name)); - } - return new LogLevelFilterManagerZK(map); - } else { // no default filter manager - return null; - } - } - - @Bean - @DependsOn("logLevelFilterHandler") - public LogLevelFilterUpdater logLevelFilterUpdater() throws Exception { - if (logFeederProps.isSolrFilterStorage() && logFeederProps.isSolrFilterMonitor()) { - LogLevelFilterUpdater logLevelFilterUpdater = new LogLevelFilterUpdaterSolr( - "filter-updater-solr", logLevelFilterHandler(), - 30, (LogLevelFilterManagerSolr) logLevelFilterManager(), logFeederProps.getClusterName()); - logLevelFilterUpdater.start(); - return logLevelFilterUpdater; - } else { // no default filter updater - return null; - } - } - @Bean - public MetricsManager metricsManager() { - return new MetricsManager(); - } - - @Bean - @DependsOn("configHandler") - public LogLevelFilterHandler logLevelFilterHandler() throws Exception { - return new LogLevelFilterHandler(logSearchConfigLogFeeder()); - } - - @Bean - @DependsOn({"configHandler", "logSearchConfigLogFeeder", "logLevelFilterHandler"}) - public InputConfigUploader inputConfigUploader() { - return new InputConfigUploader(); - } - - @Bean - @DependsOn("inputConfigUploader") - public StatsLogger statsLogger() { - return new StatsLogger(); - } - - - @Bean - @DependsOn({"containerRegistry", "checkpointHandler"}) - public InputManager inputManager() { - return new InputManagerImpl(); - } - - @Bean - public OutputManager outputManager() { - return new OutputManagerImpl(); - } - - @Bean - public CheckpointManager checkpointHandler() { - return new FileCheckpointManager(); - } - - @Bean - public 
DockerContainerRegistry containerRegistry() { - if (logFeederProps.isDockerContainerRegistryEnabled()) { - return DockerContainerRegistry.getInstance(logFeederProps.getProperties()); - } else { - return null; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/InputSimulateConfig.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/InputSimulateConfig.java deleted file mode 100644 index 82880e24e1f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/InputSimulateConfig.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.conf; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Lazy; - -@Configuration -@Lazy -public class InputSimulateConfig { - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_INPUT_NUMBER_PROPERTY, - description = "The number of the simulator instances to run with. 
O means no simulation.", - examples = {"10"}, - defaultValue = LogFeederConstants.DEFAULT_SIMULATE_INPUT_NUMBER + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.SIMULATE_INPUT_NUMBER_PROPERTY + ":0}") - private Integer simulateInputNumber; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_LOG_LEVEL_PROPERTY, - description = "The log level to create the simulated log entries with.", - examples = {"INFO"}, - defaultValue = LogFeederConstants.DEFAULT_SIMULATE_LOG_LEVEL, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.SIMULATE_LOG_LEVEL_PROPERTY + ":"+ LogFeederConstants.DEFAULT_SIMULATE_LOG_LEVEL + "}") - private String simulateLogLevel; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_NUMBER_OF_WORDS_PROPERTY, - description = "The size of the set of words that may be used to create the simulated log entries with.", - examples = {"100"}, - defaultValue = LogFeederConstants.DEFAULT_SIMULATE_NUMBER_OF_WORDS + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SIMULATE_NUMBER_OF_WORDS_PROPERTY + ":" + LogFeederConstants.DEFAULT_SIMULATE_NUMBER_OF_WORDS + "}") - private Integer simulateNumberOfWords; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_MIN_LOG_WORDS_PROPERTY, - description = "The minimum number of words in a simulated log entry.", - examples = {"3"}, - defaultValue = LogFeederConstants.DEFAULT_SIMULATE_MIN_LOG_WORDS + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SIMULATE_MIN_LOG_WORDS_PROPERTY + ":" + LogFeederConstants.DEFAULT_SIMULATE_MIN_LOG_WORDS + "}") - private Integer simulateMinLogWords; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_MAX_LOG_WORDS_PROPERTY, - description = "The maximum number of words in a simulated log entry.", - examples = {"8"}, - defaultValue = LogFeederConstants.DEFAULT_SIMULATE_MAX_LOG_WORDS + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SIMULATE_MAX_LOG_WORDS_PROPERTY + ":" + LogFeederConstants.DEFAULT_SIMULATE_MAX_LOG_WORDS + "}") - private Integer simulateMaxLogWords; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_SLEEP_MILLISECONDS_PROPERTY, - description = "The milliseconds to sleep between creating two simulated log entries.", - examples = {"5000"}, - defaultValue = LogFeederConstants.DEFAULT_SIMULATE_SLEEP_MILLISECONDS + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SIMULATE_SLEEP_MILLISECONDS_PROPERTY + ":" + LogFeederConstants.DEFAULT_SIMULATE_SLEEP_MILLISECONDS + "}") - private Integer simulateSleepMilliseconds; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SIMULATE_LOG_IDS_PROPERTY, - description = "The comma separated list of log ids for which to create the simulated log entries.", - examples = {"ambari_server,zookeeper,infra_solr,logsearch_app"}, - defaultValue = "The log ids of the installed services in the cluster", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SIMULATE_LOG_IDS_PROPERTY + ":}") - private String simulateLogIds; - - public Integer getSimulateInputNumber() { - return simulateInputNumber; - } - - public void setSimulateInputNumber(Integer simulateInputNumber) { - this.simulateInputNumber = simulateInputNumber; - } - - public 
String getSimulateLogLevel() { - return simulateLogLevel; - } - - public void setSimulateLogLevel(String simulateLogLevel) { - this.simulateLogLevel = simulateLogLevel; - } - - public Integer getSimulateNumberOfWords() { - return simulateNumberOfWords; - } - - public void setSimulateNumberOfWords(Integer simulateNumberOfWords) { - this.simulateNumberOfWords = simulateNumberOfWords; - } - - public Integer getSimulateMinLogWords() { - return simulateMinLogWords; - } - - public void setSimulateMinLogWords(Integer simulateMinLogWords) { - this.simulateMinLogWords = simulateMinLogWords; - } - - public Integer getSimulateMaxLogWords() { - return simulateMaxLogWords; - } - - public void setSimulateMaxLogWords(Integer simulateMaxLogWords) { - this.simulateMaxLogWords = simulateMaxLogWords; - } - - public Integer getSimulateSleepMilliseconds() { - return simulateSleepMilliseconds; - } - - public void setSimulateSleepMilliseconds(Integer simulateSleepMilliseconds) { - this.simulateSleepMilliseconds = simulateSleepMilliseconds; - } - - public String getSimulateLogIds() { - return simulateLogIds; - } - - public void setSimulateLogIds(String simulateLogIds) { - this.simulateLogIds = simulateLogIds; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogEntryCacheConfig.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogEntryCacheConfig.java deleted file mode 100644 index e93f5be4b2f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogEntryCacheConfig.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.conf; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Lazy; - -@Configuration -@Lazy -public class LogEntryCacheConfig { - - @LogSearchPropertyDescription( - name = LogFeederConstants.CACHE_ENABLED_PROPERTY, - description = "Enables the usage of a cache to avoid duplications.", - examples = {"true"}, - defaultValue = LogFeederConstants.DEFAULT_CACHE_ENABLED + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CACHE_ENABLED_PROPERTY + ":" + LogFeederConstants.DEFAULT_CACHE_ENABLED + "}") - private boolean cacheEnabled; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CACHE_KEY_FIELD_PROPERTY, - description = "The field which's value should be cached and should be checked for repetitions.", - examples = {"some_field_prone_to_repeating_value"}, - defaultValue = LogFeederConstants.DEFAULT_CACHE_KEY_FIELD, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CACHE_KEY_FIELD_PROPERTY + ":" + LogFeederConstants.DEFAULT_CACHE_KEY_FIELD + "}") - private String cacheKeyField; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CACHE_SIZE_PROPERTY, - description = "The number of log entries to cache in order to avoid duplications.", - examples = {"50"}, - defaultValue = LogFeederConstants.DEFAULT_CACHE_SIZE + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CACHE_SIZE_PROPERTY + ":" + LogFeederConstants.DEFAULT_CACHE_SIZE + "}") - private Integer cacheSize; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CACHE_LAST_DEDUP_ENABLED_PROPERTY, - description = "Enable filtering directly repeating log entries irrelevant of the time spent between them.", - examples = {"true"}, - defaultValue = LogFeederConstants.DEFAULT_CACHE_LAST_DEDUP_ENABLED + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CACHE_LAST_DEDUP_ENABLED_PROPERTY + ":" + LogFeederConstants.DEFAULT_CACHE_LAST_DEDUP_ENABLED + "}") - private boolean cacheLastDedupEnabled; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CACHE_DEDUP_INTERVAL_PROPERTY, - description = "Maximum number of milliseconds between two identical messages to be filtered out.", - examples = {"500"}, - defaultValue = LogFeederConstants.DEFAULT_CACHE_DEDUP_INTERVAL + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CACHE_DEDUP_INTERVAL_PROPERTY + ":" + LogFeederConstants.DEFAULT_CACHE_DEDUP_INTERVAL + "}") - private String cacheDedupInterval; - - public boolean isCacheEnabled() { - return cacheEnabled; - } - - public void setCacheEnabled(boolean cacheEnabled) { - this.cacheEnabled = cacheEnabled; - } - - public String getCacheKeyField() { - return cacheKeyField; - } - - public void setCacheKeyField(String cacheKeyField) { - this.cacheKeyField = cacheKeyField; - } - - public Integer getCacheSize() { - return cacheSize; - } - - public void setCacheSize(Integer cacheSize) { - this.cacheSize = cacheSize; - } - - public boolean isCacheLastDedupEnabled() { - return this.cacheLastDedupEnabled; - } - - public void setCacheLastDedupEnabled(boolean cacheLastDedupEnabled) { - 
this.cacheLastDedupEnabled = cacheLastDedupEnabled; - } - - public String getCacheDedupInterval() { - return cacheDedupInterval; - } - - public void setCacheDedupInterval(String cacheDedupInterval) { - this.cacheDedupInterval = cacheDedupInterval; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java deleted file mode 100644 index 859de8f0bef..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.conf; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.plugin.common.LogFeederProperties; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.commons.lang.StringUtils; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.env.AbstractEnvironment; -import org.springframework.core.env.Environment; -import org.springframework.core.env.MapPropertySource; -import org.springframework.core.env.MutablePropertySources; -import org.springframework.core.io.support.ResourcePropertySource; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import java.util.Arrays; -import java.util.List; -import java.util.Properties; -import java.util.stream.Stream; - -@Configuration -public class LogFeederProps implements LogFeederProperties { - - @Inject - private Environment env; - - private Properties properties; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CLUSTER_NAME_PROPERTY, - description = "The name of the cluster the Log Feeder program runs in.", - examples = {"cl1"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("#{'${" + LogFeederConstants.CLUSTER_NAME_PROPERTY + "}'.toLowerCase()}") - private String clusterName; - - @LogSearchPropertyDescription( - name = LogFeederConstants.TMP_DIR_PROPERTY, - description = "The tmp dir used for creating temporary files.", - examples = {"/tmp/"}, - defaultValue = "java.io.tmpdir", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.TMP_DIR_PROPERTY + ":#{systemProperties['java.io.tmpdir']}}") - private String tmpDir; - - @LogSearchPropertyDescription( - name = LogFeederConstants.LOG_FILTER_ENABLE_PROPERTY, - description = "Enables the filtering of the log entries by log level filters.", - examples = {"true"}, - defaultValue = "false", - sources = 
{LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.LOG_FILTER_ENABLE_PROPERTY + "}") - private boolean logLevelFilterEnabled; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SOLR_IMPLICIT_ROUTING_PROPERTY, - description = "Use implicit routing for Solr Collections.", - examples = {"true"}, - defaultValue = "false", - sources = {LogFeederConstants.SOLR_IMPLICIT_ROUTING_PROPERTY} - ) - @Value("${"+ LogFeederConstants.SOLR_IMPLICIT_ROUTING_PROPERTY + ":false}") - private boolean solrImplicitRouting; - - @LogSearchPropertyDescription( - name = LogFeederConstants.INCLUDE_DEFAULT_LEVEL_PROPERTY, - description = "Comma separated list of the default log levels to be enabled by the filtering.", - examples = {"FATAL,ERROR,WARN"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("#{'${" + LogFeederConstants.INCLUDE_DEFAULT_LEVEL_PROPERTY + ":}'.split(',')}") - private List includeDefaultLogLevels; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CONFIG_DIR_PROPERTY, - description = "The directory where shipper configuration files are looked for.", - examples = {"/usr/lib/ambari-logsearch-logfeeder/conf"}, - defaultValue = "/usr/lib/ambari-logsearch-logfeeder/conf", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.CONFIG_DIR_PROPERTY + ":/usr/lib/ambari-logsearch-logfeeder/conf}") - private String confDir; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CONFIG_FILES_PROPERTY, - description = "Comma separated list of the config files containing global / output configurations.", - examples = {"global.json,output.json", "/usr/lib/ambari-logsearch-logfeeder/conf/global.config.json"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.CONFIG_FILES_PROPERTY + ":}") - private String configFiles; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CHECKPOINT_EXTENSION_PROPERTY, - description = "The extension used for checkpoint files.", - examples = {"ckp"}, - defaultValue = LogFeederConstants.DEFAULT_CHECKPOINT_EXTENSION, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CHECKPOINT_EXTENSION_PROPERTY + ":" + LogFeederConstants.DEFAULT_CHECKPOINT_EXTENSION + "}") - private String checkPointExtension; - - @LogSearchPropertyDescription( - name = LogFeederConstants.CHECKPOINT_FOLDER_PROPERTY, - description = "The folder where checkpoint files are stored.", - examples = {"/usr/lib/ambari-logsearch-logfeeder/conf/checkpoints"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.CHECKPOINT_FOLDER_PROPERTY + ":/usr/lib/ambari-logsearch-logfeeder/conf/checkpoints}") - public String checkpointFolder; - - @LogSearchPropertyDescription( - name = LogFeederConstants.DOCKER_CONTAINER_REGISTRY_ENABLED_PROPERTY, - description = "Enable to monitor docker containers and store their metadata in an in-memory registry.", - examples = {"true"}, - defaultValue = LogFeederConstants.DOCKER_CONTAINER_REGISTRY_ENABLED_DEFAULT + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.DOCKER_CONTAINER_REGISTRY_ENABLED_PROPERTY + ":false}") - public boolean dockerContainerRegistryEnabled; - - @LogSearchPropertyDescription( - name = LogFeederConstants.USE_LOCAL_CONFIGS_PROPERTY, - description = "Monitor local input.config-*.json files (do not upload them to zookeeper or solr)", - examples = 
{"true"}, - defaultValue = LogFeederConstants.USE_LOCAL_CONFIGS_DEFAULT + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.USE_LOCAL_CONFIGS_PROPERTY + ":" + LogFeederConstants.USE_LOCAL_CONFIGS_DEFAULT +"}") - public boolean useLocalConfigs; - - @LogSearchPropertyDescription( - name = LogFeederConstants.USE_SOLR_FILTER_STORAGE_PROPERTY, - description = "Use solr as a log level filter storage", - examples = {"true"}, - defaultValue = LogFeederConstants.USE_SOLR_FILTER_STORAGE_DEFAULT + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.USE_SOLR_FILTER_STORAGE_PROPERTY + ":" + LogFeederConstants.USE_SOLR_FILTER_STORAGE_DEFAULT +"}") - public boolean solrFilterStorage; - - @LogSearchPropertyDescription( - name = LogFeederConstants.USE_ZK_FILTER_STORAGE_PROPERTY, - description = "Use zk as a log level filter storage (works only with local config)", - examples = {"true"}, - defaultValue = LogFeederConstants.USE_ZK_FILTER_STORAGE_DEFAULT + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.USE_ZK_FILTER_STORAGE_PROPERTY + ":" + LogFeederConstants.USE_ZK_FILTER_STORAGE_DEFAULT +"}") - public boolean zkFilterStorage; - - @LogSearchPropertyDescription( - name = LogFeederConstants.MONITOR_SOLR_FILTER_STORAGE_PROPERTY, - description = "Monitor log level filters (in solr) periodically - used for checking updates.", - examples = {"false"}, - defaultValue = LogFeederConstants.MONITOR_SOLR_FILTER_STORAGE_DEFAULT + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.MONITOR_SOLR_FILTER_STORAGE_PROPERTY + ":" + LogFeederConstants.MONITOR_SOLR_FILTER_STORAGE_DEFAULT +"}") - public boolean solrFilterMonitor; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SOLR_ZK_CONNECTION_STRING, - description = "Zookeeper connection string for Solr.", - examples = {"localhost1:2181,localhost2:2181/mysolr_znode"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SOLR_ZK_CONNECTION_STRING + ":}") - private String solrZkConnectString; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SOLR_URLS, - description = "Comma separated solr urls (with protocol and port), override "+ LogFeederConstants.SOLR_ZK_CONNECTION_STRING + " config", - examples = {"https://localhost1:8983/solr,https://localhost2:8983"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SOLR_URLS + ":}") - private String solrUrlsStr; - - @Inject - private LogEntryCacheConfig logEntryCacheConfig; - - @Inject - private InputSimulateConfig inputSimulateConfig; - - @Inject - private LogFeederSecurityConfig logFeederSecurityConfig; - - public String getClusterName() { - return clusterName; - } - - public void setClusterName(String clusterName) { - this.clusterName = clusterName; - } - - public Properties getProperties() { - return properties; - } - - public String getTmpDir() { - return tmpDir; - } - - public boolean isLogLevelFilterEnabled() { - return logLevelFilterEnabled; - } - - public List getIncludeDefaultLogLevels() { - return includeDefaultLogLevels; - } - - public String getConfDir() { - return confDir; - } - - public void setConfDir(String confDir) { - this.confDir = confDir; - } - - public String getConfigFiles() { - return configFiles; - } - - public void setConfigFiles(String configFiles) { - this.configFiles = configFiles; - } 
- - public LogEntryCacheConfig getLogEntryCacheConfig() { - return logEntryCacheConfig; - } - - public void setLogEntryCacheConfig(LogEntryCacheConfig logEntryCacheConfig) { - this.logEntryCacheConfig = logEntryCacheConfig; - } - - public InputSimulateConfig getInputSimulateConfig() { - return inputSimulateConfig; - } - - public void setInputSimulateConfig(InputSimulateConfig inputSimulateConfig) { - this.inputSimulateConfig = inputSimulateConfig; - } - - public LogFeederSecurityConfig getLogFeederSecurityConfig() { - return logFeederSecurityConfig; - } - - public void setLogFeederSecurityConfig(LogFeederSecurityConfig logFeederSecurityConfig) { - this.logFeederSecurityConfig = logFeederSecurityConfig; - } - - public String getCheckPointExtension() { - return checkPointExtension; - } - - public void setCheckPointExtension(String checkPointExtension) { - this.checkPointExtension = checkPointExtension; - } - - public String getCheckpointFolder() { - return checkpointFolder; - } - - public void setCheckpointFolder(String checkpointFolder) { - this.checkpointFolder = checkpointFolder; - } - - public boolean isSolrImplicitRouting() { - return solrImplicitRouting; - } - - public void setSolrImplicitRouting(boolean solrImplicitRouting) { - this.solrImplicitRouting = solrImplicitRouting; - } - - public boolean isDockerContainerRegistryEnabled() { - return dockerContainerRegistryEnabled; - } - - public void setDockerContainerRegistryEnabled(boolean dockerContainerRegistryEnabled) { - this.dockerContainerRegistryEnabled = dockerContainerRegistryEnabled; - } - - public boolean isUseLocalConfigs() { - return this.useLocalConfigs; - } - - public void setUseLocalConfigs(boolean useLocalConfigs) { - this.useLocalConfigs = useLocalConfigs; - } - - public boolean isSolrFilterStorage() { - return solrFilterStorage; - } - - public void setSolrFilterStorage(boolean solrFilterStorage) { - this.solrFilterStorage = solrFilterStorage; - } - - public String getSolrZkConnectString() { - return solrZkConnectString; - } - - public void setSolrZkConnectString(String solrZkConnectString) { - this.solrZkConnectString = solrZkConnectString; - } - - public boolean isSolrFilterMonitor() { - return solrFilterMonitor; - } - - public void setSolrFilterMonitor(boolean solrFilterMonitor) { - this.solrFilterMonitor = solrFilterMonitor; - } - - public String getSolrUrlsStr() { - return this.solrUrlsStr; - } - - public void setSolrUrlsStr(String solrUrlsStr) { - this.solrUrlsStr = solrUrlsStr; - } - - public boolean isZkFilterStorage() { - return zkFilterStorage; - } - - public void setZkFilterStorage(boolean zkFilterStorage) { - this.zkFilterStorage = zkFilterStorage; - } - - public String[] getSolrUrls() { - if (StringUtils.isNotBlank(this.solrUrlsStr)) { - return this.solrUrlsStr.split(","); - } - return null; - } - - @PostConstruct - public void init() { - properties = new Properties(); - MutablePropertySources propSrcs = ((AbstractEnvironment) env).getPropertySources(); - ResourcePropertySource propertySource = (ResourcePropertySource) propSrcs.get("class path resource [" + - LogFeederConstants.LOGFEEDER_PROPERTIES_FILE + "]"); - if (propertySource != null) { - Stream.of(propertySource) - .map(MapPropertySource::getPropertyNames) - .flatMap(Arrays::stream) - .forEach(propName -> properties.setProperty(propName, env.getProperty(propName))); - } else { - throw new IllegalArgumentException("Cannot find logfeeder.properties on the classpath"); - } - } - -} diff --git 
a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederSecurityConfig.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederSecurityConfig.java deleted file mode 100644 index faa035980ae..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederSecurityConfig.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.conf; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang3.ArrayUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Value; - -import javax.annotation.PostConstruct; -import java.io.File; -import java.nio.charset.Charset; - -public class LogFeederSecurityConfig { - - private static final Logger LOG = LoggerFactory.getLogger(LogFeederSecurityConfig.class); - - private static final String KEYSTORE_LOCATION_ARG = "javax.net.ssl.keyStore"; - private static final String TRUSTSTORE_LOCATION_ARG = "javax.net.ssl.trustStore"; - private static final String KEYSTORE_TYPE_ARG = "javax.net.ssl.keyStoreType"; - private static final String TRUSTSTORE_TYPE_ARG = "javax.net.ssl.trustStoreType"; - private static final String KEYSTORE_PASSWORD_ARG = "javax.net.ssl.keyStorePassword"; - private static final String TRUSTSTORE_PASSWORD_ARG = "javax.net.ssl.trustStorePassword"; - private static final String KEYSTORE_PASSWORD_PROPERTY_NAME = "logfeeder_keystore_password"; - private static final String TRUSTSTORE_PASSWORD_PROPERTY_NAME = "logfeeder_truststore_password"; - private static final String KEYSTORE_PASSWORD_FILE = "ks_pass.txt"; - private static final String TRUSTSTORE_PASSWORD_FILE = "ts_pass.txt"; - - private static final String LOGFEEDER_CERT_DEFAULT_FOLDER = "/usr/lib/ambari-logsearch-logfeeder/conf/keys"; - private static final String LOGFEEDER_STORE_DEFAULT_PASSWORD = "bigdata"; - - private static final String CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY = "hadoop.security.credential.provider.path"; - - @LogSearchPropertyDescription( - name = CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY, - description = "The jceks file that provides passwords.", - examples = {"jceks://file/etc/ambari-logsearch-logfeeder/conf/logfeeder.jceks"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY + ":}") - private String credentialStoreProviderPath; - - @LogSearchPropertyDescription( - name = 
LogFeederConstants.SOLR_JAAS_FILE_PROPERTY, - description = "The jaas file used for solr.", - examples = {"/usr/lib/ambari-logsearch-logfeeder/conf/logfeeder_jaas.conf"}, - defaultValue = LogFeederConstants.DEFAULT_SOLR_JAAS_FILE, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.SOLR_JAAS_FILE_PROPERTY + ":" + LogFeederConstants.DEFAULT_SOLR_JAAS_FILE + "}") - private String solrJaasFile; - - @LogSearchPropertyDescription( - name = LogFeederConstants.SOLR_KERBEROS_ENABLE_PROPERTY, - description = "Enables using kerberos for accessing solr.", - examples = {"true"}, - defaultValue = LogFeederConstants.DEFAULT_SOLR_KERBEROS_ENABLE + "", - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${"+ LogFeederConstants.SOLR_KERBEROS_ENABLE_PROPERTY + ":" + LogFeederConstants.DEFAULT_SOLR_KERBEROS_ENABLE + "}") - private Boolean solrKerberosEnabled; - - public String getKeyStoreLocation() { - return System.getProperty(KEYSTORE_LOCATION_ARG); - } - - public String getKeyStoreType() { - return System.getProperty(KEYSTORE_TYPE_ARG); - } - - public String getKeyStorePassword() { - return System.getProperty(KEYSTORE_PASSWORD_ARG); - } - - public String getTrustStoreLocation() { - return System.getProperty(TRUSTSTORE_LOCATION_ARG); - } - - public String getTrustStoreType() { - return System.getProperty(TRUSTSTORE_TYPE_ARG); - } - - public String getTrustStorePassword() { - return System.getProperty(TRUSTSTORE_PASSWORD_ARG); - } - - public String getCredentialStoreProviderPath() { - return credentialStoreProviderPath; - } - - public void setCredentialStoreProviderPath(String credentialStoreProviderPath) { - this.credentialStoreProviderPath = credentialStoreProviderPath; - } - - public String getSolrJaasFile() { - return solrJaasFile; - } - - public void setSolrJaasFile(String solrJaasFile) { - this.solrJaasFile = solrJaasFile; - } - - public boolean isSolrKerberosEnabled() { - return solrKerberosEnabled; - } - - public void setSolrKerberosEnabled(Boolean solrKerberosEnabled) { - this.solrKerberosEnabled = solrKerberosEnabled; - } - - @PostConstruct - public void ensureStorePasswords() { - ensureStorePassword(KEYSTORE_LOCATION_ARG, KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_PROPERTY_NAME, KEYSTORE_PASSWORD_FILE); - ensureStorePassword(TRUSTSTORE_LOCATION_ARG, TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_PROPERTY_NAME, TRUSTSTORE_PASSWORD_FILE); - } - - private void ensureStorePassword(String locationArg, String pwdArg, String propertyName, String fileName) { - if (StringUtils.isNotEmpty(System.getProperty(locationArg)) && StringUtils.isEmpty(System.getProperty(pwdArg))) { - String password = getPassword(propertyName, fileName); - System.setProperty(pwdArg, password); - } - } - - private String getPassword(String propertyName, String fileName) { - String credentialStorePassword = getPasswordFromCredentialStore(propertyName); - if (credentialStorePassword != null) { - return credentialStorePassword; - } - - String filePassword = getPasswordFromFile(fileName); - if (filePassword != null) { - return filePassword; - } - - return LOGFEEDER_STORE_DEFAULT_PASSWORD; - } - - private String getPasswordFromCredentialStore(String propertyName) { - try { - if (StringUtils.isEmpty(credentialStoreProviderPath)) { - return null; - } - - org.apache.hadoop.conf.Configuration config = new org.apache.hadoop.conf.Configuration(); - config.set(CREDENTIAL_STORE_PROVIDER_PATH_PROPERTY, credentialStoreProviderPath); - char[] passwordChars = 
config.getPassword(propertyName); - return (ArrayUtils.isNotEmpty(passwordChars)) ? new String(passwordChars) : null; - } catch (Exception e) { - LOG.warn(String.format("Could not load password %s from credential store, using default password", propertyName)); - return null; - } - } - - private String getPasswordFromFile(String fileName) { - try { - File pwdFile = new File(LOGFEEDER_CERT_DEFAULT_FOLDER, fileName); - if (!pwdFile.exists()) { - FileUtils.writeStringToFile(pwdFile, LOGFEEDER_STORE_DEFAULT_PASSWORD, Charset.defaultCharset()); - return LOGFEEDER_STORE_DEFAULT_PASSWORD; - } else { - return FileUtils.readFileToString(pwdFile, Charset.defaultCharset()); - } - } catch (Exception e) { - LOG.warn("Exception occurred during read/write password file for keystore/truststore.", e); - return null; - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/MetricsCollectorConfig.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/MetricsCollectorConfig.java deleted file mode 100644 index 23c8a8abd84..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/MetricsCollectorConfig.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.conf; - -import com.google.common.base.Splitter; -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.commons.lang.StringUtils; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Lazy; - -import javax.annotation.PostConstruct; -import java.util.List; - -@Configuration -@Lazy -public class MetricsCollectorConfig { - - @LogSearchPropertyDescription( - name = LogFeederConstants.METRICS_COLLECTOR_HOSTS_PROPERTY, - description = "Comma separtaed list of metric collector hosts.", - examples = {"c6401.ambari.apache.org,c6402.ambari.apache.org"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.METRICS_COLLECTOR_HOSTS_PROPERTY + ":}") - private String hostsString; - - private List hosts; - - @LogSearchPropertyDescription( - name = LogFeederConstants.METRICS_COLLECTOR_PROTOCOL_PROPERTY, - description = "The protocol used by metric collectors.", - examples = {"http", "https"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.METRICS_COLLECTOR_PROTOCOL_PROPERTY + ":#{NULL}}") - private String protocol; - - @LogSearchPropertyDescription( - name = LogFeederConstants.METRICS_COLLECTOR_PORT_PROPERTY, - description = "The port used by metric collectors.", - examples = {"6188"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.METRICS_COLLECTOR_PORT_PROPERTY + ":#{NULL}}") - private String port; - - @LogSearchPropertyDescription( - name = LogFeederConstants.METRICS_COLLECTOR_PATH_PROPERTY, - description = "The path used by metric collectors.", - examples = {"/ws/v1/timeline/metrics"}, - sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE} - ) - @Value("${" + LogFeederConstants.METRICS_COLLECTOR_PATH_PROPERTY + ":#{NULL}}") - private String path; - - public List getHosts() { - return hosts; - } - - public void setHosts(List hosts) { - this.hosts = hosts; - } - - public String getProtocol() { - return protocol; - } - - public String getPort() { - return port; - } - - public void setPort(String port) { - this.port = port; - } - - public String getPath() { - return path; - } - - public void setPath(String path) { - this.path = path; - } - - public String getHostsString() { - return hostsString; - } - - @PostConstruct - public void init() { - if (StringUtils.isNotBlank(hostsString)) { - hosts = Splitter.on(',').splitToList(hostsString); - } else { - hosts = null; - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/DockerLogFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/DockerLogFilter.java deleted file mode 100644 index ab137759bb1..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/DockerLogFilter.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.filter; - -import org.apache.ambari.logfeeder.util.LogFeederUtil; - -import java.util.Map; - -public class DockerLogFilter { - - private DockerLogFilter() { - } - - public static String getLogFromDockerJson(String jsonInput) { - Map jsonMap = LogFeederUtil.toJSONObject(jsonInput); - return jsonMap.get("log").toString(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java deleted file mode 100644 index 5ed61ccd700..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.filter; - -import com.google.gson.reflect.TypeToken; -import oi.thekraken.grok.api.Grok; -import oi.thekraken.grok.api.exception.GrokException; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileDescriptor; -import org.apache.commons.lang3.BooleanUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.io.InputStream; -import java.io.InputStreamReader; -import java.lang.reflect.Type; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.regex.Pattern; - -public class FilterGrok extends Filter { - private static final Logger LOG = Logger.getLogger(FilterGrok.class); - - private static final String GROK_PATTERN_FILE = "grok-patterns"; - - private String messagePattern = null; - private String multilinePattern = null; - - private Grok grokMultiline = null; - private Grok grokMessage = null; - - private StringBuilder strBuff = null; - private String currMultilineJsonStr = null; - - private InputMarker savedInputMarker = null; - - private String sourceField = null; - private boolean removeSourceField = true; - - private Set namedParamList = new HashSet(); - private Set multiLineamedParamList = new HashSet(); - - private Type jsonType = new TypeToken>() {}.getType(); - - private MetricData grokErrorMetric = new MetricData("filter.error.grok", false); - - private boolean skipOnError = false; - - private boolean dockerEnabled = false; - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - super.init(logFeederProps); - - try { - messagePattern = escapePattern(((FilterGrokDescriptor)getFilterDescriptor()).getMessagePattern()); - multilinePattern = escapePattern(((FilterGrokDescriptor)getFilterDescriptor()).getMultilinePattern()); - sourceField = getFilterDescriptor().getSourceField(); - removeSourceField = BooleanUtils.toBooleanDefaultIfNull(getFilterDescriptor().isRemoveSourceField(), removeSourceField); - skipOnError = ((FilterGrokDescriptor) getFilterDescriptor()).isSkipOnError(); - if (logFeederProps.isDockerContainerRegistryEnabled()) { - Input input = getInput(); - if (input != null && input instanceof InputFile) { - dockerEnabled = BooleanUtils.toBooleanDefaultIfNull(((InputFileDescriptor) input.getInputDescriptor()).getDockerEnabled(), false); - } - } - - LOG.info("init() done. 
grokPattern=" + messagePattern + ", multilinePattern=" + multilinePattern + ", " + - getShortDescription()); - if (StringUtils.isEmpty(messagePattern)) { - LOG.error("message_pattern is not set for filter."); - return; - } - extractNamedParams(messagePattern, namedParamList); - - grokMessage = new Grok(); - loadPatterns(grokMessage); - grokMessage.compile(messagePattern); - if (((FilterGrokDescriptor)getFilterDescriptor()).isDeepExtract()) { - extractNamedParams(grokMessage.getNamedRegexCollection()); - } else { - extractNamedParams(messagePattern, namedParamList); - } - if (!StringUtils.isEmpty(multilinePattern)) { - extractNamedParams(multilinePattern, multiLineamedParamList); - - grokMultiline = new Grok(); - loadPatterns(grokMultiline); - grokMultiline.compile(multilinePattern); - } - } catch (Throwable t) { - LOG.fatal("Caught exception while initializing Grok. multilinePattern=" + multilinePattern + ", messagePattern=" - + messagePattern, t); - grokMessage = null; - grokMultiline = null; - } - - } - - private String escapePattern(String inPattern) { - String inStr = inPattern; - if (inStr != null) { - if (inStr.contains("(?m)") && !inStr.contains("(?s)")) { - inStr = inStr.replaceFirst("(?m)", "(?s)"); - } - } - return inStr; - } - - private void extractNamedParams(String patternStr, Set paramList) { - String grokRegEx = "%\\{" + - "(?" + "(?[A-z0-9]+)" + "(?::(?[A-z0-9_:]+))?" + ")" + - "(?:=(?" + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" + - "\\}"; - - Pattern pattern = Pattern.compile(grokRegEx); - java.util.regex.Matcher matcher = pattern.matcher(patternStr); - while (matcher.find()) { - String subname = matcher.group(3); - if (subname != null) { - paramList.add(subname); - } - } - } - - private void extractNamedParams(Map namedRegexCollection) { - if (namedRegexCollection != null) { - for (String paramValue : namedRegexCollection.values()) { - if (paramValue.toLowerCase().equals(paramValue)) { - namedParamList.add(paramValue); - } - } - } - } - - private boolean loadPatterns(Grok grok) { - InputStreamReader grokPatternsReader = null; - LOG.info("Loading pattern file " + GROK_PATTERN_FILE); - try { - InputStream fileInputStream = getClass().getClassLoader().getResourceAsStream(GROK_PATTERN_FILE); - if (fileInputStream == null) { - LOG.fatal("Couldn't load grok-patterns file " + GROK_PATTERN_FILE + ". Things will not work"); - return false; - } - grokPatternsReader = new InputStreamReader(fileInputStream); - } catch (Throwable t) { - LOG.fatal("Error reading grok-patterns file " + GROK_PATTERN_FILE + " from classpath. 
Grok filtering will not work.", t); - return false; - } - try { - grok.addPatternFromReader(grokPatternsReader); - } catch (GrokException e) { - LOG.fatal("Error loading patterns from grok-patterns reader for file " + GROK_PATTERN_FILE, e); - return false; - } - - return true; - } - - @Override - public void apply(String inputStr, InputMarker inputMarker) throws Exception { - if (dockerEnabled) { - inputStr = DockerLogFilter.getLogFromDockerJson(inputStr); - } - if (grokMessage == null) { - return; - } - - if (grokMultiline != null) { - String jsonStr = grokMultiline.capture(inputStr); - if (!"{}".equals(jsonStr) || skipOnError) { - if (strBuff != null) { - Map jsonObj = Collections.synchronizedMap(new HashMap()); - try { - LogFeederUtil.fillMapWithFieldDefaults(jsonObj, inputMarker, false); - applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr); - } finally { - strBuff = null; - savedInputMarker = null; - } - } - currMultilineJsonStr = jsonStr; - } - - if (strBuff == null) { - strBuff = new StringBuilder(); - } else { - strBuff.append("\r\n"); - } - strBuff.append(inputStr); - savedInputMarker = inputMarker; - } else { - savedInputMarker = inputMarker; - Map jsonObj = Collections.synchronizedMap(new HashMap()); - LogFeederUtil.fillMapWithFieldDefaults(jsonObj, inputMarker, false); - applyMessage(inputStr, jsonObj, null); - } - } - - @Override - public void apply(Map jsonObj, InputMarker inputMarker) throws Exception { - if (sourceField != null) { - savedInputMarker = inputMarker; - LogFeederUtil.fillMapWithFieldDefaults(jsonObj, inputMarker, false); - applyMessage((String) jsonObj.get(sourceField), jsonObj, null); - if (removeSourceField) { - jsonObj.remove(sourceField); - } - } - } - - private void applyMessage(String inputStr, Map jsonObj, String multilineJsonStr) throws Exception { - String jsonStr = grokMessage.capture(inputStr); - - boolean parseError = false; - if ("{}".equals(jsonStr) && !skipOnError) { - parseError = true; - logParseError(inputStr); - - if (multilineJsonStr == null) { - // TODO: Should we just add this as raw message in solr? - return; - } - } - - if (parseError) { - jsonStr = multilineJsonStr; - } - Map jsonSrc = LogFeederUtil.getGson().fromJson(jsonStr, jsonType); - for (String namedParam : namedParamList) { - if (jsonSrc.get(namedParam) != null) { - jsonObj.put(namedParam, jsonSrc.get(namedParam)); - } - } - if (parseError) { - @SuppressWarnings("unchecked") - List tagsList = (List) jsonObj.get("tags"); - if (tagsList == null) { - tagsList = new ArrayList(); - jsonObj.put("tags", tagsList); - } - tagsList.add("error_grok_parsing"); - if (sourceField == null) { - // For now let's put the raw message in log_message, so it is will be searchable - jsonObj.put("log_message", inputStr); - } - } - super.apply(jsonObj, savedInputMarker); - statMetric.value++; - } - - private void logParseError(String inputStr) { - grokErrorMetric.value++; - String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR"; - int inputStrLength = inputStr != null ? inputStr.length() : 0; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStrLength + ", input=" + - getInput().getShortDescription() + ". 
First upto 100 characters=" + StringUtils.abbreviate(inputStr, 100), null, LOG, - Level.WARN); - } - - @Override - public void flush() { - if (strBuff != null) { - Map jsonObj = Collections.synchronizedMap(new HashMap()); - try { - applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr); - } catch (Exception e) { - LOG.error(e.getLocalizedMessage(), e.getCause()); - } - strBuff = null; - savedInputMarker = null; - } - super.flush(); - } - - @Override - public String getShortDescription() { - return "filter:filter=grok,regex=" + messagePattern; - } - - @Override - public void addMetricsContainers(List metricsList) { - super.addMetricsContainers(metricsList); - metricsList.add(grokErrorMetric); - } - - @Override - public void logStat() { - super.logStat(); - logStatForMetric(grokErrorMetric, "Stat: Grok Errors"); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java deleted file mode 100644 index 207d6f89fe1..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.filter; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.common.LogFeederException; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.util.DateUtil; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; - -public class FilterJSON extends Filter { - - private static final Logger LOG = LoggerFactory.getLogger(FilterJSON.class); - - @Override - public void apply(String inputStr, InputMarker inputMarker) throws Exception { - Map jsonMap = null; - try { - jsonMap = LogFeederUtil.toJSONObject(inputStr); - } catch (Exception e) { - LOG.error(e.getLocalizedMessage()); - throw new LogFeederException("Json parsing failed for inputstr = " + inputStr ,e.getCause()); - } - Double lineNumberD = (Double) jsonMap.get("line_number"); - if (lineNumberD != null) { - long lineNumber = lineNumberD.longValue(); - jsonMap.put("line_number", lineNumber); - } - String timeStampStr = (String) jsonMap.get("logtime"); - if (timeStampStr != null && !timeStampStr.isEmpty()) { - String logtime = DateUtil.getDate(timeStampStr); - jsonMap.put("logtime", logtime); - jsonMap.put(LogFeederConstants.IN_MEMORY_TIMESTAMP, Long.parseLong(timeStampStr)); - } - super.apply(jsonMap, inputMarker); - } - - @Override - public String getShortDescription() { - return "filter:filter=json,input=" + getInput().getShortDescription(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java deleted file mode 100644 index 695c7e376d0..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.filter; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -public class FilterKeyValue extends Filter { - - private static final Logger LOG = Logger.getLogger(FilterKeyValue.class); - - private String sourceField = null; - private String valueSplit = "="; - private String fieldSplit = "\t"; - private String valueBorders = null; - - private MetricData errorMetric = new MetricData("filter.error.keyvalue", false); - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - super.init(logFeederProps); - - sourceField = getFilterDescriptor().getSourceField(); - valueSplit = StringUtils.defaultString(((FilterKeyValueDescriptor)getFilterDescriptor()).getValueSplit(), valueSplit); - fieldSplit = StringUtils.defaultString(((FilterKeyValueDescriptor)getFilterDescriptor()).getFieldSplit(), fieldSplit); - valueBorders = ((FilterKeyValueDescriptor)getFilterDescriptor()).getValueBorders(); - - LOG.info("init() done. source_field=" + sourceField + ", value_split=" + valueSplit + ", " + ", field_split=" + - fieldSplit + ", " + getShortDescription()); - if (StringUtils.isEmpty(sourceField)) { - LOG.fatal("source_field is not set for filter. Thiss filter will not be applied"); - return; - } - } - - @Override - public void apply(String inputStr, InputMarker inputMarker) throws Exception { - apply(LogFeederUtil.toJSONObject(inputStr), inputMarker); - } - - @Override - public void apply(Map jsonObj, InputMarker inputMarker) throws Exception { - if (sourceField == null) { - return; - } - if (jsonObj.containsKey(sourceField)) { - String keyValueString = (String) jsonObj.get(sourceField); - Map valueMap = new HashMap<>(); - if (valueBorders != null) { - keyValueString = preProcessBorders(keyValueString, valueMap); - } - - String splitPattern = Pattern.quote(fieldSplit); - String[] tokens = keyValueString.split(splitPattern); - for (String nv : tokens) { - String[] nameValue = getNameValue(nv); - String name = nameValue != null && nameValue.length == 2 ? nameValue[0] : null; - String value = nameValue != null && nameValue.length == 2 ? 
nameValue[1] : null; - if (name != null && value != null) { - if (valueMap.containsKey(value)) { - value = valueMap.get(value); - } - jsonObj.put(name, value); - } else { - logParseError("name=" + name + ", pair=" + nv + ", field=" + sourceField + ", field_value=" + keyValueString); - } - } - } - super.apply(jsonObj, inputMarker); - statMetric.value++; - } - - private String preProcessBorders(String keyValueString, Map valueMap) { - char openBorder = valueBorders.charAt(0); - char closeBorder = valueBorders.charAt(1); - - StringBuilder processed = new StringBuilder(); - int lastPos = 0; - int openBorderNum = 0; - int valueNum = 0; - for (int pos = 0; pos < keyValueString.length(); pos++) { - char c = keyValueString.charAt(pos); - if (c == openBorder) { - if (openBorderNum == 0 ) { - processed.append(keyValueString.substring(lastPos, pos)); - lastPos = pos + 1; - } - openBorderNum++; - } - if (c == closeBorder) { - openBorderNum--; - if (openBorderNum == 0) { - String value = keyValueString.substring(lastPos, pos).trim(); - String valueId = "$VALUE" + (++valueNum); - valueMap.put(valueId, value); - processed.append(valueSplit + valueId); - lastPos = pos + 1; - } - } - } - - return processed.toString(); - } - - private String[] getNameValue(String nv) { - String splitPattern = Pattern.quote(valueSplit); - return nv.split(splitPattern, 2); - } - - private void logParseError(String inputStr) { - errorMetric.value++; - String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStr.length() + ", input=" + - getInput().getShortDescription() + ". First upto 200 characters=" + StringUtils.abbreviate(inputStr, 200), null, LOG, - Level.ERROR); - } - - @Override - public String getShortDescription() { - return "filter:filter=keyvalue,regex=" + sourceField; - } - - @Override - public void addMetricsContainers(List metricsList) { - super.addMetricsContainers(metricsList); - metricsList.add(errorMetric); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java deleted file mode 100644 index 0c551cdf449..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input; - -import com.google.common.io.Files; -import org.apache.ambari.logfeeder.loglevelfilter.LogLevelFilterHandler; -import org.apache.ambari.logfeeder.common.ConfigHandler; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logsearch.config.api.LogSearchConfigLogFeeder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import java.io.File; -import java.io.FilenameFilter; -import java.nio.charset.Charset; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class InputConfigUploader extends Thread { - protected static final Logger LOG = LoggerFactory.getLogger(InputConfigUploader.class); - - private static final long SLEEP_BETWEEN_CHECK = 2000; - - private File configDir; - private final FilenameFilter inputConfigFileFilter = (dir, name) -> name.startsWith("input.config-") && name.endsWith(".json"); - private final Set filesHandled = new HashSet<>(); - private final Pattern serviceNamePattern = Pattern.compile("input.config-(.+).json"); - - @Inject - private LogSearchConfigLogFeeder config; - - @Inject - private LogFeederProps logFeederProps; - - @Inject - private LogLevelFilterHandler logLevelFilterHandler; - - @Inject - private ConfigHandler configHandler; - - public InputConfigUploader() { - super("Input Config Loader"); - setDaemon(true); - } - - @PostConstruct - public void init() throws Exception { - this.configDir = new File(logFeederProps.getConfDir()); - this.start(); - config.monitorInputConfigChanges(configHandler, logLevelFilterHandler, logFeederProps.getClusterName()); - } - - @Override - public void run() { - while (true) { - File[] inputConfigFiles = configDir.listFiles(inputConfigFileFilter); - if (inputConfigFiles != null) { - for (File inputConfigFile : inputConfigFiles) { - if (!filesHandled.contains(inputConfigFile.getAbsolutePath())) { - try { - Matcher m = serviceNamePattern.matcher(inputConfigFile.getName()); - m.find(); - String serviceName = m.group(1); - String inputConfig = Files.toString(inputConfigFile, Charset.defaultCharset()); - if (!config.inputConfigExists(serviceName)) { - config.createInputConfig(logFeederProps.getClusterName(), serviceName, inputConfig); - } - filesHandled.add(inputConfigFile.getAbsolutePath()); - } catch (Exception e) { - LOG.warn("Error handling file " + inputConfigFile.getAbsolutePath(), e); - } - } - } - } else { - LOG.warn("Cannot find input config files in config dir ({})", logFeederProps.getConfDir()); - } - - try { - Thread.sleep(SLEEP_BETWEEN_CHECK); - } catch (InterruptedException e) { - LOG.debug("Interrupted during sleep", e); - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java deleted file mode 100644 index c31f4040546..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java +++ /dev/null @@ -1,616 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input; - -import org.apache.ambari.logfeeder.conf.LogEntryCacheConfig; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.docker.DockerContainerRegistry; -import org.apache.ambari.logfeeder.docker.DockerMetadata; -import org.apache.ambari.logfeeder.input.monitor.DockerLogFileUpdateMonitor; -import org.apache.ambari.logfeeder.input.monitor.LogFileDetachMonitor; -import org.apache.ambari.logfeeder.input.monitor.LogFilePathUpdateMonitor; -import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory; -import org.apache.ambari.logfeeder.input.file.ProcessFileHelper; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.util.FileUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileDescriptor; -import org.apache.commons.lang.BooleanUtils; -import org.apache.commons.lang.ObjectUtils; -import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.solr.common.util.Base64; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.util.*; - -public class InputFile extends Input { - - private static final Logger LOG = LoggerFactory.getLogger(InputFile.class); - - private static final boolean DEFAULT_TAIL = true; - private static final boolean DEFAULT_USE_EVENT_MD5 = false; - private static final boolean DEFAULT_GEN_EVENT_MD5 = true; - private static final int DEFAULT_CHECKPOINT_INTERVAL_MS = 5 * 1000; - - private static final int DEFAULT_DETACH_INTERVAL_MIN = 300; - private static final int DEFAULT_DETACH_TIME_MIN = 2000; - private static final int DEFAULT_LOG_PATH_UPDATE_INTERVAL_MIN = 5; - - private boolean isReady; - - private boolean tail; - - private String filePath; - private File[] logFiles; - private String logPath; - private Object fileKey; - private String base64FileKey; - private String checkPointExtension; - private int checkPointIntervalMS; - private int detachIntervalMin; - private int detachTimeMin; - private int pathUpdateIntervalMin; - private Integer maxAgeMin; - - private Map checkPointFiles = new HashMap<>(); - private Map lastCheckPointTimeMSs = new HashMap<>(); - private Map> jsonCheckPoints = new HashMap<>(); - private Map lastCheckPointInputMarkers = new HashMap<>(); - - private Thread thread; - private Thread logFileDetacherThread; - private Thread logFilePathUpdaterThread; - private Thread dockerLogFileUpdateMonitorThread; - private ThreadGroup threadGroup; - - private boolean multiFolder = false; - private boolean dockerLog = false; - private boolean dockerLogParent = true; - private DockerContainerRegistry dockerContainerRegistry; - private Map> folderMap; - private Map 
inputChildMap = new HashMap<>(); - - @Override - public boolean isReady() { - if (!isReady) { - if (dockerLog) { - if (dockerContainerRegistry != null) { - Map> metadataMap = dockerContainerRegistry.getContainerMetadataMap(); - String logType = getLogType(); - if (metadataMap.containsKey(logType)) { - isReady = true; - } - } else { - LOG.warn("Docker registry is not set, probably docker registry usage is not enabled."); - } - } else { - logFiles = getActualInputLogFiles(); - Map> foldersMap = FileUtil.getFoldersForFiles(logFiles); - setFolderMap(foldersMap); - if (!ArrayUtils.isEmpty(logFiles) && logFiles[0].isFile()) { - if (tail && logFiles.length > 1) { - LOG.warn("Found multiple files (" + logFiles.length + ") for the file filter " + filePath + - ". Will follow only the first one. Using " + logFiles[0].getAbsolutePath()); - } - LOG.info("File filter " + filePath + " expanded to " + logFiles[0].getAbsolutePath()); - isReady = true; - } else { - LOG.debug(logPath + " file doesn't exist. Ignoring for now"); - } - } - } - return isReady; - } - - @Override - public void setReady(boolean isReady) { - this.isReady = isReady; - } - - @Override - public String getNameForThread() { - if (filePath != null) { - try { - return (getType() + "=" + (new File(filePath)).getName()); - } catch (Throwable ex) { - LOG.warn("Couldn't get basename for filePath=" + filePath, ex); - } - } - return super.getNameForThread() + ":" + getType(); - } - - @Override - public synchronized void checkIn(InputFileMarker inputMarker) { - getInputManager().getCheckpointHandler().checkIn(this, inputMarker); - } - - @Override - public void lastCheckIn() { - for (InputFileMarker lastCheckPointInputMarker : lastCheckPointInputMarkers.values()) { - checkIn(lastCheckPointInputMarker); - } - } - - @Override - public String getStatMetricName() { - return "input.files.read_lines"; - } - - @Override - public String getReadBytesMetricName() { - return "input.files.read_bytes"; - } - - @Override - public boolean monitor() { - if (isReady()) { - if (dockerLog && dockerLogParent) { - Map> metadataMap = dockerContainerRegistry.getContainerMetadataMap(); - String logType = getLogType(); - threadGroup = new ThreadGroup("docker-parent-" + logType); - if (metadataMap.containsKey(logType)) { - Map dockerMetadataMap = metadataMap.get(logType); - for (Map.Entry dockerMetadataEntry : dockerMetadataMap.entrySet()) { - try { - startNewChildDockerInputFileThread(dockerMetadataEntry.getValue()); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - dockerLogFileUpdateMonitorThread = new Thread(new DockerLogFileUpdateMonitor((InputFile) this, pathUpdateIntervalMin, detachTimeMin), "docker_logfiles_updater=" + logType); - dockerLogFileUpdateMonitorThread.setDaemon(true); - dockerLogFileUpdateMonitorThread.start(); - } - } - else if (multiFolder) { - try { - threadGroup = new ThreadGroup(getNameForThread()); - if (getFolderMap() != null) { - for (Map.Entry> folderFileEntry : getFolderMap().entrySet()) { - startNewChildInputFileThread(folderFileEntry); - } - logFilePathUpdaterThread = new Thread(new LogFilePathUpdateMonitor((InputFile) this, pathUpdateIntervalMin, detachTimeMin), "logfile_path_updater=" + filePath); - logFilePathUpdaterThread.setDaemon(true); - logFileDetacherThread = new Thread(new LogFileDetachMonitor((InputFile) this, detachIntervalMin, detachTimeMin), "logfile_detacher=" + filePath); - logFileDetacherThread.setDaemon(true); - - logFilePathUpdaterThread.start(); - logFileDetacherThread.start(); - } - } catch (Exception 
e) { - throw new RuntimeException(e); - } - } else { - LOG.info("Starting thread. " + getShortDescription()); - thread = new Thread(this, getNameForThread()); - thread.start(); - } - return true; - } else { - return false; - } - } - - @Override - public InputFileMarker getInputMarker() { - // TODO: use this - return null; - } - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - super.init(logFeederProps); - LOG.info("init() called"); - - InputFileDescriptor inputFileDescriptor = (InputFileDescriptor) getInputDescriptor(); // cast as InputS3 uses InputFileBaseDescriptor - checkPointExtension = logFeederProps.getCheckPointExtension(); - checkPointIntervalMS = (int) ObjectUtils.defaultIfNull(inputFileDescriptor.getCheckpointIntervalMs(), DEFAULT_CHECKPOINT_INTERVAL_MS); - detachIntervalMin = (int) ObjectUtils.defaultIfNull(inputFileDescriptor.getDetachIntervalMin(), DEFAULT_DETACH_INTERVAL_MIN * 60); - detachTimeMin = (int) ObjectUtils.defaultIfNull(inputFileDescriptor.getDetachTimeMin(), DEFAULT_DETACH_TIME_MIN * 60); - pathUpdateIntervalMin = (int) ObjectUtils.defaultIfNull(inputFileDescriptor.getPathUpdateIntervalMin(), DEFAULT_LOG_PATH_UPDATE_INTERVAL_MIN * 60); - maxAgeMin = (int) ObjectUtils.defaultIfNull(inputFileDescriptor.getMaxAgeMin(), 0); - boolean initDefaultFields = BooleanUtils.toBooleanDefaultIfNull(inputFileDescriptor.isInitDefaultFields(), false); - setInitDefaultFields(initDefaultFields); - - // Let's close the file and set it to true after we start monitoring it - setClosed(true); - dockerLog = BooleanUtils.toBooleanDefaultIfNull(inputFileDescriptor.getDockerEnabled(), false); - if (dockerLog) { - if (logFeederProps.isDockerContainerRegistryEnabled()) { - boolean isFileReady = isReady(); - LOG.info("Container type to monitor " + getType() + ", tail=" + tail + ", isReady=" + isFileReady); - } else { - LOG.warn("Using docker input, but docker registry usage is not enabled."); - } - } else { - logPath = getInputDescriptor().getPath(); - if (StringUtils.isEmpty(logPath)) { - LOG.error("path is empty for file input. 
" + getShortDescription()); - return; - } - - setFilePath(logPath); - // Check there can have pattern in folder - if (getFilePath() != null && getFilePath().contains("/")) { - int lastIndexOfSlash = getFilePath().lastIndexOf("/"); - String folderBeforeLogName = getFilePath().substring(0, lastIndexOfSlash); - if (folderBeforeLogName.contains("*")) { - LOG.info("Found regex in folder path ('" + getFilePath() + "'), will check against multiple folders."); - setMultiFolder(true); - } - } - boolean isFileReady = isReady(); - LOG.info("File to monitor " + logPath + ", tail=" + tail + ", isReady=" + isFileReady); - } - - LogEntryCacheConfig cacheConfig = logFeederProps.getLogEntryCacheConfig(); - initCache( - cacheConfig.isCacheEnabled(), - cacheConfig.getCacheKeyField(), - cacheConfig.getCacheSize(), - cacheConfig.isCacheLastDedupEnabled(), - cacheConfig.getCacheDedupInterval(), - getFilePath()); - - tail = BooleanUtils.toBooleanDefaultIfNull(getInputDescriptor().isTail(), DEFAULT_TAIL); - setUseEventMD5(BooleanUtils.toBooleanDefaultIfNull(getInputDescriptor().isUseEventMd5AsId(), DEFAULT_USE_EVENT_MD5)); - setGenEventMD5(BooleanUtils.toBooleanDefaultIfNull(getInputDescriptor().isGenEventMd5(), DEFAULT_GEN_EVENT_MD5)); - } - - @Override - public void start() throws Exception { - boolean isProcessFile = BooleanUtils.toBooleanDefaultIfNull(getInputDescriptor().getProcessFile(), true); - if (isProcessFile) { - for (int i = logFiles.length - 1; i >= 0; i--) { - File file = logFiles[i]; - if (i == 0 || !tail) { - try { - processFile(file, i == 0); - if (isClosed() || isDrain()) { - LOG.info("isClosed or isDrain. Now breaking loop."); - break; - } - } catch (Throwable t) { - LOG.error("Error processing file=" + file.getAbsolutePath(), t); - } - } - } - close(); - } else { - copyFiles(logFiles); - } - } - - public int getResumeFromLineNumber() { - return this.getInputManager().getCheckpointHandler().resumeLineNumber(this); - } - - public void processFile(File logPathFile, boolean follow) throws Exception { - ProcessFileHelper.processFile(this, logPathFile, follow); - } - - public BufferedReader openLogFile(File logFile) throws Exception { - BufferedReader br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logFile)); - fileKey = getFileKeyFromLogFile(logFile); - base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes()); - LOG.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". " + getShortDescription()); - return br; - } - - public Object getFileKeyFromLogFile(File logFile) { - return FileUtil.getFileKey(logFile); - } - - private void copyFiles(File[] files) { - boolean isCopyFile = BooleanUtils.toBooleanDefaultIfNull(((InputFileDescriptor)getInputDescriptor()).getCopyFile(), false); - if (isCopyFile && files != null) { - for (File file : files) { - try { - InputFileMarker marker = new InputFileMarker(this, null, 0); - getOutputManager().copyFile(file, marker); - if (isClosed() || isDrain()) { - LOG.info("isClosed or isDrain. 
Now breaking loop."); - break; - } - } catch (Throwable t) { - LOG.error("Error processing file=" + file.getAbsolutePath(), t); - } - } - } - } - - public void startNewChildDockerInputFileThread(DockerMetadata dockerMetadata) throws CloneNotSupportedException { - LOG.info("Start docker child input thread - " + dockerMetadata.getLogPath()); - InputFile clonedObject = (InputFile) this.clone(); - clonedObject.setDockerLogParent(false); - clonedObject.logPath = dockerMetadata.getLogPath(); - clonedObject.setFilePath(logPath); - clonedObject.logFiles = new File[]{new File(dockerMetadata.getLogPath())}; - clonedObject.setInputChildMap(new HashMap<>()); - clonedObject.setDockerLogFileUpdateMonitorThread(null); - copyFilters(clonedObject, getFirstFilter()); - Thread thread = new Thread(threadGroup, clonedObject, "file=" + dockerMetadata.getLogPath()); - clonedObject.setThread(thread); - inputChildMap.put(dockerMetadata.getLogPath(), clonedObject); - thread.start(); - } - - public void stopChildDockerInputFileThread(String logPathKey) { - LOG.info("Stop child input thread - " + logPathKey); - String filePath = new File(logPathKey).getName(); - if (inputChildMap.containsKey(logPathKey)) { - InputFile inputFile = inputChildMap.get(logPathKey); - inputFile.setClosed(true); - if (inputFile.getThread() != null && inputFile.getThread().isAlive()) { - inputFile.getThread().interrupt(); - } - inputChildMap.remove(logPathKey); - } else { - LOG.warn(logPathKey + " not found as an input child."); - } - } - - public void startNewChildInputFileThread(Map.Entry> folderFileEntry) throws CloneNotSupportedException { - LOG.info("Start child input thread - " + folderFileEntry.getKey()); - InputFile clonedObject = (InputFile) this.clone(); - String folderPath = folderFileEntry.getKey(); - String filePath = new File(getFilePath()).getName(); - String fullPathWithWildCard = String.format("%s/%s", folderPath, filePath); - if (clonedObject.getMaxAgeMin() != 0 && FileUtil.isFileTooOld(new File(fullPathWithWildCard), clonedObject.getMaxAgeMin().longValue())) { - LOG.info(String.format("File ('%s') is too old (max age min: %d), monitor thread not starting...", getFilePath(), clonedObject.getMaxAgeMin())); - } else { - clonedObject.setMultiFolder(false); - clonedObject.logFiles = folderFileEntry.getValue().toArray(new File[0]); // TODO: works only with tail - clonedObject.logPath = fullPathWithWildCard; - clonedObject.setLogFileDetacherThread(null); - clonedObject.setLogFilePathUpdaterThread(null); - clonedObject.setInputChildMap(new HashMap<>()); - copyFilters(clonedObject, getFirstFilter()); - Thread thread = new Thread(threadGroup, clonedObject, "file=" + fullPathWithWildCard); - clonedObject.setThread(thread); - inputChildMap.put(fullPathWithWildCard, clonedObject); - thread.start(); - } - } - - private void copyFilters(InputFile clonedInput, Filter firstFilter) { - if (firstFilter != null) { - try { - LOG.info("Cloning filters for input=" + clonedInput.logPath); - Filter newFilter = (Filter) firstFilter.clone(); - newFilter.setInput(clonedInput); - clonedInput.setFirstFilter(newFilter); - Filter actFilter = firstFilter; - Filter actClonedFilter = newFilter; - while (actFilter != null) { - if (actFilter.getNextFilter() != null) { - actFilter = actFilter.getNextFilter(); - Filter newClonedFilter = (Filter) actFilter.clone(); - newClonedFilter.setInput(clonedInput); - actClonedFilter.setNextFilter(newClonedFilter); - actClonedFilter = newClonedFilter; - } else { - actClonedFilter.setNextFilter(null); - actFilter = null; - 
} - } - LOG.info("Cloning filters has finished for input=" + clonedInput.logPath); - } catch (Exception e) { - LOG.error("Could not clone filters for input=" + clonedInput.logPath); - } - } - } - - public void stopChildInputFileThread(String folderPathKey) { - LOG.info("Stop child input thread - " + folderPathKey); - String filePath = new File(getFilePath()).getName(); - String fullPathWithWildCard = String.format("%s/%s", folderPathKey, filePath); - if (inputChildMap.containsKey(fullPathWithWildCard)) { - InputFile inputFile = inputChildMap.get(fullPathWithWildCard); - inputFile.setClosed(true); - if (inputFile.getThread() != null && inputFile.getThread().isAlive()) { - inputFile.getThread().interrupt(); - } - inputChildMap.remove(fullPathWithWildCard); - } else { - LOG.warn(fullPathWithWildCard + " not found as an input child."); - } - } - - @Override - public boolean isEnabled() { - return BooleanUtils.isNotFalse(getInputDescriptor().isEnabled()); - } - - @Override - public String getShortDescription() { - return "input:source=" + getInputDescriptor().getSource() + ", path=" + - (!ArrayUtils.isEmpty(logFiles) ? logFiles[0].getAbsolutePath() : logPath); - } - - @Override - public boolean logConfigs() { - LOG.info("Printing Input=" + getShortDescription()); - LOG.info("description=" + getInputDescriptor().getPath()); - return true; - } - - @Override - public void close() { - super.close(); - LOG.info("close() calling checkPoint checkIn(). " + getShortDescription()); - lastCheckIn(); - setClosed(true); - } - - public File[] getActualInputLogFiles() { - return FileUtil.getInputFilesByPattern(logPath); - } - - public String getFilePath() { - return filePath; - } - - public void setFilePath(String filePath) { - this.filePath = filePath; - } - - public String getLogPath() { - return logPath; - } - - public Object getFileKey() { - return fileKey; - } - - public String getBase64FileKey() { - return base64FileKey; - } - - public void setFileKey(Object fileKey) { - this.fileKey = fileKey; - } - - public boolean isTail() { - return tail; - } - - public File[] getLogFiles() { - return logFiles; - } - - public void setBase64FileKey(String base64FileKey) { - this.base64FileKey = base64FileKey; - } - - public void setLogFiles(File[] logFiles) { - this.logFiles = logFiles; - } - - public String getCheckPointExtension() { - return checkPointExtension; - } - - public int getCheckPointIntervalMS() { - return checkPointIntervalMS; - } - - public Map getCheckPointFiles() { - return checkPointFiles; - } - - public Map getLastCheckPointTimeMSs() { - return lastCheckPointTimeMSs; - } - - public Map> getJsonCheckPoints() { - return jsonCheckPoints; - } - - public Map getLastCheckPointInputMarkers() { - return lastCheckPointInputMarkers; - } - - public boolean isMultiFolder() { - return multiFolder; - } - - public void setMultiFolder(boolean multiFolder) { - this.multiFolder = multiFolder; - } - - public Map> getFolderMap() { - return folderMap; - } - - public void setFolderMap(Map> folderMap) { - this.folderMap = folderMap; - } - - public Map getInputChildMap() { - return inputChildMap; - } - - public void setInputChildMap(Map inputChildMap) { - this.inputChildMap = inputChildMap; - } - - @Override - public Thread getThread() { - return thread; - } - - @Override - public void setThread(Thread thread) { - this.thread = thread; - } - - public Thread getLogFileDetacherThread() { - return logFileDetacherThread; - } - - public void setLogFileDetacherThread(Thread logFileDetacherThread) { - this.logFileDetacherThread 
= logFileDetacherThread; - } - - public Thread getLogFilePathUpdaterThread() { - return logFilePathUpdaterThread; - } - - public void setLogFilePathUpdaterThread(Thread logFilePathUpdaterThread) { - this.logFilePathUpdaterThread = logFilePathUpdaterThread; - } - - public Thread getDockerLogFileUpdateMonitorThread() { - return dockerLogFileUpdateMonitorThread; - } - - public void setDockerLogFileUpdateMonitorThread(Thread dockerLogFileUpdateMonitorThread) { - this.dockerLogFileUpdateMonitorThread = dockerLogFileUpdateMonitorThread; - } - - public Integer getMaxAgeMin() { - return maxAgeMin; - } - - public void setDockerContainerRegistry(DockerContainerRegistry dockerContainerRegistry) { - this.dockerContainerRegistry = dockerContainerRegistry; - } - - public DockerContainerRegistry getDockerContainerRegistry() { - return dockerContainerRegistry; - } - - public boolean isDockerLog() { - return dockerLog; - } - - public boolean isDockerLogParent() { - return dockerLogParent; - } - - public void setDockerLogParent(boolean dockerLogParent) { - this.dockerLogParent = dockerLogParent; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFileMarker.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFileMarker.java deleted file mode 100644 index 70b439e4988..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFileMarker.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input; - -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; - -import java.util.HashMap; -import java.util.Map; - -public class InputFileMarker implements InputMarker { - - private final Input input; - private final String base64FileKey; - private final Integer lineNumber; - - private final Map<String, Object> properties = new HashMap<>(); - - public InputFileMarker(Input input, String base64FileKey, Integer lineNumber) { - this.input = input; - this.base64FileKey = base64FileKey; - this.lineNumber = lineNumber; - properties.put("line_number", lineNumber); - properties.put("file_key", base64FileKey); - } - - @Override - public Input getInput() { - return this.input; - } - - @Override - public Map<String, Object> getAllProperties() { - return properties; - } - - public String getBase64FileKey() { - return base64FileKey; - } - - public int getLineNumber() { - return lineNumber; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManagerImpl.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManagerImpl.java deleted file mode 100644 index a256fd77578..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManagerImpl.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ -package org.apache.ambari.logfeeder.input; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.docker.DockerContainerRegistry; -import org.apache.ambari.logfeeder.docker.DockerContainerRegistryMonitor; -import org.apache.ambari.logfeeder.plugin.manager.CheckpointManager; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.manager.InputManager; -import org.apache.log4j.Logger; - -import javax.inject.Inject; -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class InputManagerImpl extends InputManager { - - private static final Logger LOG = Logger.getLogger(InputManagerImpl.class); - - private Map> inputs = new HashMap<>(); - private Set notReadyList = new HashSet<>(); - - private boolean isDrain = false; - - private MetricData filesCountMetric = new MetricData("input.files.count", true); - - private Thread inputIsReadyMonitor; - - @Inject - private DockerContainerRegistry dockerContainerRegistry; - - @Inject - private LogFeederProps logFeederProps; - - @Inject - private CheckpointManager checkpointHandler; - - public List getInputList(String serviceName) { - return inputs.get(serviceName); - } - - @Override - public void add(String serviceName, Input input) { - List inputList = inputs.get(serviceName); - if (inputList == null) { - inputList = new ArrayList<>(); - inputs.put(serviceName, inputList); - } - inputList.add(input); - } - - @Override - public void removeInputsForService(String serviceName) { - List inputList = inputs.get(serviceName); - for (Input input : inputList) { - input.setDrain(true); - } - for (Input input : inputList) { - while (!input.isClosed()) { - try { Thread.sleep(100); } catch (InterruptedException e) {} - } - } - inputList.clear(); - inputs.remove(serviceName); - } - - @Override - public void removeInput(Input input) { - LOG.info("Trying to remove from inputList. " + input.getShortDescription()); - for (List inputList : inputs.values()) { - Iterator iter = inputList.iterator(); - while (iter.hasNext()) { - Input iterInput = iter.next(); - if (iterInput.equals(input)) { - LOG.info("Removing Input from inputList. 
" + input.getShortDescription()); - iter.remove(); - } - } - } - } - - private int getActiveFilesCount() { - int count = 0; - for (List inputList : inputs.values()) { - for (Input input : inputList) { - if (input.isReady()) { - count++; - } - } - } - return count; - } - - @Override - public void init() throws Exception { - checkpointHandler.init(logFeederProps); - startMonitorThread(); - startDockerMetadataThread(); - } - - - private void startDockerMetadataThread() { - if (logFeederProps.isDockerContainerRegistryEnabled()) { - Thread obtaiinDockerMetadataThread = new Thread(new DockerContainerRegistryMonitor(dockerContainerRegistry), "obtain_docker_metadata"); - obtaiinDockerMetadataThread.start(); - } - } - - private void startMonitorThread() { - inputIsReadyMonitor = new Thread("InputIsReadyMonitor") { - @Override - public void run() { - LOG.info("Going to monitor for these missing files: " + notReadyList.toString()); - while (true) { - if (isDrain) { - LOG.info("Exiting missing file monitor."); - break; - } - try { - Iterator iter = notReadyList.iterator(); - while (iter.hasNext()) { - Input input = iter.next(); - try { - if (input.isReady()) { - input.monitor(); - iter.remove(); - } - } catch (Throwable t) { - LOG.error("Error while enabling monitoring for input. " + input.getShortDescription()); - } - } - Thread.sleep(30 * 1000); - } catch (Throwable t) { - // Ignore - } - } - } - }; - - inputIsReadyMonitor.start(); - } - - public void startInputs(String serviceName) { - for (Input input : inputs.get(serviceName)) { - try { - if (input instanceof InputFile) {// apply docker metadata registry - InputFile inputFile = (InputFile) input; - inputFile.setDockerContainerRegistry(dockerContainerRegistry); - } - input.init(logFeederProps); - if (input.isReady()) { - input.monitor(); - } else { - LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " + - "So it might not be an issue. " + input.getShortDescription()); - notReadyList.add(input); - } - } catch (Exception e) { - LOG.error("Error initializing input. 
" + input.getShortDescription(), e); - } - } - } - - @Override - public void addToNotReady(Input notReadyInput) { - notReadyList.add(notReadyInput); - } - - @Override - public void addMetricsContainers(List metricsList) { - for (List inputList : inputs.values()) { - for (Input input : inputList) { - input.addMetricsContainers(metricsList); - } - } - filesCountMetric.value = getActiveFilesCount(); - metricsList.add(filesCountMetric); - } - - public void logStats() { - for (List inputList : inputs.values()) { - for (Input input : inputList) { - input.logStat(); - } - } - - filesCountMetric.value = getActiveFilesCount(); - // TODO: logStatForMetric(filesCountMetric, "Stat: Files Monitored Count", ""); - } - - public void waitOnAllInputs() { - //wait on inputs - for (List inputList : inputs.values()) { - for (Input input : inputList) { - if (input != null) { - Thread inputThread = input.getThread(); - if (inputThread != null) { - try { - inputThread.join(); - } catch (InterruptedException e) { - // ignore - } - } - } - } - } - // wait on monitor - if (inputIsReadyMonitor != null) { - try { - this.close(); - inputIsReadyMonitor.join(); - } catch (InterruptedException e) { - // ignore - } - } - } - - public void checkInAll() { - for (List inputList : inputs.values()) { - for (Input input : inputList) { - input.lastCheckIn(); - } - } - } - - public void close() { - for (List inputList : inputs.values()) { - for (Input input : inputList) { - try { - input.setDrain(true); - } catch (Throwable t) { - LOG.error("Error while draining. input=" + input.getShortDescription(), t); - } - } - } - isDrain = true; - - // Need to get this value from property - int iterations = 30; - int waitTimeMS = 1000; - for (int i = 0; i < iterations; i++) { - boolean allClosed = true; - for (List inputList : inputs.values()) { - for (Input input : inputList) { - if (!input.isClosed()) { - try { - allClosed = false; - LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds"); - Thread.sleep(waitTimeMS); - } catch (Throwable t) { - // Ignore - } - } - } - } - if (allClosed) { - LOG.info("All inputs are closed. Iterations=" + i); - return; - } - } - - LOG.warn("Some inputs were not closed after " + iterations + " iterations"); - for (List inputList : inputs.values()) { - for (Input input : inputList) { - if (!input.isClosed()) { - LOG.warn("Input not closed. Will ignore it." + input.getShortDescription()); - } - } - } - } - - @VisibleForTesting - public void setLogFeederProps(LogFeederProps logFeederProps) { - this.logFeederProps = logFeederProps; - } - - public LogFeederProps getLogFeederProps() { - return logFeederProps; - } - - public CheckpointManager getCheckpointHandler() { - return checkpointHandler; - } - - public void setCheckpointHandler(CheckpointManager checkpointHandler) { - this.checkpointHandler = checkpointHandler; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java deleted file mode 100644 index 41db8bd6807..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input; - -import org.apache.ambari.logfeeder.util.S3Util; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor; -import org.apache.commons.lang.ArrayUtils; -import org.apache.solr.common.util.Base64; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; - -public class InputS3File extends InputFile { - - private static final Logger LOG = LoggerFactory.getLogger(InputS3File.class); - - @Override - public boolean isReady() { - if (!isReady()) { - // Let's try to check whether the file is available - setLogFiles(getActualFiles(getLogPath())); - if (!ArrayUtils.isEmpty(getLogFiles())) { - if (isTail() && getLogFiles().length > 1) { - LOG.warn("Found multiple files (" + getLogFiles().length + ") for the file filter " + getFilePath() + - ". Will use only the first one. Using " + getLogFiles()[0].getAbsolutePath()); - } - LOG.info("File filter " + getFilePath() + " expanded to " + getLogFiles()[0].getAbsolutePath()); - setReady(true); - } else { - LOG.debug(getLogPath() + " file doesn't exist. Ignoring for now"); - } - } - return isReady(); - } - - private File[] getActualFiles(String searchPath) { - // TODO search file on s3 - return new File[] { new File(searchPath) }; - } - - @Override - public void start() throws Exception { - if (ArrayUtils.isEmpty(getLogFiles())) { - return; - } - for (int i = getLogFiles().length - 1; i >= 0; i--) { - File file = getLogFiles()[i]; - if (i == 0 || !isTail()) { - try { - processFile(file, i == 0); - if (isClosed() || isDrain()) { - LOG.info("isClosed or isDrain. Now breaking loop."); - break; - } - } catch (Throwable t) { - LOG.error("Error processing file=" + file.getAbsolutePath(), t); - } - } - } - close(); - } - - @Override - public BufferedReader openLogFile(File logPathFile) throws Exception { - String s3AccessKey = ((InputS3FileDescriptor)getInputDescriptor()).getS3AccessKey(); - String s3SecretKey = ((InputS3FileDescriptor)getInputDescriptor()).getS3SecretKey(); - BufferedReader br = S3Util.getReader(logPathFile.getPath(), s3AccessKey, s3SecretKey); - Object fileKey = getFileKey(logPathFile); - setFileKey(fileKey); - String base64FileKey = Base64.byteArrayToBase64(getFileKey().toString().getBytes()); - setBase64FileKey(base64FileKey); - LOG.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". 
" + getShortDescription()); - return br; - } - - private Object getFileKey(File logFile) { - return logFile.getPath(); - } - - @Override - public void close() { - super.close(); - setClosed(true); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java deleted file mode 100644 index 13b00e31293..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input; - -import com.google.common.base.Joiner; -import org.apache.ambari.logfeeder.conf.InputSimulateConfig; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.filter.FilterJSON; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.FilterJsonDescriptorImpl; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputDescriptorImpl; -import org.apache.commons.collections.MapUtils; -import org.apache.solr.common.util.Base64; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.InetAddress; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.atomic.AtomicInteger; - -public class InputSimulate extends InputFile { - private static final Logger LOG = LoggerFactory.getLogger(InputSimulate.class); - private static final String LOG_TEXT_PATTERN = "{ logtime=\"%d\", level=\"%s\", log_message=\"%s\", host=\"%s\"}"; - - private static final Map typeToFilePath = new HashMap<>(); - private static final List inputTypes = new ArrayList<>(); - public static void loadTypeToFilePath(List inputList) { - for (InputDescriptor input : inputList) { - typeToFilePath.put(input.getType(), input.getPath()); - inputTypes.add(input.getType()); - } - } - - private static final Map typeToLineNumber = new HashMap<>(); - - private static final AtomicInteger hostNumber = new AtomicInteger(0); - - private static final List simulateOutputs = new ArrayList<>(); - public static List getSimulateOutputs() { - return simulateOutputs; - } - - private final Random random = new 
Random(System.currentTimeMillis()); - - private InputSimulateConfig conf; - private List types; - private String level; - private int numberOfWords; - private int minLogWords; - private int maxLogWords; - private long sleepMillis; - private String host; - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - super.init(logFeederProps); - conf = logFeederProps.getInputSimulateConfig(); - this.types = getSimulatedLogTypes(); - this.level = conf.getSimulateLogLevel(); - this.numberOfWords = conf.getSimulateNumberOfWords(); - this.minLogWords = conf.getSimulateMinLogWords(); - this.maxLogWords = conf.getSimulateMaxLogWords(); - this.sleepMillis = conf.getSimulateSleepMilliseconds(); - this.host = "#" + hostNumber.incrementAndGet() + "-" + LogFeederUtil.hostName; - - Filter filter = new FilterJSON(); - filter.loadConfig(new FilterJsonDescriptorImpl()); - filter.setInput(this); - addFilter(filter); - - } - - private List getSimulatedLogTypes() { - String logsToSimulate = conf.getSimulateLogIds(); - return (logsToSimulate == null) ? - inputTypes : - Arrays.asList(logsToSimulate.split(",")); - } - - @Override - public void addOutput(Output output) { - try { - Class clazz = output.getClass(); - Output outputCopy = clazz.newInstance(); - outputCopy.loadConfig(output.getConfigs()); - outputCopy.setDestination(output.getDestination()); - simulateOutputs.add(outputCopy); - super.addOutput(outputCopy); - } catch (Exception e) { - LOG.warn("Could not copy Output class " + output.getClass() + ", using original output"); - super.addOutput(output); - } - } - - @Override - public boolean isReady() { - return true; - } - - @Override - public void start() throws Exception { - getFirstFilter().setOutputManager(getOutputManager()); - while (true) { - if (types.isEmpty()) { - try { Thread.sleep(sleepMillis); } catch(Exception e) { /* Ignore */ } - continue; - } - String type = imitateRandomLogFile(); - - String line = getLine(); - InputFileMarker marker = getInputMarker(type); - - outputLine(line, marker); - - try { Thread.sleep(sleepMillis); } catch(Exception e) { /* Ignore */ } - } - } - - private String imitateRandomLogFile() { - int typePos = random.nextInt(types.size()); - String type = types.get(typePos); - String filePath = MapUtils.getString(typeToFilePath, type, "path of " + type); - - ((InputDescriptorImpl)getInputDescriptor()).setType(type); - setFilePath(filePath); - - return type; - } - - private InputFileMarker getInputMarker(String type) throws Exception { - return new InputFileMarker(this, getBase64FileKey(), getLineNumber(type)); - } - - private static synchronized int getLineNumber(String type) { - if (!typeToLineNumber.containsKey(type)) { - typeToLineNumber.put(type, 0); - } - Integer lineNumber = typeToLineNumber.get(type) + 1; - - typeToLineNumber.put(type, lineNumber); - return lineNumber; - } - - public String getBase64FileKey() { - String fileKey; - try { - fileKey = InetAddress.getLocalHost().getHostAddress() + "|" + getFilePath(); - } catch (Exception e) { - // skip - fileKey = "localhost|" + getFilePath(); - } - return Base64.byteArrayToBase64(fileKey.getBytes()); - } - - private String getLine() { - Date d = new Date(); - String logMessage = createLogMessage(); - return String.format(LOG_TEXT_PATTERN, d.getTime(), level, logMessage, host); - } - - private String createLogMessage() { - int logMessageLength = minLogWords + random.nextInt(maxLogWords - minLogWords + 1); - Set words = new TreeSet<>(); - List logMessage = new ArrayList<>(); - while 
(words.size() < logMessageLength) { - int word = random.nextInt(numberOfWords); - if (words.add(word)) { - logMessage.add(String.format("Word%06d", word)); - } - } - - return Joiner.on(' ').join(logMessage); - } - - @Override - public void checkIn(InputFileMarker inputMarker) {} - - @Override - public void lastCheckIn() {} - - @Override - public String getNameForThread() { - return "Simulated input"; - } - - @Override - public String getShortDescription() { - return "Simulated input"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSocket.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSocket.java deleted file mode 100644 index 36b43013b82..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSocket.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logsearch.appender.LogsearchConversion; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputSocketDescriptor; -import org.apache.commons.lang.ObjectUtils; -import org.apache.log4j.spi.LoggingEvent; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.net.ServerSocketFactory; -import javax.net.ssl.SSLServerSocketFactory; -import java.io.BufferedInputStream; -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.io.ObjectInputStream; -import java.net.ServerSocket; -import java.net.Socket; -import java.net.SocketException; - -public class InputSocket extends Input { - - private static final Logger LOG = LoggerFactory.getLogger(InputSocket.class); - - private ServerSocket serverSocket; - private Thread thread; - private int port; - private String protocol; - private boolean secure; - private boolean log4j; - - @Override - public void init(LogFeederProps logFeederProperties) throws Exception { - super.init(logFeederProperties); - port = (int) ObjectUtils.defaultIfNull(getInputDescriptor().getPort(), 0); - if (port == 0) { - throw new IllegalArgumentException(String.format("Port needs to be set for socket input (type: %s)", getInputDescriptor().getType())); - } - - protocol = (String) ObjectUtils.defaultIfNull(getInputDescriptor().getProtocol(), "tcp"); - secure = (boolean) ObjectUtils.defaultIfNull(getInputDescriptor().isSecure(), false); - log4j = (boolean) ObjectUtils.defaultIfNull(getInputDescriptor().isLog4j(), false); - } - - @Override - public boolean monitor() { - if (isReady()) { - LOG.info("Start monitoring socket 
thread..."); - thread = new Thread(this, getNameForThread()); - thread.start(); - return true; - } else { - return false; - } - } - - @Override - public void start() throws Exception { - LOG.info("Starting socket server (port: {}, protocol: {}, secure: {})", port, protocol, secure); - ServerSocketFactory socketFactory = secure ? SSLServerSocketFactory.getDefault() : ServerSocketFactory.getDefault(); - InputSocketMarker inputSocketMarker = new InputSocketMarker(this, port, protocol, secure, log4j); - LogsearchConversion loggerConverter = new LogsearchConversion(); - - try { - serverSocket = socketFactory.createServerSocket(port); - while (!isDrain()) { - Socket socket = serverSocket.accept(); - if (log4j) { - try (ObjectInputStream ois = new ObjectInputStream(new BufferedInputStream(socket.getInputStream()))) { - LoggingEvent loggingEvent = (LoggingEvent) ois.readObject(); - String jsonStr = loggerConverter.createOutput(loggingEvent); - LOG.trace("Incoming socket logging event: " + jsonStr); - outputLine(jsonStr, inputSocketMarker); - } - } else { - try (BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));) { - String line = in.readLine(); - LOG.trace("Incoming socket message: " + line); - outputLine(line, inputSocketMarker); - } - } - } - } catch (SocketException socketEx) { - LOG.warn("{}", socketEx.getMessage()); - } finally { - serverSocket.close(); - } - } - - @Override - public void setDrain(boolean drain) { - super.setDrain(drain); - LOG.info("Stopping socket input: {}", getShortDescription()); - try { - serverSocket.close(); - setClosed(true); - } catch (Exception e) { - LOG.error("Error during closing socket input.", e); - } - } - - @Override - public String getNameForThread() { - return String.format("socket=%s-%s-%s", getLogType(), this.protocol, this.port); - } - - @Override - public String getShortDescription() { - return String.format("%s - (port: %d, protocol: %s)", getLogType(), port, protocol); - } - - @Override - public boolean isReady() { - return true; - } - - @Override - public InputSocketMarker getInputMarker() { - return null; - } - - @Override - public void setReady(boolean isReady) { - } - - @Override - public void checkIn(InputSocketMarker inputMarker) { - } - - @Override - public void lastCheckIn() { - } - - @Override - public String getReadBytesMetricName() { - return null; - } - - @Override - public String getStatMetricName() { - return null; - } - - @Override - public boolean logConfigs() { - return false; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSocketMarker.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSocketMarker.java deleted file mode 100644 index 983cd194c07..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSocketMarker.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input; - -import org.apache.ambari.logfeeder.plugin.input.InputMarker; - -import java.util.HashMap; -import java.util.Map; - -public class InputSocketMarker implements InputMarker{ - private final InputSocket input; - private final Integer port; - private final String protocol; - private final Boolean secure; - private final Boolean log4j; - - private final Map properties = new HashMap<>(); - - public InputSocketMarker(InputSocket input, Integer port, String protocol, Boolean secure, Boolean log4j) { - this.input = input; - this.port = port; - this.protocol = protocol; - this.secure = secure; - this.log4j = log4j; - properties.put("port", port); - properties.put("secure", secure); - properties.put("protocol", protocol); - properties.put("log4j", log4j); - } - - public InputSocket getInput() { - return input; - } - - @Override - public Map getAllProperties() { - return this.properties; - } - - public Integer getPort() { - return port; - } - - public String getProtocol() { - return protocol; - } - - public Boolean isSecure() { - return secure; - } - - public Boolean isLog4j() { - return log4j; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/ProcessFileHelper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/ProcessFileHelper.java deleted file mode 100644 index 4ed415a735e..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/ProcessFileHelper.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input.file; - -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.io.BufferedReader; -import java.io.File; - -public class ProcessFileHelper { - - private static final Logger LOG = Logger.getLogger(ProcessFileHelper.class); - - private ProcessFileHelper() { - } - - public static void processFile(InputFile inputFile, File logPathFile, boolean follow) throws Exception { - LOG.info("Monitoring logPath=" + inputFile.getLogPath() + ", logPathFile=" + logPathFile); - BufferedReader br = null; - - int lineCount = 0; - try { - inputFile.setFilePath(logPathFile.getAbsolutePath()); - - br = inputFile.openLogFile(logPathFile); - - boolean resume = true; - int resumeFromLineNumber = inputFile.getResumeFromLineNumber(); - if (resumeFromLineNumber > 0) { - LOG.info("Resuming log file " + logPathFile.getAbsolutePath() + " from line number " + resumeFromLineNumber); - resume = false; - } - - inputFile.setClosed(false); - int sleepStep = 2; - int sleepIteration = 0; - while (true) { - try { - if (inputFile.isDrain()) { - break; - } - - String line = br.readLine(); - if (line == null) { - if (!resume) { - resume = true; - } - sleepIteration++; - if (sleepIteration == 2) { - inputFile.flush(); - if (!follow) { - LOG.info("End of file. Done with filePath=" + logPathFile.getAbsolutePath() + ", lineCount=" + lineCount); - break; - } - } else if (sleepIteration > 4) { - Object newFileKey = inputFile.getFileKeyFromLogFile(logPathFile); - if (newFileKey != null && (inputFile.getFileKey() == null || !newFileKey.equals(inputFile.getFileKey()))) { - LOG.info("File key is different. Marking this input file for rollover. oldKey=" + inputFile.getFileKey() + ", newKey=" + - newFileKey + ". " + inputFile.getShortDescription()); - - try { - LOG.info("File is rolled over. Closing current open file." + inputFile.getShortDescription() + ", lineCount=" + - lineCount); - br.close(); - } catch (Exception ex) { - LOG.error("Error closing file" + inputFile.getShortDescription(), ex); - break; - } - - try { - LOG.info("Opening new rolled over file." + inputFile.getShortDescription()); - br = inputFile.openLogFile(logPathFile); - lineCount = 0; - } catch (Exception ex) { - LOG.error("Error opening rolled over file. " + inputFile.getShortDescription(), ex); - LOG.info("Added input to not ready list." + inputFile.getShortDescription()); - inputFile.setReady(false); - inputFile.getInputManager().addToNotReady(inputFile); - break; - } - LOG.info("File is successfully rolled over. " + inputFile.getShortDescription()); - continue; - } - } - try { - Thread.sleep(sleepStep * 1000); - sleepStep = Math.min(sleepStep * 2, 10); - } catch (InterruptedException e) { - LOG.info("Thread interrupted." + inputFile.getShortDescription()); - } - } else { - lineCount++; - sleepStep = 1; - sleepIteration = 0; - - if (!resume && lineCount > resumeFromLineNumber) { - LOG.info("Resuming to read from last line. 
lineCount=" + lineCount + ", input=" + inputFile.getShortDescription()); - resume = true; - } - if (resume) { - InputFileMarker marker = new InputFileMarker(inputFile, inputFile.getBase64FileKey(), lineCount); - inputFile.outputLine(line, marker); - } - } - } catch (Throwable t) { - String logMessageKey = inputFile.getClass().getSimpleName() + "_READ_LOOP_EXCEPTION"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception in read loop. lineNumber=" + lineCount + - ", input=" + inputFile.getShortDescription(), t, LOG, Level.ERROR); - } - } - } finally { - if (br != null) { - LOG.info("Closing reader." + inputFile.getShortDescription() + ", lineCount=" + lineCount); - try { - br.close(); - } catch (Throwable t) { - // ignore - } - } - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/FileCheckpointManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/FileCheckpointManager.java deleted file mode 100644 index 69c21fb820c..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/FileCheckpointManager.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input.file.checkpoint; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.input.file.checkpoint.util.CheckpointFileReader; -import org.apache.ambari.logfeeder.input.file.checkpoint.util.FileCheckInHelper; -import org.apache.ambari.logfeeder.input.file.checkpoint.util.FileCheckpointCleanupHelper; -import org.apache.ambari.logfeeder.input.file.checkpoint.util.ResumeLineNumberHelper; -import org.apache.ambari.logfeeder.input.monitor.CheckpointCleanupMonitor; -import org.apache.ambari.logfeeder.plugin.manager.CheckpointManager; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.EOFException; -import java.io.File; -import java.io.IOException; -import java.util.Map; -import java.util.UUID; -import java.util.stream.Stream; - -public class FileCheckpointManager implements CheckpointManager { - - private static final Logger LOG = LoggerFactory.getLogger(FileCheckpointManager.class); - - private static final String CHECKPOINT_SUBFOLDER_NAME = "logfeeder_checkpoints"; - - private String checkPointExtension; - private File checkPointFolderFile; - - @Override - public void init(LogFeederProps logFeederProps) { - checkPointExtension = logFeederProps.getCheckPointExtension(); - LOG.info("Determining valid checkpoint folder"); - boolean isCheckPointFolderValid = false; - // We need to keep track of the files we are reading. - String checkPointFolder = logFeederProps.getCheckpointFolder(); - if (!StringUtils.isEmpty(checkPointFolder)) { - checkPointFolderFile = new File(checkPointFolder); - isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile); - } - - if (!isCheckPointFolderValid) { - // Let's use tmp folder - checkPointFolderFile = new File(logFeederProps.getTmpDir(), CHECKPOINT_SUBFOLDER_NAME); - LOG.info("Checking if tmp folder can be used for checkpoints. Folder=" + checkPointFolderFile); - isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile); - if (isCheckPointFolderValid) { - LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended." + - "Please set logfeeder.checkpoint.folder property"); - } - } - - if (isCheckPointFolderValid) { - LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints"); - // check checkpoint cleanup every 2000 min - Thread checkpointCleanupThread = new Thread(new CheckpointCleanupMonitor(this, 2000),"checkpoint_cleanup"); - checkpointCleanupThread.setDaemon(true); - checkpointCleanupThread.start(); - } else { - throw new IllegalStateException("Could not determine the checkpoint folder."); - } - } - - @Override - public void checkIn(InputFile inputFile, InputFileMarker inputMarker) { - FileCheckInHelper.checkIn(inputFile, inputMarker); - } - - @Override - public int resumeLineNumber(InputFile inputFile) { - return ResumeLineNumberHelper.getResumeFromLineNumber(inputFile, checkPointFolderFile); - } - - @Override - public void cleanupCheckpoints() { - FileCheckpointCleanupHelper.cleanCheckPointFiles(checkPointFolderFile, checkPointExtension); - } - - @Override - public void printCheckpoints(String checkpointLocation, String logTypeFilter, String fileKeyFilter) throws IOException { - System.out.println(String.format("Searching checkpoint files in '%s' folder ... 
(list)", checkpointLocation)); - File[] files = CheckpointFileReader.getFiles(new File(checkpointLocation), ".cp"); - if (files != null) { - for (File file : files) { - String fileNameTitle = String.format("file name: %s", file.getName()); - StringBuilder strBuilder = new StringBuilder(fileNameTitle.length()); - String[] splitted = file.getName().split("-"); - String logtType = ""; - String fileKey = ""; - if (splitted.length > 1) { - logtType = splitted[0]; - fileKey = splitted[1].replace(getFileExtension(), ""); - } else { - fileKey = file.getName().replace(getFileExtension(), ""); - } - if (checkFilter(logtType, logTypeFilter) || checkFilter(fileKey, fileKeyFilter)) { - continue; - } - Stream.generate(() -> '-').limit(fileNameTitle.length()).forEach(strBuilder::append); - String border = strBuilder.toString(); - System.out.println(border); - System.out.println(String.format("file name: %s", file.getName())); - System.out.println(border); - if (org.apache.commons.lang.StringUtils.isNotBlank(logtType)) { - System.out.println(String.format("log_type: %s", logtType)); - } - Map checkpointJson = CheckpointFileReader.getCheckpointObject(file); - for (Map.Entry entry : checkpointJson.entrySet()) { - System.out.println(String.format("%s: %s", entry.getKey(), entry.getValue())); - } - System.out.print("\n"); - } - } - } - - @Override - public void cleanCheckpoint(String checkpointLocation, String logTypeFilter, String fileKeyFilter, boolean all) { - System.out.println(String.format("Searching checkpoint files in '%s' folder ... (clean)", checkpointLocation)); - File[] files = CheckpointFileReader.getFiles(new File(checkpointLocation), ".cp"); - if (files != null) { - for (File file : files) { - String logtType = getLogTypeFromFileName(file.getName()); - String fileKey = getFileKeyFromFileName(file.getName()); - if (checkFilter(logtType, logTypeFilter) || checkFilter(fileKey, fileKeyFilter)) { - continue; - } - if (!all && logTypeFilter == null && fileKeyFilter == null) { - throw new IllegalArgumentException("It is required to use a filter for clean: --all, --log-type or --file-key "); - } - - if (all || logTypeFilter != null && logTypeFilter.equals(logtType) || - fileKeyFilter != null && fileKeyFilter.equals(fileKey)) { - System.out.println(String.format("Deleting checkpoint file - filename: %s, key: %s, log_type: %s", file.getAbsolutePath(), fileKey, logtType)); - file.delete(); - } - } - } - } - - private boolean verifyCheckPointFolder(File folderPathFile) { - if (!folderPathFile.exists()) { - try { - if (!folderPathFile.mkdir()) { - LOG.warn("Error creating folder for check point. folder=" + folderPathFile); - } - } catch (Throwable t) { - LOG.warn("Error creating folder for check point. folder=" + folderPathFile, t); - } - } - - if (folderPathFile.exists() && folderPathFile.isDirectory()) { - // Let's check whether we can create a file - File testFile = new File(folderPathFile, UUID.randomUUID().toString()); - try { - testFile.createNewFile(); - return testFile.delete(); - } catch (IOException e) { - LOG.warn("Couldn't create test file in " + folderPathFile.getAbsolutePath() + " for checkPoint", e); - } - } - return false; - } - - private boolean checkFilter(String actualValue, String filterValue) { - return (org.apache.commons.lang.StringUtils.isNotBlank(actualValue) && org.apache.commons.lang.StringUtils.isNotBlank(filterValue) && - !actualValue.equals(filterValue)); - } - - private String getFileExtension() { - return this.checkPointExtension == null ? 
".cp" : this.checkPointExtension; - } - - private String getFileKeyFromFileName(String fileName) { - String[] splitted = fileName.split("-"); - String fileKeyResult = splitted.length > 1 ? splitted[1] : splitted[0]; - return fileKeyResult.replace(getFileExtension(), ""); - } - - private String getLogTypeFromFileName(String fileName) { - String[] splitted = fileName.split("-"); - String logTypeResult = ""; - if (splitted.length > 1) { - logTypeResult = splitted[0]; - } - return logTypeResult; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/CheckpointFileReader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/CheckpointFileReader.java deleted file mode 100644 index dd35d073178..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/CheckpointFileReader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.file.checkpoint.util; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.reflect.TypeToken; -import org.apache.commons.io.filefilter.WildcardFileFilter; - -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.io.RandomAccessFile; -import java.lang.reflect.Type; -import java.util.Map; - -public class CheckpointFileReader { - - private CheckpointFileReader() { - } - - public static File[] getFiles(File checkPointFolderFile, String checkPointExtension) { - String searchPath = "*" + checkPointExtension; - FileFilter fileFilter = new WildcardFileFilter(searchPath); - return checkPointFolderFile.listFiles(fileFilter); - } - - public static Map getCheckpointObject(File checkPointFile) throws IOException { - final Map jsonCheckPoint; - try (RandomAccessFile checkPointReader = new RandomAccessFile(checkPointFile, "r")) { - int contentSize = checkPointReader.readInt(); - byte b[] = new byte[contentSize]; - int readSize = checkPointReader.read(b, 0, contentSize); - if (readSize != contentSize) { - throw new IllegalArgumentException("Couldn't read expected number of bytes from checkpoint file. 
expected=" + contentSize + ", read=" - + readSize + ", checkPointFile=" + checkPointFile); - } else { - String jsonCheckPointStr = new String(b, 0, readSize); - Gson gson = new GsonBuilder().create(); - Type type = new TypeToken>() {}.getType(); - jsonCheckPoint = gson.fromJson(jsonCheckPointStr, type); - } - } - return jsonCheckPoint; - } - - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/FileCheckInHelper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/FileCheckInHelper.java deleted file mode 100644 index b217e349e18..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/FileCheckInHelper.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.file.checkpoint.util; - -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.util.FileUtil; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.io.File; -import java.io.RandomAccessFile; -import java.util.Date; -import java.util.Map; - -public class FileCheckInHelper { - - private static final Logger LOG = Logger.getLogger(FileCheckInHelper.class); - - private FileCheckInHelper() { - } - - public static void checkIn(InputFile inputFile, InputFileMarker inputMarker) { - try { - Map jsonCheckPoint = inputFile.getJsonCheckPoints().get(inputMarker.getBase64FileKey()); - File checkPointFile = inputFile.getCheckPointFiles().get(inputMarker.getBase64FileKey()); - - int lineNumber = LogFeederUtil.objectToInt(jsonCheckPoint.get("line_number"), 0, "line_number"); - if (lineNumber > inputMarker.getLineNumber()) { - // Already wrote higher line number for this input - return; - } - // If interval is greater than last checkPoint time, then write - long currMS = System.currentTimeMillis(); - long lastCheckPointTimeMs = inputFile.getLastCheckPointTimeMSs().containsKey(inputMarker.getBase64FileKey()) ? 
- inputFile.getLastCheckPointTimeMSs().get(inputMarker.getBase64FileKey()) : 0; - if (!inputFile.isClosed() && (currMS - lastCheckPointTimeMs < inputFile.getCheckPointIntervalMS())) { - // Let's save this one so we can update the check point file on flush - inputFile.getLastCheckPointInputMarkers().put(inputMarker.getBase64FileKey(), inputMarker); - return; - } - inputFile.getLastCheckPointTimeMSs().put(inputMarker.getBase64FileKey(), currMS); - - if (inputFile.getMaxAgeMin() != 0) { - jsonCheckPoint.put("max_age_min", inputFile.getMaxAgeMin().toString()); - } - jsonCheckPoint.put("line_number", "" + new Integer(inputMarker.getLineNumber())); - jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS)); - jsonCheckPoint.put("last_write_time_date", new Date()); - - String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint); - - File tmpCheckPointFile = new File(checkPointFile.getAbsolutePath() + ".tmp"); - if (tmpCheckPointFile.exists()) { - tmpCheckPointFile.delete(); - } - RandomAccessFile tmpRaf = new RandomAccessFile(tmpCheckPointFile, "rws"); - tmpRaf.writeInt(jsonStr.length()); - tmpRaf.write(jsonStr.getBytes()); - tmpRaf.getFD().sync(); - tmpRaf.close(); - - FileUtil.move(tmpCheckPointFile, checkPointFile); - - if (inputFile.isClosed()) { - String logMessageKey = inputFile.getClass().getSimpleName() + "_FINAL_CHECKIN"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Wrote final checkPoint, input=" + inputFile.getShortDescription() + - ", checkPointFile=" + checkPointFile.getAbsolutePath() + ", checkPoint=" + jsonStr, null, LOG, Level.INFO); - } - } catch (Throwable t) { - String logMessageKey = inputFile.getClass().getSimpleName() + "_CHECKIN_EXCEPTION"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception checkIn. , input=" + inputFile.getShortDescription(), t, - LOG, Level.ERROR); - } - } - - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/FileCheckpointCleanupHelper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/FileCheckpointCleanupHelper.java deleted file mode 100644 index 91b5383b63d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/FileCheckpointCleanupHelper.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input.file.checkpoint.util; - -import org.apache.ambari.logfeeder.util.FileUtil; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.solr.common.util.Base64; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.EOFException; -import java.io.File; -import java.io.RandomAccessFile; -import java.util.Map; - -public class FileCheckpointCleanupHelper { - - private static final Logger LOG = LoggerFactory.getLogger(FileCheckpointCleanupHelper.class); - - private FileCheckpointCleanupHelper() { - } - - public static void cleanCheckPointFiles(File checkPointFolderFile, String checkPointExtension) { - if (checkPointFolderFile == null) { - LOG.info("Will not clean checkPoint files. checkPointFolderFile=null"); - return; - } - LOG.info("Cleaning checkPoint files. checkPointFolderFile=" + checkPointFolderFile.getAbsolutePath()); - try { - // Loop over the check point files and if filePath is not present, then move to closed - File[] checkPointFiles = CheckpointFileReader.getFiles(checkPointFolderFile, checkPointExtension); - int totalCheckFilesDeleted = 0; - if (checkPointFiles != null) { - for (File checkPointFile : checkPointFiles) { - if (checkCheckPointFile(checkPointFile)) { - totalCheckFilesDeleted++; - } - } - LOG.info("Deleted " + totalCheckFilesDeleted + " checkPoint file(s). checkPointFolderFile=" + - checkPointFolderFile.getAbsolutePath()); - } - } catch (Throwable t) { - LOG.error("Error while cleaning checkPointFiles", t); - } - } - - private static boolean checkCheckPointFile(File checkPointFile) { - boolean deleted = false; - try (RandomAccessFile checkPointReader = new RandomAccessFile(checkPointFile, "r")) { - int contentSize = checkPointReader.readInt(); - byte b[] = new byte[contentSize]; - int readSize = checkPointReader.read(b, 0, contentSize); - if (readSize != contentSize) { - LOG.error("Couldn't read expected number of bytes from checkpoint file. expected=" + contentSize + ", read=" - + readSize + ", checkPointFile=" + checkPointFile); - } else { - String jsonCheckPointStr = new String(b, 0, readSize); - Map jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr); - - String logFilePath = (String) jsonCheckPoint.get("file_path"); - String logFileKey = (String) jsonCheckPoint.get("file_key"); - Integer maxAgeMin = null; - if (jsonCheckPoint.containsKey("max_age_min")) { - maxAgeMin = Integer.parseInt(jsonCheckPoint.get("max_age_min").toString()); - } - if (logFilePath != null && logFileKey != null) { - boolean deleteCheckPointFile = false; - File logFile = new File(logFilePath); - if (logFile.exists()) { - Object fileKeyObj = FileUtil.getFileKey(logFile); - String fileBase64 = Base64.byteArrayToBase64(fileKeyObj.toString().getBytes()); - if (!logFileKey.equals(fileBase64)) { - LOG.info("CheckPoint clean: File key has changed. old=" + logFileKey + ", new=" + fileBase64 + ", filePath=" + - logFilePath + ", checkPointFile=" + checkPointFile.getAbsolutePath()); - deleteCheckPointFile = !wasFileRenamed(logFile.getParentFile(), logFileKey); - } else if (maxAgeMin != null && maxAgeMin != 0 && FileUtil.isFileTooOld(logFile, maxAgeMin)) { - deleteCheckPointFile = true; - LOG.info("Checkpoint clean: File reached max age minutes (" + maxAgeMin + "):" + logFilePath); - } - } else { - LOG.info("CheckPoint clean: Log file doesn't exist. 
filePath=" + logFilePath + ", checkPointFile=" + - checkPointFile.getAbsolutePath()); - deleteCheckPointFile = !wasFileRenamed(logFile.getParentFile(), logFileKey); - } - if (deleteCheckPointFile) { - LOG.info("Deleting CheckPoint file=" + checkPointFile.getAbsolutePath() + ", logFile=" + logFilePath); - checkPointFile.delete(); - deleted = true; - } - } - } - } catch (EOFException eof) { - LOG.warn("Caught EOFException. Ignoring reading existing checkPoint file. " + checkPointFile); - } catch (Throwable t) { - LOG.error("Error while checking checkPoint file. " + checkPointFile, t); - } - - return deleted; - } - - private static boolean wasFileRenamed(File folder, String searchFileBase64) { - for (File file : folder.listFiles()) { - Object fileKeyObj = FileUtil.getFileKey(file); - String fileBase64 = Base64.byteArrayToBase64(fileKeyObj.toString().getBytes()); - if (searchFileBase64.equals(fileBase64)) { - // even though the file name in the checkpoint file is different from the one it was renamed to, checkpoint files are - // identified by their name, which is generated from the file key, which would be the same for the renamed file - LOG.info("CheckPoint clean: File key matches file " + file.getAbsolutePath() + ", it must have been renamed"); - return true; - } - } - return false; - } - - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/ResumeLineNumberHelper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/ResumeLineNumberHelper.java deleted file mode 100644 index 664fa4f8d79..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/file/checkpoint/util/ResumeLineNumberHelper.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.file.checkpoint.util; - -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.EOFException; -import java.io.File; -import java.io.RandomAccessFile; -import java.util.HashMap; -import java.util.Map; - -public class ResumeLineNumberHelper { - - private static final Logger LOG = LoggerFactory.getLogger(ResumeLineNumberHelper.class); - - private ResumeLineNumberHelper() { - } - - public static int getResumeFromLineNumber(InputFile inputFile, File checkPointFolder) { - int resumeFromLineNumber = 0; - - File checkPointFile = null; - try { - LOG.info("Checking existing checkpoint file. 
" + inputFile.getShortDescription()); - - String checkPointFileName = getCheckpointFileName(inputFile); - checkPointFile = new File(checkPointFolder, checkPointFileName); - inputFile.getCheckPointFiles().put(inputFile.getBase64FileKey(), checkPointFile); - Map jsonCheckPoint = null; - if (!checkPointFile.exists()) { - LOG.info("Checkpoint file for log file " + inputFile.getFilePath() + " doesn't exist, starting to read it from the beginning"); - } else { - try (RandomAccessFile checkPointWriter = new RandomAccessFile(checkPointFile, "rw")) { - int contentSize = checkPointWriter.readInt(); - byte b[] = new byte[contentSize]; - int readSize = checkPointWriter.read(b, 0, contentSize); - if (readSize != contentSize) { - LOG.error("Couldn't read expected number of bytes from checkpoint file. expected=" + contentSize + ", read=" + - readSize + ", checkPointFile=" + checkPointFile + ", input=" + inputFile.getShortDescription()); - } else { - String jsonCheckPointStr = new String(b, 0, readSize); - jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr); - - resumeFromLineNumber = LogFeederUtil.objectToInt(jsonCheckPoint.get("line_number"), 0, "line_number"); - - LOG.info("CheckPoint. checkPointFile=" + checkPointFile + ", json=" + jsonCheckPointStr + - ", resumeFromLineNumber=" + resumeFromLineNumber); - } - } catch (EOFException eofEx) { - LOG.info("EOFException. Will reset checkpoint file " + checkPointFile.getAbsolutePath() + " for " + - inputFile.getShortDescription(), eofEx); - } - } - if (jsonCheckPoint == null) { - // This seems to be first time, so creating the initial checkPoint object - jsonCheckPoint = new HashMap(); - jsonCheckPoint.put("file_path", inputFile.getFilePath()); - jsonCheckPoint.put("file_key", inputFile.getBase64FileKey()); - } - - inputFile.getJsonCheckPoints().put(inputFile.getBase64FileKey(), jsonCheckPoint); - - } catch (Throwable t) { - LOG.error("Error while configuring checkpoint file. Will reset file. checkPointFile=" + checkPointFile, t); - } - - return resumeFromLineNumber; - } - - private static String getCheckpointFileName(InputFile inputFile) { - return String.format("%s-%s%s", inputFile.getLogType(), - inputFile.getBase64FileKey(), inputFile.getCheckPointExtension()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/AbstractLogFileMonitor.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/AbstractLogFileMonitor.java deleted file mode 100644 index e0acde12c33..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/AbstractLogFileMonitor.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.monitor; - -import org.apache.ambari.logfeeder.input.InputFile; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class AbstractLogFileMonitor implements Runnable { - - private Logger LOG = LoggerFactory.getLogger(AbstractLogFileMonitor.class); - - private final InputFile inputFile; - private final int waitInterval; - private final int detachTime; - - AbstractLogFileMonitor(InputFile inputFile, int waitInterval, int detachTime) { - this.inputFile = inputFile; - this.waitInterval = waitInterval; - this.detachTime = detachTime; - } - - public InputFile getInputFile() { - return inputFile; - } - - public int getDetachTime() { - return detachTime; - } - - @Override - public void run() { - LOG.info(getStartLog()); - - while (!Thread.currentThread().isInterrupted()) { - try { - Thread.sleep(1000 * waitInterval); - monitorAndUpdate(); - } catch (Exception e) { - LOG.error("Monitor thread interrupted.", e); - } - } - } - - protected abstract String getStartLog(); - - protected abstract void monitorAndUpdate() throws Exception; -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/CheckpointCleanupMonitor.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/CheckpointCleanupMonitor.java deleted file mode 100644 index 45404c47e7a..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/CheckpointCleanupMonitor.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input.monitor; - -import org.apache.ambari.logfeeder.plugin.manager.CheckpointManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CheckpointCleanupMonitor implements Runnable { - - private static final Logger LOG = LoggerFactory.getLogger(CheckpointCleanupMonitor.class); - - private long waitIntervalMin; - private CheckpointManager checkpointHandler; - - public CheckpointCleanupMonitor(CheckpointManager checkpointHandler, long waitIntervalMin) { - this.waitIntervalMin = waitIntervalMin; - this.checkpointHandler = checkpointHandler; - } - - @Override - public void run() { - while (!Thread.currentThread().isInterrupted()) { - try { - Thread.sleep(1000 * 60 * waitIntervalMin); - checkpointHandler.cleanupCheckpoints(); - } catch (Exception e) { - LOG.error("Cleanup checkpoint files thread interrupted.", e); - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/DockerLogFileUpdateMonitor.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/DockerLogFileUpdateMonitor.java deleted file mode 100644 index 027582723d4..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/DockerLogFileUpdateMonitor.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.monitor; - -import org.apache.ambari.logfeeder.docker.DockerContainerRegistry; -import org.apache.ambari.logfeeder.docker.DockerMetadata; -import org.apache.ambari.logfeeder.input.InputFile; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -/** - * Periodically check docker containers metadata registry, stop monitoring container log files if those do not exist or stopped too long time ago. - * If it finds a new container log for the specific type, it will start to monitoring it. - *
- * Use cases:
- * - input is not monitored yet - found a new running container -> start monitoring it
- * - input is not monitored yet - found a new stopped container -> start monitoring it
- * - input is not monitored yet - found a new stopped container but its log is too old -> do not monitor it
- * - input is already monitored - container stopped - if it has been stopped for too long -> remove it from the monitored list
- * - input is already monitored - container stopped - its log is not too old -> keep it in the monitored list
- * - input is already monitored - container no longer exists -> remove it from the monitored list (and all other inputs of the same log type)
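// A minimal standalone sketch (illustrative only, with assumed names - not part of the original
// file) of the staleness rule listed in the use cases above: a running container is always kept,
// while a stopped container is detached from monitoring only once its last recorded timestamp is
// older than the configured detach window (minutes).
class ContainerDetachRuleSketch {

  /** Returns true when a container's log should no longer be monitored. */
  static boolean shouldDetach(boolean running, long lastSeenMs, long nowMs, long detachMinutes) {
    if (running) {
      return false;                              // running containers are always monitored
    }
    long ageMs = nowMs - lastSeenMs;             // time since the container was last seen
    return ageMs > detachMinutes * 60L * 1000L;  // stopped for longer than the detach window
  }

  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    long stoppedTwoHoursAgo = now - 2L * 60 * 60 * 1000;
    // With a 300-minute window the container is still monitored; with a 60-minute window it is detached.
    System.out.println(shouldDetach(false, stoppedTwoHoursAgo, now, 300)); // false
    System.out.println(shouldDetach(false, stoppedTwoHoursAgo, now, 60));  // true
  }
}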
- */ -public class DockerLogFileUpdateMonitor extends AbstractLogFileMonitor { - - private Logger LOG = LoggerFactory.getLogger(DockerLogFileUpdateMonitor.class); - - public DockerLogFileUpdateMonitor(InputFile inputFile, int waitInterval, int detachTime) { - super(inputFile, waitInterval, detachTime); - } - - @Override - protected String getStartLog() { - return "Start docker component type log files monitor thread for " + getInputFile().getLogType(); - } - - @Override - protected void monitorAndUpdate() throws Exception { - DockerContainerRegistry dockerContainerRegistry = getInputFile().getDockerContainerRegistry(); - Map> dockerMetadataMapByType = dockerContainerRegistry.getContainerMetadataMap(); - String logType = getInputFile().getLogType(); - Map copiedChildMap = new HashMap<>(getInputFile().getInputChildMap()); - - if (dockerMetadataMapByType.containsKey(logType)) { - Map dockerMetadataMap = dockerMetadataMapByType.get(logType); - for (Map.Entry containerEntry : dockerMetadataMap.entrySet()) { - String logPath = containerEntry.getValue().getLogPath(); - String containerId = containerEntry.getValue().getId(); - long timestamp = containerEntry.getValue().getTimestamp(); - boolean running = containerEntry.getValue().isRunning(); - LOG.debug("Found log path: {} (container id: {})", logPath, containerId); - if (!copiedChildMap.containsKey(logPath)) { - if (!running && isItTooOld(timestamp, new Date().getTime(), getDetachTime())) { - LOG.debug("Container with id {} is stopped, won't monitor as it stopped for long time.", containerId); - } else { - LOG.info("Found new container (id: {}) with new log path: {}", logPath, containerId); - getInputFile().startNewChildDockerInputFileThread(containerEntry.getValue()); - } - } else { - if (!running && isItTooOld(timestamp, new Date().getTime(), getDetachTime())) { - LOG.info("Removing: {}", logPath); - getInputFile().stopChildDockerInputFileThread(containerEntry.getKey()); - } - } - } - } else { - if (!copiedChildMap.isEmpty()) { - LOG.info("Removing all inputs with type: {}", logType); - for (Map.Entry inputFileEntry : copiedChildMap.entrySet()) { - LOG.info("Removing: {}", inputFileEntry.getKey()); - getInputFile().stopChildDockerInputFileThread(inputFileEntry.getKey()); - } - } - } - } - - private boolean isItTooOld(long timestamp, long actualTimestamp, long maxDiffMinutes) { - long diff = actualTimestamp - timestamp; - long maxDiffMins = maxDiffMinutes * 1000 * 60; - return diff > maxDiffMins; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/LogFileDetachMonitor.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/LogFileDetachMonitor.java deleted file mode 100644 index a40e118a7a1..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/LogFileDetachMonitor.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.monitor; - -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.util.FileUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Detach log files in case of folders do not exist or monitored files are too old - */ -public class LogFileDetachMonitor extends AbstractLogFileMonitor { - - private Logger LOG = LoggerFactory.getLogger(LogFileDetachMonitor.class); - - public LogFileDetachMonitor(InputFile inputFile, int interval, int detachTime) { - super(inputFile, interval, detachTime); - } - - @Override - public String getStartLog() { - return "Start file detach monitor thread for " + getInputFile().getFilePath(); - } - - @Override - protected void monitorAndUpdate() throws Exception { - File[] logFiles = getInputFile().getActualInputLogFiles(); - Map> actualFolderMap = FileUtil.getFoldersForFiles(logFiles); - - // create map copies - Map copiedInputFileMap = new HashMap<>(getInputFile().getInputChildMap()); - Map> copiedFolderMap = new HashMap<>(getInputFile().getFolderMap()); - // detach old entries - for (Map.Entry> entry : copiedFolderMap.entrySet()) { - if (new File(entry.getKey()).exists()) { - for (Map.Entry inputFileEntry : copiedInputFileMap.entrySet()) { - if (inputFileEntry.getKey().startsWith(entry.getKey())) { - File monitoredFile = entry.getValue().get(0); - boolean isFileTooOld = FileUtil.isFileTooOld(monitoredFile, getDetachTime()); - if (isFileTooOld) { - LOG.info("File ('{}') in folder ('{}') is too old (reached {} minutes), detach input thread.", entry.getKey(), getDetachTime()); - getInputFile().stopChildInputFileThread(entry.getKey()); - } - } - } - } else { - LOG.info("Folder not exists. ({}) Stop thread.", entry.getKey()); - for (Map.Entry inputFileEntry : copiedInputFileMap.entrySet()) { - if (inputFileEntry.getKey().startsWith(entry.getKey())) { - getInputFile().stopChildInputFileThread(entry.getKey()); - getInputFile().setFolderMap(actualFolderMap); - } - } - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/LogFilePathUpdateMonitor.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/LogFilePathUpdateMonitor.java deleted file mode 100644 index bfcab5dd641..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/monitor/LogFilePathUpdateMonitor.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.monitor; - -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.util.FileUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.List; -import java.util.Map; - -/** - * Update log file paths periodically, useful if the log file name pattern format is like: mylog-2017-10-09.log (so the tail one can change) - */ -public class LogFilePathUpdateMonitor extends AbstractLogFileMonitor { - - private Logger LOG = LoggerFactory.getLogger(LogFilePathUpdateMonitor.class); - - public LogFilePathUpdateMonitor(InputFile inputFile, int interval, int detachTime) { - super(inputFile, interval, detachTime); - } - - @Override - public String getStartLog() { - return "Start file path update monitor thread for " + getInputFile().getFilePath(); - } - - @Override - protected void monitorAndUpdate() throws Exception { - File[] logFiles = getInputFile().getActualInputLogFiles(); - Map> foldersMap = FileUtil.getFoldersForFiles(logFiles); - Map> originalFoldersMap = getInputFile().getFolderMap(); - for (Map.Entry> entry : foldersMap.entrySet()) { - if (originalFoldersMap.keySet().contains(entry.getKey())) { - List originalLogFiles = originalFoldersMap.get(entry.getKey()); - if (!entry.getValue().isEmpty()) { // check tail only for now - File lastFile = entry.getValue().get(0); - if (!originalLogFiles.get(0).getAbsolutePath().equals(lastFile.getAbsolutePath())) { - LOG.info("New file found (old: '{}', new: {}), reload thread for {}", - lastFile.getAbsolutePath(), originalLogFiles.get(0).getAbsolutePath(), entry.getKey()); - getInputFile().stopChildInputFileThread(entry.getKey()); - getInputFile().startNewChildInputFileThread(entry); - } - } - } else { - LOG.info("New log file folder found: {}, start a new thread if tail file is not too old.", entry.getKey()); - File monitoredFile = entry.getValue().get(0); - if (FileUtil.isFileTooOld(monitoredFile, getDetachTime())) { - LOG.info("'{}' file is too old. No new thread start needed.", monitoredFile.getAbsolutePath()); - } else { - getInputFile().startNewChildInputFileThread(entry); - } - } - } - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java deleted file mode 100644 index 7f78fd1ba60..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
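Editor's note: LogFilePathUpdateMonitor above re-resolves which concrete file currently matches a dated name such as mylog-2017-10-09.log and restarts the child thread when the newest match changes. Below is a sketch of picking the newest match, assuming ISO-dated names that sort chronologically; the folder and prefix are illustrative.

// Sketch: for date-stamped names the newest file can be found by sorting matches; when it
// differs from the file currently tailed, the child input thread is restarted.
import java.io.File;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Optional;

final class TailTargetResolver {
    static Optional<File> newestMatch(File folder, String prefix) {
        File[] matches = folder.listFiles((dir, name) -> name.startsWith(prefix));
        if (matches == null || matches.length == 0) {
            return Optional.empty();
        }
        // ISO dates in the name sort chronologically, so the lexicographic maximum is the newest file.
        return Arrays.stream(matches).max(Comparator.comparing(File::getName));
    }
}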
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.input.reader; - -import org.apache.log4j.Logger; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.zip.GZIPInputStream; - -class GZIPReader extends InputStreamReader { - - private static final Logger LOG = Logger.getLogger(GZIPReader.class); - - GZIPReader(String fileName) throws FileNotFoundException { - super(getStream(fileName)); - LOG.info("Created GZIPReader for file : " + fileName); - } - - private static InputStream getStream(String fileName) { - InputStream gzipStream = null; - InputStream fileStream = null; - try { - fileStream = new FileInputStream(fileName); - gzipStream = new GZIPInputStream(fileStream); - } catch (Exception e) { - LOG.error(e, e.getCause()); - } - return gzipStream; - } - - /** - * validating file based on magic number - */ - static boolean isValidFile(String fileName) { - // TODO make it generic and put in factory itself - - try (InputStream is = new FileInputStream(fileName)) { - byte[] signature = new byte[2]; - int nread = is.read(signature); // read the gzip signature - return nread == 2 && signature[0] == (byte) 0x1f && signature[1] == (byte) 0x8b; - } catch (IOException e) { - return false; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java deleted file mode 100644 index b9393aaa32c..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
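Editor's note: GZIPReader.isValidFile above sniffs only the two-byte gzip signature (0x1f, 0x8b). The standalone snippet below exercises the same check against a freshly written gzip file; the file name is temporary and illustrative.

// Usage sketch for the gzip signature test: write a small gzip file and confirm that its
// first two bytes are 0x1f 0x8b, exactly what GZIPReader.isValidFile() checks.
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.util.zip.GZIPOutputStream;

final class GzipSignatureDemo {
    public static void main(String[] args) throws IOException {
        java.io.File f = Files.createTempFile("demo", ".gz").toFile();
        try (GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(f))) {
            out.write("hello".getBytes("UTF-8"));
        }
        try (FileInputStream in = new FileInputStream(f)) {
            byte[] sig = new byte[2];
            int n = in.read(sig);
            System.out.println(n == 2 && sig[0] == (byte) 0x1f && sig[1] == (byte) 0x8b); // true
        }
    }
}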
- */ -package org.apache.ambari.logfeeder.input.reader; - -import org.apache.log4j.Logger; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.Reader; - -public enum LogsearchReaderFactory { - INSTANCE; - private static final Logger LOG = Logger.getLogger(LogsearchReaderFactory.class); - - public Reader getReader(File file) throws FileNotFoundException { - LOG.debug("Inside reader factory for file:" + file); - if (GZIPReader.isValidFile(file.getAbsolutePath())) { - LOG.info("Reading file " + file + " as gzip file"); - return new GZIPReader(file.getAbsolutePath()); - } else { - return new FileReader(file); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/loglevelfilter/LogLevelFilterHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/loglevelfilter/LogLevelFilterHandler.java deleted file mode 100644 index ab35f03fc09..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/loglevelfilter/LogLevelFilterHandler.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.loglevelfilter; - -import com.google.gson.Gson; -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.LogLevelFilterMonitor; -import org.apache.ambari.logsearch.config.api.LogSearchConfig; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.zookeeper.LogLevelFilterManagerZK; -import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigZKHelper; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.cache.TreeCache; -import org.apache.curator.framework.recipes.cache.TreeCacheListener; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.TreeMap; -import java.util.concurrent.ConcurrentHashMap; - -public class LogLevelFilterHandler implements LogLevelFilterMonitor { - private static final Logger LOG = LoggerFactory.getLogger(LogLevelFilterHandler.class); - - private static final String TIMEZONE = "GMT"; - private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS"; - - private static final boolean DEFAULT_VALUE = true; - - private static ThreadLocal formatter = new ThreadLocal() { - protected DateFormat initialValue() { - SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT); - dateFormat.setTimeZone(TimeZone.getTimeZone(TIMEZONE)); - return dateFormat; - } - }; - - @Inject - private LogFeederProps logFeederProps; - - private LogSearchConfig config; - private Map filters = new ConcurrentHashMap<>(); - - // Use these 2 only if local config is used with zk log level filter storage - private TreeCache clusterCache = null; - private TreeCacheListener listener = null; - - public LogLevelFilterHandler(LogSearchConfig config) { - this.config = config; - } - - @PostConstruct - public void init() throws Exception { - TimeZone.setDefault(TimeZone.getTimeZone(TIMEZONE)); - if (logFeederProps.isZkFilterStorage() && logFeederProps.isUseLocalConfigs()) { - LogLevelFilterManagerZK filterManager = (LogLevelFilterManagerZK) config.getLogLevelFilterManager(); - CuratorFramework client = filterManager.getClient(); - client.start(); - Gson gson = filterManager.getGson(); - LogSearchConfigZKHelper.waitUntilRootAvailable(client); - TreeCache clusterCache = LogSearchConfigZKHelper.createClusterCache(client, logFeederProps.getClusterName()); - TreeCacheListener listener = LogSearchConfigZKHelper.createTreeCacheListener( - logFeederProps.getClusterName(), gson, this); - LogSearchConfigZKHelper.addAndStartListenersOnCluster(clusterCache, listener); - } - if (config.getLogLevelFilterManager() != null) { - TreeMap sortedFilters = config.getLogLevelFilterManager() - .getLogLevelFilters(logFeederProps.getClusterName()) - .getFilter(); - filters = new ConcurrentHashMap<>(sortedFilters); - } - } - - @Override - public void setLogLevelFilter(String logId, LogLevelFilter logLevelFilter) { - synchronized 
(LogLevelFilterHandler.class) { - filters.put(logId, logLevelFilter); - } - } - - @Override - public void removeLogLevelFilter(String logId) { - synchronized (LogLevelFilterHandler.class) { - filters.remove(logId); - } - } - - @Override - public Map getLogLevelFilters() { - return filters; - } - - public boolean isAllowed(String hostName, String logId, String level, List defaultLogLevels) { - if (!logFeederProps.isLogLevelFilterEnabled()) { - return true; - } - - LogLevelFilter logFilter = findLogFilter(logId, defaultLogLevels); - List allowedLevels = getAllowedLevels(hostName, logFilter); - return allowedLevels.isEmpty() || allowedLevels.contains(level); - } - - public boolean isAllowed(String jsonBlock, InputMarker inputMarker, List defaultLogLevels) { - if (org.apache.commons.lang3.StringUtils.isEmpty(jsonBlock)) { - return DEFAULT_VALUE; - } - Map jsonObj = LogFeederUtil.toJSONObject(jsonBlock); - return isAllowed(jsonObj, inputMarker, defaultLogLevels); - } - - public boolean isAllowed(Map jsonObj, InputMarker inputMarker, List defaultLogLevels) { - if ("audit".equals(inputMarker.getInput().getInputDescriptor().getRowtype())) - return true; - - boolean isAllowed = applyFilter(jsonObj, defaultLogLevels); - if (!isAllowed) { - LOG.trace("Filter block the content :" + LogFeederUtil.getGson().toJson(jsonObj)); - } - return isAllowed; - } - - - public boolean applyFilter(Map jsonObj, List defaultLogLevels) { - if (MapUtils.isEmpty(jsonObj)) { - LOG.warn("Output jsonobj is empty"); - return DEFAULT_VALUE; - } - - String hostName = (String) jsonObj.get(LogFeederConstants.SOLR_HOST); - String logId = (String) jsonObj.get(LogFeederConstants.SOLR_COMPONENT); - String level = (String) jsonObj.get(LogFeederConstants.SOLR_LEVEL); - if (org.apache.commons.lang3.StringUtils.isNotBlank(hostName) && org.apache.commons.lang3.StringUtils.isNotBlank(logId) && org.apache.commons.lang3.StringUtils.isNotBlank(level)) { - return isAllowed(hostName, logId, level, defaultLogLevels); - } else { - return DEFAULT_VALUE; - } - } - - private synchronized LogLevelFilter findLogFilter(String logId, List defaultLogLevels) { - LogLevelFilter logFilter = filters.get(logId); - if (logFilter != null) { - return logFilter; - } - - LOG.info("Filter is not present for log " + logId + ", creating default filter"); - LogLevelFilter defaultFilter = new LogLevelFilter(); - defaultFilter.setLabel(logId); - defaultFilter.setDefaultLevels(defaultLogLevels); - - try { - config.getLogLevelFilterManager().createLogLevelFilter(logFeederProps.getClusterName(), logId, defaultFilter); - filters.put(logId, defaultFilter); - } catch (Exception e) { - LOG.warn("Could not persist the default filter for log " + logId, e); - } - - return defaultFilter; - } - - private List getAllowedLevels(String hostName, LogLevelFilter componentFilter) { - String componentName = componentFilter.getLabel(); - List hosts = componentFilter.getHosts(); - List defaultLevels = componentFilter.getDefaultLevels(); - List overrideLevels = componentFilter.getOverrideLevels(); - Date expiryTime = componentFilter.getExpiryTime(); - - // check is user override or not - if (expiryTime != null || CollectionUtils.isNotEmpty(overrideLevels) || CollectionUtils.isNotEmpty(hosts)) { - if (CollectionUtils.isEmpty(hosts)) { // hosts list is empty or null consider it apply on all hosts - hosts.add(LogFeederConstants.ALL); - } - - if (hosts.isEmpty() || hosts.contains(hostName)) { - if (isFilterExpired(componentFilter)) { - LOG.debug("Filter for component " + componentName + " and 
host :" + hostName + " is expired at " + - componentFilter.getExpiryTime()); - return defaultLevels; - } else { - return overrideLevels; - } - } - } - return defaultLevels; - } - - private boolean isFilterExpired(LogLevelFilter logLevelFilter) { - if (logLevelFilter == null) - return false; - - Date filterEndDate = logLevelFilter.getExpiryTime(); - if (filterEndDate == null) { - return false; - } - - Date currentDate = new Date(); - if (!currentDate.before(filterEndDate)) { - LOG.debug("Filter for Component :" + logLevelFilter.getLabel() + " and Hosts : [" + - StringUtils.join(logLevelFilter.getHosts(), ',') + "] is expired because of filter endTime : " + - formatter.get().format(filterEndDate) + " is older than currentTime :" + formatter.get().format(currentDate)); - return true; - } else { - return false; - } - } - - public void setLogFeederProps(LogFeederProps logFeederProps) { - this.logFeederProps = logFeederProps; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperAnonymize.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperAnonymize.java deleted file mode 100644 index 8c0fc72deec..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperAnonymize.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.mapper; - -import com.google.common.base.Splitter; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapAnonymizeDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.commons.lang.CharUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.util.Map; - -public class MapperAnonymize extends Mapper { - private static final Logger LOG = Logger.getLogger(MapperAnonymize.class); - - private static final char DEFAULT_HIDE_CHAR = '*'; - - private String pattern; - private Iterable patternParts; - private char hideChar; - - @Override - public boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor) { - init(inputDesc, fieldName, mapClassCode); - - pattern = ((MapAnonymizeDescriptor)mapFieldDescriptor).getPattern(); - if (StringUtils.isEmpty(pattern)) { - LOG.fatal("pattern is empty."); - return false; - } - - patternParts = Splitter.on("").omitEmptyStrings().split(pattern); - hideChar = CharUtils.toChar(((MapAnonymizeDescriptor)mapFieldDescriptor).getHideChar(), DEFAULT_HIDE_CHAR); - - return true; - } - - @Override - public Object apply(Map jsonObj, Object value) { - if (value != null) { - try { - hide((String)value, jsonObj); - } catch (Throwable t) { - LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply", "Error applying anonymization." + - " pattern=" + pattern + ", hideChar=" + hideChar, t, LOG, Level.ERROR); - } - } - return value; - } - - private void hide(String value, Map jsonObj) { - StringBuilder sb = new StringBuilder(); - boolean first = true; - String rest = value; - for (String patternPart : patternParts) { - int pos = rest.indexOf(patternPart); - if (pos == -1) { - return; - } - - int end = pos + patternPart.length(); - if (first) { - if (pattern.startsWith("")) { - String beginning = rest.substring(0, pos); - int spacePos = beginning.lastIndexOf(" "); - if (spacePos == -1) { - sb.append(StringUtils.repeat(hideChar, beginning.length())); - } else { - sb.append(beginning.substring(0, spacePos+1)); - sb.append(StringUtils.repeat(hideChar, beginning.length() - spacePos - 1)); - } - sb.append(rest.substring(pos, end)); - } else { - sb.append(rest.substring(0, end)); - } - first = false; - } else { - sb.append(StringUtils.repeat(hideChar, pos)); - sb.append(rest.substring(pos, end)); - } - rest = rest.substring(end); - } - - if (pattern.endsWith("")) { - int spacePos = rest.indexOf(" "); - if (spacePos == -1) { - sb.append(StringUtils.repeat(hideChar, rest.length())); - rest = ""; - } else { - sb.append(StringUtils.repeat(hideChar, spacePos)); - rest = rest.substring(spacePos); - } - } - - sb.append(rest); - - jsonObj.put(getFieldName(), sb.toString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java deleted file mode 100644 index 150869b3b1c..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation 
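Editor's note: MapperAnonymize above splits its pattern on a hide marker (the marker literal was stripped by extraction in this diff; presumably a placeholder token such as <hide>) and overwrites the variable part of the value with the hide character. A hedged, simplified sketch of that masking for a single placeholder between two literal fragments follows; the fragments and sample value are illustrative.

// Simplified masking sketch: everything between two literal fragments is replaced by the hide
// character; if either fragment is missing, the value is returned unchanged.
final class MaskDemo {
    static String mask(String value, String before, String after, char hideChar) {
        int start = value.indexOf(before);
        if (start < 0) {
            return value;
        }
        int from = start + before.length();
        int end = value.indexOf(after, from);
        if (end < 0) {
            return value;
        }
        StringBuilder masked = new StringBuilder(value.substring(0, from));
        for (int i = from; i < end; i++) {
            masked.append(hideChar);
        }
        return masked.append(value.substring(end)).toString();
    }

    public static void main(String[] args) {
        System.out.println(mask("login Password='secret123' ok", "Password='", "'", '*'));
        // -> login Password='*********' ok
    }
}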
(ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder.mapper; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapDateDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.commons.lang.time.DateUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.time.FastDateFormat; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.text.ParseException; -import java.util.Calendar; -import java.util.Date; -import java.util.Map; - -public class MapperDate extends Mapper { - private static final Logger LOG = Logger.getLogger(MapperDate.class); - - private FastDateFormat targetDateFormatter = null; - private boolean isEpoch = false; - private FastDateFormat srcDateFormatter=null; - - @Override - public boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor) { - init(inputDesc, fieldName, mapClassCode); - - String targetDateFormat = ((MapDateDescriptor)mapFieldDescriptor).getTargetDatePattern(); - String srcDateFormat = ((MapDateDescriptor)mapFieldDescriptor).getSourceDatePattern(); - if (StringUtils.isEmpty(targetDateFormat)) { - LOG.fatal("Date format for map is empty. " + this); - } else { - LOG.info("Date mapper format is " + targetDateFormat); - - if (targetDateFormat.equalsIgnoreCase("epoch")) { - isEpoch = true; - return true; - } else { - try { - targetDateFormatter = FastDateFormat.getInstance(targetDateFormat); - if (!StringUtils.isEmpty(srcDateFormat)) { - srcDateFormatter = FastDateFormat.getInstance(srcDateFormat); - } - return true; - } catch (Throwable ex) { - LOG.fatal("Error creating date format. format=" + targetDateFormat + ". 
" + this.toString()); - } - } - } - return false; - } - - @Override - public Object apply(Map jsonObj, Object value) { - if (value != null) { - try { - if (isEpoch) { - long ms = Long.parseLong(value.toString()) * 1000; - value = new Date(ms); - jsonObj.put(LogFeederConstants.IN_MEMORY_TIMESTAMP, ((Date) value).getTime()); - } else if (targetDateFormatter != null) { - if (srcDateFormatter != null) { - Date srcDate = getSourceDate(value); - value = targetDateFormatter.format(srcDate); - jsonObj.put(LogFeederConstants.IN_MEMORY_TIMESTAMP, srcDate.getTime()); - } else { - value = targetDateFormatter.parse(value.toString()); - jsonObj.put(LogFeederConstants.IN_MEMORY_TIMESTAMP, ((Date) value).getTime()); - } - } else { - return value; - } - jsonObj.put(getFieldName(), value); - } catch (Throwable t) { - LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply", "Error applying date transformation." + - " isEpoch=" + isEpoch + ", targetDateFormat=" + (targetDateFormatter!=null ?targetDateFormatter.getPattern():"") - + ", value=" + value + ". " + this.toString(), t, LOG, Level.ERROR); - } - } - return value; - } - - private Date getSourceDate(Object value) throws ParseException { - Date srcDate = srcDateFormatter.parse(value.toString()); - - Calendar currentCalendar = Calendar.getInstance(); - - if (!srcDateFormatter.getPattern().contains("dd")) { - //set year/month/date in src_date when src_date does not have date component - srcDate = DateUtils.setYears(srcDate, currentCalendar.get(Calendar.YEAR)); - srcDate = DateUtils.setMonths(srcDate, currentCalendar.get(Calendar.MONTH)); - srcDate = DateUtils.setDays(srcDate, currentCalendar.get(Calendar.DAY_OF_MONTH)); - // if with the current date the time stamp is after the current one, it must be previous day - if (srcDate.getTime() > currentCalendar.getTimeInMillis()) { - srcDate = DateUtils.addDays(srcDate, -1); - } - } else if (!srcDateFormatter.getPattern().contains("yy")) { - //set year in src_date when src_date does not have year component - srcDate = DateUtils.setYears(srcDate, currentCalendar.get(Calendar.YEAR)); - // if with the current year the time stamp is after the current one, it must be previous year - if (srcDate.getTime() > currentCalendar.getTimeInMillis()) { - srcDate = DateUtils.addYears(srcDate, -1); - } - } - - return srcDate; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java deleted file mode 100644 index bbb63379730..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder.mapper; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldCopyDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; - -import java.util.Map; - -/** - * Overrides the value for the field - */ -public class MapperFieldCopy extends Mapper { - private static final Logger LOG = Logger.getLogger(MapperFieldCopy.class); - - private String copyName = null; - - @Override - public boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor) { - init(inputDesc, fieldName, mapClassCode); - copyName = ((MapFieldCopyDescriptor)mapFieldDescriptor).getCopyName(); - if (StringUtils.isEmpty(copyName)) { - LOG.fatal("Map copy name is empty."); - return false; - } - return true; - } - - @Override - public Object apply(Map jsonObj, Object value) { - jsonObj.put(copyName, value); - return value; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java deleted file mode 100644 index 2b1f70f8342..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.mapper; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldNameDescriptor; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.util.Map; - -/** - * Overrides the value for the field - */ -public class MapperFieldName extends Mapper { - private static final Logger LOG = Logger.getLogger(MapperFieldName.class); - - private String newValue = null; - - @Override - public boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor) { - init(inputDesc, fieldName, mapClassCode); - - newValue = ((MapFieldNameDescriptor)mapFieldDescriptor).getNewFieldName(); - if (StringUtils.isEmpty(newValue)) { - LOG.fatal("Map field value is empty."); - return false; - } - return true; - } - - @Override - public Object apply(Map jsonObj, Object value) { - if (newValue != null) { - jsonObj.remove(getFieldName()); - jsonObj.put(newValue, value); - } else { - LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply", - "New fieldName is null, so transformation is not applied. " + this.toString(), null, LOG, Level.ERROR); - } - return value; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java deleted file mode 100644 index e3d49244835..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.mapper; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.filter.mapper.Mapper; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldValueDescriptor; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.util.Map; - -/** - * Overrides the value for the field - */ -public class MapperFieldValue extends Mapper { - private static final Logger LOG = Logger.getLogger(MapperFieldValue.class); - - private String prevValue = null; - private String newValue = null; - - @Override - public boolean init(String inputDesc, String fieldName, String mapClassCode, MapFieldDescriptor mapFieldDescriptor) { - init(inputDesc, fieldName, mapClassCode); - - prevValue = ((MapFieldValueDescriptor)mapFieldDescriptor).getPreValue(); - newValue = ((MapFieldValueDescriptor)mapFieldDescriptor).getPostValue();; - if (StringUtils.isEmpty(newValue)) { - LOG.fatal("Map field value is empty."); - return false; - } - return true; - } - - @Override - public Object apply(Map jsonObj, Object value) { - if (newValue != null && prevValue != null) { - if (prevValue.equalsIgnoreCase(value.toString())) { - value = newValue; - jsonObj.put(getFieldName(), value); - } - } else { - LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply", - "New value is null, so transformation is not applied. " + this.toString(), null, LOG, Level.ERROR); - } - return value; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java deleted file mode 100644 index 0ccdff34c2a..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
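Editor's note: MapperFieldValue above rewrites a field only when the incoming value equals the configured pre-value, compared case-insensitively. A minimal sketch of that remap follows; the WARNING/WARN values are illustrative.

// Minimal sketch of the case-insensitive value remap performed by MapperFieldValue.apply().
final class ValueRemapDemo {
    static Object remap(Object value, String preValue, String postValue) {
        if (value != null && preValue.equalsIgnoreCase(value.toString())) {
            return postValue;
        }
        return value;
    }

    public static void main(String[] args) {
        System.out.println(remap("WARNING", "WARNING", "WARN")); // WARN
        System.out.println(remap("INFO", "WARNING", "WARN"));    // INFO (unchanged)
    }
}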
- */ - -package org.apache.ambari.logfeeder.metrics; - -import org.apache.ambari.logfeeder.conf.LogFeederSecurityConfig; -import org.apache.ambari.logfeeder.conf.MetricsCollectorConfig; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; -import org.apache.log4j.Logger; - -import java.util.Collection; -import java.util.List; - -// TODO: Refactor for failover -public class LogFeederAMSClient extends AbstractTimelineMetricsSink { - private static final Logger LOG = Logger.getLogger(LogFeederAMSClient.class); - - private final List collectorHosts; - private final String collectorProtocol; - private final String collectorPort; - private final String collectorPath; - - public LogFeederAMSClient(MetricsCollectorConfig metricsCollectorConfig, LogFeederSecurityConfig securityConfig) { - String collectorHostsString = metricsCollectorConfig.getHostsString(); - if (!StringUtils.isBlank(collectorHostsString)) { - collectorHostsString = collectorHostsString.trim(); - LOG.info("AMS collector Hosts=" + collectorHostsString); - - collectorHosts = metricsCollectorConfig.getHosts(); - collectorProtocol = metricsCollectorConfig.getProtocol(); - collectorPort = metricsCollectorConfig.getPort(); - collectorPath = metricsCollectorConfig.getPath(); - } else { - collectorHosts = null; - collectorProtocol = null; - collectorPort = null; - collectorPath = null; - } - - if (StringUtils.isNotBlank(securityConfig.getTrustStoreLocation())) { - loadTruststore(securityConfig.getTrustStoreLocation(), securityConfig.getTrustStoreType(), securityConfig.getTrustStorePassword()); - } - } - - @Override - public String getCollectorUri(String host) { - if (collectorProtocol == null || host == null || collectorPort == null || collectorPath == null) { - return null; - } - return String.format("%s://%s:%s%s", collectorProtocol, host, collectorPort, collectorPath); - } - - @Override - protected int getTimeoutSeconds() { - // TODO: Hard coded timeout - return 10; - } - - @Override - protected String getZookeeperQuorum() { - return null; - } - - @Override - protected Collection getConfiguredCollectorHosts() { - return collectorHosts; - } - - @Override - protected String getHostname() { - return null; - } - - @Override - protected boolean isHostInMemoryAggregationEnabled() { - return false; - } - - @Override - protected int getHostInMemoryAggregationPort() { - return 0; - } - - @Override - protected String getHostInMemoryAggregationProtocol() { - return "http"; - } - - @Override - protected boolean emitMetrics(TimelineMetrics metrics) { - return super.emitMetrics(metrics); - } - - @Override - protected String getCollectorProtocol() { - return collectorProtocol; - } - - @Override - protected String getCollectorPort() { - return collectorPort; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java deleted file mode 100644 index f5bc0eb6189..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
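Editor's note: LogFeederAMSClient.getCollectorUri above assembles the metrics collector endpoint with the format string "%s://%s:%s%s". A quick illustration follows; the host, port and path values below are made up.

// Illustration of the collector URI format used by getCollectorUri(); values are examples only.
final class CollectorUriDemo {
    public static void main(String[] args) {
        String protocol = "http", host = "ams-host.example.com", port = "6188";
        String path = "/ws/v1/timeline/metrics";
        System.out.println(String.format("%s://%s:%s%s", protocol, host, port, path));
        // -> http://ams-host.example.com:6188/ws/v1/timeline/metrics
    }
}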
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder.metrics; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.TreeMap; - -import org.apache.ambari.logfeeder.conf.LogFeederSecurityConfig; -import org.apache.ambari.logfeeder.conf.MetricsCollectorConfig; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; -import org.apache.log4j.Logger; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; - -public class MetricsManager { - private static final Logger LOG = Logger.getLogger(MetricsManager.class); - - private boolean isMetricsEnabled = false; - private String appId = "logfeeder"; - - private long lastPublishTimeMS = 0; // Let's do the first publish immediately - private long lastFailedPublishTimeMS = System.currentTimeMillis(); // Reset the clock - - private int publishIntervalMS = 60 * 1000; - private int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, we should not keep the metrics in memory forever - private HashMap metricsMap = new HashMap<>(); - private LogFeederAMSClient amsClient = null; - - @Inject - private MetricsCollectorConfig metricsCollectorConfig; - - @Inject - private LogFeederSecurityConfig logFeederSecurityConfig; - - @PostConstruct - public void init() { - LOG.info("Initializing MetricsManager()"); - if (amsClient == null) { - amsClient = new LogFeederAMSClient(metricsCollectorConfig, logFeederSecurityConfig); - } - - if (amsClient.getCollectorUri(null) != null) { - if (LogFeederUtil.hostName == null) { - isMetricsEnabled = false; - LOG.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics"); - } else { - isMetricsEnabled = true; - LOG.info("LogFeeder Metrics is enabled. Metrics host=" + amsClient.getCollectorUri(null)); - } - } else { - LOG.info("LogFeeder Metrics publish is disabled"); - } - } - - public boolean isMetricsEnabled() { - return isMetricsEnabled; - } - - public synchronized void useMetrics(List metricsList) { - if (!isMetricsEnabled) { - return; - } - LOG.info("useMetrics() metrics.size=" + metricsList.size()); - long currMS = System.currentTimeMillis(); - - gatherMetrics(metricsList, currMS); - publishMetrics(currMS); - } - - private void gatherMetrics(List metricsList, long currMS) { - Long currMSLong = new Long(currMS); - for (MetricData metric : metricsList) { - if (metric.metricsName == null) { - LOG.debug("metric.metricsName is null"); - continue; - } - long currCount = metric.value; - if (!metric.isPointInTime && metric.publishCount > 0 && currCount <= metric.prevPublishValue) { - LOG.debug("Nothing changed. 
" + metric.metricsName + ", currCount=" + currCount + ", prevPublishCount=" + - metric.prevPublishValue); - continue; - } - metric.publishCount++; - - LOG.debug("Ensuring metrics=" + metric.metricsName); - TimelineMetric timelineMetric = metricsMap.get(metric.metricsName); - if (timelineMetric == null) { - LOG.debug("Creating new metric obbject for " + metric.metricsName); - timelineMetric = new TimelineMetric(); - timelineMetric.setMetricName(metric.metricsName); - timelineMetric.setHostName(LogFeederUtil.hostName); - timelineMetric.setAppId(appId); - timelineMetric.setStartTime(currMS); - timelineMetric.setType("Long"); - timelineMetric.setMetricValues(new TreeMap()); - - metricsMap.put(metric.metricsName, timelineMetric); - } - - LOG.debug("Adding metrics=" + metric.metricsName); - if (metric.isPointInTime) { - timelineMetric.getMetricValues().put(currMSLong, new Double(currCount)); - } else { - Double value = timelineMetric.getMetricValues().get(currMSLong); - if (value == null) { - value = new Double(0); - } - value += (currCount - metric.prevPublishValue); - timelineMetric.getMetricValues().put(currMSLong, value); - metric.prevPublishValue = currCount; - } - } - } - - private void publishMetrics(long currMS) { - if (!metricsMap.isEmpty() && currMS - lastPublishTimeMS > publishIntervalMS) { - try { - TimelineMetrics timelineMetrics = new TimelineMetrics(); - timelineMetrics.setMetrics(new ArrayList(metricsMap.values())); - amsClient.emitMetrics(timelineMetrics); - - LOG.info("Published " + timelineMetrics.getMetrics().size() + " metrics to AMS"); - metricsMap.clear(); - lastPublishTimeMS = currMS; - } catch (Throwable t) { - LOG.warn("Error sending metrics to AMS.", t); - if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) { - LOG.error("AMS was not sent for last " + maxMetricsBuffer / 1000 + - " seconds. Purging it and will start rebuilding it again"); - metricsMap.clear(); - lastFailedPublishTimeMS = currMS; - } - } - } else { - LOG.info("Not publishing metrics. metrics.size()=" + metricsMap.size() + ", lastPublished=" + - (currMS - lastPublishTimeMS) / 1000 + " seconds ago, intervalConfigured=" + publishIntervalMS / 1000); - } - } - - public void setAmsClient(LogFeederAMSClient amsClient) { - this.amsClient = amsClient; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/StatsLogger.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/StatsLogger.java deleted file mode 100644 index 91de1d83d99..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/StatsLogger.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.metrics; - -import org.apache.ambari.logfeeder.common.ConfigHandler; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import java.util.ArrayList; -import java.util.List; - -public class StatsLogger extends Thread { - - private static final Logger LOG = LoggerFactory.getLogger(StatsLogger.class); - - private static final int CHECKPOINT_CLEAN_INTERVAL_MS = 24 * 60 * 60 * 60 * 1000; // 24 hours - - private long lastCheckPointCleanedMS = 0; - - @Inject - private ConfigHandler configHandler; - - @Inject - private MetricsManager metricsManager; - - public StatsLogger() { - super("statLogger"); - setDaemon(true); - } - - @PostConstruct - public void init() { - this.start(); - } - - @Override - public void run() { - while (true) { - try { - Thread.sleep(30 * 1000); - } catch (Throwable t) { - // Ignore - } - try { - logStats(); - } catch (Throwable t) { - LOG.error("LogStats: Caught exception while logging stats.", t); - } - - if (System.currentTimeMillis() > (lastCheckPointCleanedMS + CHECKPOINT_CLEAN_INTERVAL_MS)) { - lastCheckPointCleanedMS = System.currentTimeMillis(); - configHandler.cleanCheckPointFiles(); - } - } - } - - private void logStats() { - configHandler.logStats(); - if (metricsManager.isMetricsEnabled()) { - List metricsList = new ArrayList(); - configHandler.addMetrics(metricsList); - metricsManager.useMetrics(metricsList); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java deleted file mode 100644 index 5cde6db6ec4..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.plugin.input.InputMarker; - -import java.util.Map; - -/** - * This contains the output json object and InputMarker. 
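Editor's note: StatsLogger above wakes every 30 seconds to log stats and push metrics, and periodically triggers checkpoint cleanup. Its CHECKPOINT_CLEAN_INTERVAL_MS is commented as 24 hours but is computed with an extra * 60 factor; for reference, 24 hours expressed in milliseconds is shown below.

// For reference: 24 hours in milliseconds, matching the intent of the "// 24 hours" comment
// (the deleted expression 24 * 60 * 60 * 60 * 1000 carries an extra * 60 factor).
final class Intervals {
    static final long CHECKPOINT_CLEAN_INTERVAL_MS = 24L * 60 * 60 * 1000; // 86,400,000 ms
}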
- */ -public class OutputData { - public final Map jsonObj; - public final InputMarker inputMarker; - - public OutputData(Map jsonObj, InputMarker inputMarker) { - this.jsonObj = jsonObj; - this.inputMarker = inputMarker; - } - - @Override - public String toString() { - return "OutputData [jsonObj=" + jsonObj + ", inputMarker=" + inputMarker + "]"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java deleted file mode 100644 index 2113cbd527d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.log4j.Logger; - -import java.io.File; - -/** - * Output that just ignore the logs - */ -public class OutputDevNull extends Output { - - private static final Logger LOG = Logger.getLogger(OutputDevNull.class); - - @Override - public void write(String block, InputMarker inputMarker){ - LOG.trace("Ignore log block: " + block); - } - - @Override - public Long getPendingCount() { - return 0L; - } - - @Override - public String getWriteBytesMetricName() { - return "write:devnull"; - } - - @Override - public String getOutputType() { - return "devnull"; - } - - @Override - public void close() { - } - - @Override - public void copyFile(File inputFile, InputMarker inputMarker) { - throw new UnsupportedOperationException("copyFile method is not yet supported for output=dev_null"); - } - - @Override - public void init(LogFeederProps LogFeederProps) throws Exception { - } - - @Override - public String getShortDescription() { - return "write:devnull"; - } - - @Override - public String getStatMetricName() { - return "write:devnull"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java deleted file mode 100644 index 850daaff10d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.Map; - -public class OutputFile extends Output { - private static final Logger LOG = Logger.getLogger(OutputFile.class); - - private PrintWriter outWriter; - private String filePath = null; - private String codec; - private LogFeederProps logFeederProps; - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - this.logFeederProps = logFeederProps; - filePath = getStringValue("path"); - if (StringUtils.isEmpty(filePath)) { - LOG.error("Filepath config property is not set in config file."); - return; - } - codec = getStringValue("codec"); - if (StringUtils.isBlank(codec)) { - codec = "json"; - } else { - if (codec.trim().equalsIgnoreCase("csv")) { - codec = "csv"; - } else if (codec.trim().equalsIgnoreCase("json")) { - codec = "csv"; - } else { - LOG.error("Unsupported codec type. codec=" + codec + ", will use json"); - codec = "json"; - } - } - LOG.info("Out filePath=" + filePath + ", codec=" + codec); - File outFile = new File(filePath); - if (outFile.getParentFile() != null) { - File parentDir = outFile.getParentFile(); - if (!parentDir.isDirectory()) { - parentDir.mkdirs(); - } - } - - outWriter = new PrintWriter(new BufferedWriter(new FileWriter(outFile, true))); - - LOG.info("init() is successfull. filePath=" + outFile.getAbsolutePath()); - } - - @Override - public void close() { - LOG.info("Closing file." 
+ getShortDescription()); - if (outWriter != null) { - try { - outWriter.close(); - } catch (Throwable t) { - // Ignore this exception - } - } - setClosed(true); - } - - @Override - public void write(Map jsonObj, InputFileMarker inputMarker) throws Exception { - String outStr = null; - CSVPrinter csvPrinter = null; - try { - if (codec.equals("csv")) { - csvPrinter = new CSVPrinter(outWriter, CSVFormat.RFC4180); - //TODO: - } else { - outStr = LogFeederUtil.getGson().toJson(jsonObj); - } - if (outWriter != null && outStr != null) { - statMetric.value++; - - outWriter.println(outStr); - outWriter.flush(); - } - } finally { - if (csvPrinter != null) { - try { - csvPrinter.close(); - } catch (IOException e) { - } - } - } - } - - @Override - synchronized public void write(String block, InputFileMarker inputMarker) throws Exception { - if (outWriter != null && block != null) { - statMetric.value++; - - outWriter.println(block); - outWriter.flush(); - } - } - - @Override - public Long getPendingCount() { - return null; - } - - @Override - public String getWriteBytesMetricName() { - return "output.kafka.write_bytes"; - } - - @Override - public String getShortDescription() { - return "output:destination=file,path=" + filePath; - } - - @Override - public String getStatMetricName() { - return "output.file.write_logs"; - } - - @Override - public String getOutputType() { - throw new IllegalStateException("This method should be overriden if the Output wants to monitor the configuration"); - } - - @Override - public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException { - throw new UnsupportedOperationException("copyFile method is not yet supported for output=file"); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java deleted file mode 100644 index 03669fe9ed4..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
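Editor's note: OutputFile.init above normalizes the configured codec, though in the deleted code the branch matching "json" assigns "csv", so both recognized values end up on the CSV path. Below is a sketch of the normalization as the surrounding log messages describe it: blank or unsupported values fall back to json.

// Sketch of codec normalization as described by the log messages around it; not the deleted code itself.
import org.apache.commons.lang3.StringUtils;

final class CodecChoice {
    static String normalize(String codec) {
        if (StringUtils.isBlank(codec)) {
            return "json";
        }
        String trimmed = codec.trim().toLowerCase();
        if (trimmed.equals("csv") || trimmed.equals("json")) {
            return trimmed;
        }
        return "json"; // unsupported codec, fall back to json
    }
}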
- */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.output.spool.LogSpooler; -import org.apache.ambari.logfeeder.output.spool.LogSpoolerContext; -import org.apache.ambari.logfeeder.output.spool.RolloverCondition; -import org.apache.ambari.logfeeder.output.spool.RolloverHandler; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.LogFeederHDFSUtil; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logfeeder.util.PlaceholderUtil; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.fs.FileSystem; -import org.apache.log4j.Logger; - -import java.io.File; -import java.util.Date; -import java.util.HashMap; -import java.util.Iterator; -import java.util.concurrent.ConcurrentLinkedQueue; - -/** - * An {@link Output} that records logs to HDFS. - * - * The events are spooled on the local file system and uploaded in batches asynchronously. - */ -public class OutputHDFSFile extends Output implements RolloverHandler, RolloverCondition { - private static final Logger LOG = Logger.getLogger(OutputHDFSFile.class); - - private static final long DEFAULT_ROLLOVER_THRESHOLD_TIME_SECONDS = 5 * 60L;// 5 min by default - - private ConcurrentLinkedQueue localReadyFiles = new ConcurrentLinkedQueue(); - - private final Object readyMonitor = new Object(); - - private Thread hdfsCopyThread = null; - - private String filenamePrefix = "service-logs-"; - private long rolloverThresholdTimeMillis; - - private String hdfsOutDir = null; - private String hdfsHost = null; - private String hdfsPort = null; - private FileSystem fileSystem = null; - - private LogSpooler logSpooler; - - private LogFeederProps logFeederProps; - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - this.logFeederProps = logFeederProps; - hdfsOutDir = getStringValue("hdfs_out_dir"); - hdfsHost = getStringValue("hdfs_host"); - hdfsPort = getStringValue("hdfs_port"); - long rolloverThresholdTimeSeconds = getLongValue("rollover_sec", DEFAULT_ROLLOVER_THRESHOLD_TIME_SECONDS); - rolloverThresholdTimeMillis = rolloverThresholdTimeSeconds * 1000L; - filenamePrefix = getStringValue("file_name_prefix", filenamePrefix); - if (StringUtils.isEmpty(hdfsOutDir)) { - LOG.error("HDFS config property is not set in config file."); - return; - } - if (StringUtils.isEmpty(hdfsHost)) { - LOG.error("HDFS config property is not set in config file."); - return; - } - if (StringUtils.isEmpty(hdfsPort)) { - LOG.error("HDFS config property is not set in config file."); - return; - } - HashMap contextParam = buildContextParam(); - hdfsOutDir = PlaceholderUtil.replaceVariables(hdfsOutDir, contextParam); - LOG.info("hdfs Output dir=" + hdfsOutDir); - String localFileDir = logFeederProps.getTmpDir() + "hdfs/service/"; - logSpooler = new LogSpooler(localFileDir, filenamePrefix, this, this); - this.startHDFSCopyThread(); - } - - @Override - public void close() { - LOG.info("Closing file." 
+ getShortDescription()); - logSpooler.rollover(); - this.stopHDFSCopyThread(); - setClosed(true); - } - - @Override - public synchronized void write(String block, InputFileMarker inputMarker) throws Exception { - if (block != null) { - logSpooler.add(block); - statMetric.value++; - } - } - - - @Override - public String getShortDescription() { - return "output:destination=hdfs,hdfsOutDir=" + hdfsOutDir; - } - - private void startHDFSCopyThread() { - - hdfsCopyThread = new Thread("hdfsCopyThread") { - @Override - public void run() { - try { - while (true) { - Iterator localFileIterator = localReadyFiles.iterator(); - while (localFileIterator.hasNext()) { - File localFile = localFileIterator.next(); - fileSystem = LogFeederHDFSUtil.buildFileSystem(hdfsHost, hdfsPort); - if (fileSystem != null && localFile.exists()) { - String destFilePath = hdfsOutDir + "/" + localFile.getName(); - String localPath = localFile.getAbsolutePath(); - boolean overWrite = true; - boolean delSrc = true; - boolean isCopied = LogFeederHDFSUtil.copyFromLocal(localFile.getAbsolutePath(), destFilePath, fileSystem, - overWrite, delSrc); - if (isCopied) { - LOG.debug("File copy to hdfs hdfspath :" + destFilePath + " and deleted local file :" + localPath); - } else { - // TODO Need to write retry logic, in next release we can handle it - LOG.error("Hdfs file copy failed for hdfspath :" + destFilePath + " and localpath :" + localPath); - } - } - localFileIterator.remove(); - } - try { - // wait till new file comes in reayList - synchronized (readyMonitor) { - if (localReadyFiles.isEmpty()) { - readyMonitor.wait(); - } - } - } catch (InterruptedException e) { - LOG.error(e.getLocalizedMessage(),e); - } - } - } catch (Exception e) { - LOG.error("Exception in hdfsCopyThread errorMsg:" + e.getLocalizedMessage(), e); - } - } - }; - hdfsCopyThread.setDaemon(true); - hdfsCopyThread.start(); - } - - private void stopHDFSCopyThread() { - if (hdfsCopyThread != null) { - LOG.info("waiting till copy all local files to hdfs......."); - while (!localReadyFiles.isEmpty()) { - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - LOG.error(e.getLocalizedMessage(), e); - } - LOG.debug("still waiting to copy all local files to hdfs......."); - } - LOG.info("calling interrupt method for hdfsCopyThread to stop it."); - try { - hdfsCopyThread.interrupt(); - } catch (SecurityException exception) { - LOG.error(" Current thread : '" + Thread.currentThread().getName() + - "' does not have permission to interrupt the Thread: '" + hdfsCopyThread.getName() + "'"); - } - LogFeederHDFSUtil.closeFileSystem(fileSystem); - } - } - - private HashMap buildContextParam() { - HashMap contextParam = new HashMap(); - contextParam.put("host", LogFeederUtil.hostName); - return contextParam; - } - - private void addFileInReadyList(File localFile) { - localReadyFiles.add(localFile); - try { - synchronized (readyMonitor) { - readyMonitor.notifyAll(); - } - } catch (Exception e) { - LOG.error(e.getLocalizedMessage(),e); - } - } - - @Override - public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException { - throw new UnsupportedOperationException("copyFile method is not yet supported for output=hdfs"); - } - - /** - * Add the rollover file to a daemon thread for uploading to HDFS - * @param rolloverFile the file to be uploaded to HDFS - */ - @Override - public void handleRollover(File rolloverFile) { - addFileInReadyList(rolloverFile); - } - - /** - * Determines whether it is time to handleRollover the current spool 
file. - * - * The file will handleRollover if the time since creation of the file is more than - * the timeout specified in rollover_sec configuration. - * @param currentSpoolerContext {@link LogSpoolerContext} that holds state of active Spool file - * @return true if time since creation is greater than value specified in rollover_sec, - * false otherwise. - */ - @Override - public boolean shouldRollover(LogSpoolerContext currentSpoolerContext) { - long timeSinceCreation = new Date().getTime() - currentSpoolerContext.getActiveLogCreationTime().getTime(); - boolean shouldRollover = timeSinceCreation > rolloverThresholdTimeMillis; - if (shouldRollover) { - LOG.info("Detecting that time since file creation time " + currentSpoolerContext.getActiveLogCreationTime() + - " has crossed threshold (msecs) " + rolloverThresholdTimeMillis); - } - return shouldRollover; - } - - @Override - public String getOutputType() { - throw new IllegalStateException("This method should be overriden if the Output wants to monitor the configuration"); - } - - @Override - public Long getPendingCount() { - return 0L; - } - - @Override - public String getWriteBytesMetricName() { - return "output.hdfs.write_bytes"; - } - - @Override - public String getStatMetricName() { - return "output.hdfs.write_logs"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java deleted file mode 100644 index a82ede03ccf..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.commons.lang3.StringUtils; -import org.apache.kafka.clients.producer.Callback; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.serialization.StringSerializer; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.io.File; -import java.util.Properties; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.LinkedTransferQueue; - -public class OutputKafka extends Output { - private static final Logger LOG = Logger.getLogger(OutputKafka.class); - - private static final int FAILED_RETRY_INTERVAL = 30; - private static final int CATCHUP_RETRY_INTERVAL = 5; - - private static final int DEFAULT_BATCH_SIZE = 5000; - private static final int DEFAULT_LINGER_MS = 1000; - - private String topic = null; - private boolean isAsync = true; - private long messageCount = 0; - - private KafkaProducer producer = null; - private BlockingQueue failedMessages = new LinkedTransferQueue(); - - // Let's start with the assumption Kafka is down - private boolean isKafkaBrokerUp = false; - - private LogFeederProps logFeederProps; - - @Override - public String getStatMetricName() { - return "output.kafka.write_logs"; - } - - @Override - public String getWriteBytesMetricName() { - return "output.kafka.write_bytes"; - } - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - this.logFeederProps = logFeederProps; - Properties props = initProperties(); - - producer = creteKafkaProducer(props); - createKafkaRetryThread(); - } - - private Properties initProperties() throws Exception { - String brokerList = getStringValue("broker_list"); - if (StringUtils.isEmpty(brokerList)) { - throw new Exception("For kafka output, bootstrap broker_list is needed"); - } - - topic = getStringValue("topic"); - if (StringUtils.isEmpty(topic)) { - throw new Exception("For kafka output, topic is needed"); - } - - isAsync = getBooleanValue("is_async", true); - int batchSize = getIntValue("batch_size", DEFAULT_BATCH_SIZE); - int lingerMS = getIntValue("linger_ms", DEFAULT_LINGER_MS); - - Properties props = new Properties(); - props.put("bootstrap.servers", brokerList); - props.put("client.id", "logfeeder_producer"); - props.put("key.serializer", StringSerializer.class.getName()); - props.put("value.serializer", StringSerializer.class.getName()); - props.put("compression.type", "snappy"); - props.put("batch.size", batchSize); - props.put("linger.ms", lingerMS); - - for (String key : getConfigs().keySet()) { - if (key.startsWith("kafka.")) { - Object value = getConfigs().get(key); - if (value == null || value.toString().length() == 0) { - continue; - } - String kafkaKey = key.substring("kafka.".length()); - LOG.info("Adding custom Kafka property. 
" + kafkaKey + "=" + value); - props.put(kafkaKey, value); - } - } - - return props; - } - - protected KafkaProducer creteKafkaProducer(Properties props) { - return new KafkaProducer(props); - } - - private void createKafkaRetryThread() { - Thread retryThread = new Thread("kafka-writer-retry,topic=" + topic) { - @Override - public void run() { - KafkaCallBack kafkaCallBack = null; - LOG.info("Started thread to monitor failed messsages. " + getShortDescription()); - while (true) { - try { - if (kafkaCallBack == null) { - kafkaCallBack = failedMessages.take(); - } - if (publishMessage(kafkaCallBack.message, kafkaCallBack.inputMarker)) { - kafkaCallBack = null; - } else { - LOG.error("Kafka is down. messageNumber=" + kafkaCallBack.thisMessageNumber + ". Going to sleep for " + - FAILED_RETRY_INTERVAL + " seconds"); - Thread.sleep(FAILED_RETRY_INTERVAL * 1000); - } - - } catch (Throwable t) { - String logMessageKey = this.getClass().getSimpleName() + "_KAFKA_RETRY_WRITE_ERROR"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error sending message to Kafka during retry. message=" + - (kafkaCallBack == null ? null : kafkaCallBack.message), t, LOG, Level.ERROR); - } - } - - } - }; - retryThread.setDaemon(true); - retryThread.start(); - } - - @Override - public synchronized void write(String block, InputFileMarker inputMarker) throws Exception { - while (!isDrain() && !inputMarker.getInput().isDrain()) { - try { - if (failedMessages.size() == 0) { - if (publishMessage(block, inputMarker)) { - break; - } - } - if (isDrain() || inputMarker.getInput().isDrain()) { - break; - } - if (!isKafkaBrokerUp) { - LOG.error("Kafka is down. Going to sleep for " + FAILED_RETRY_INTERVAL + " seconds"); - Thread.sleep(FAILED_RETRY_INTERVAL * 1000); - } else { - LOG.warn("Kafka is still catching up from previous failed messages. outstanding messages=" + failedMessages.size() + - " Going to sleep for " + CATCHUP_RETRY_INTERVAL + " seconds"); - Thread.sleep(CATCHUP_RETRY_INTERVAL * 1000); - } - } catch (Throwable t) { - // ignore - break; - } - } - } - - @Override - public void setDrain(boolean drain) { - super.setDrain(drain); - } - - public void flush() { - LOG.info("Flush called..."); - setDrain(true); - } - - @Override - public void close() { - LOG.info("Closing Kafka client..."); - flush(); - if (producer != null) { - try { - producer.close(); - } catch (Throwable t) { - LOG.error("Error closing Kafka topic. topic=" + topic); - } - } - LOG.info("Closed Kafka client"); - super.close(); - } - - private boolean publishMessage(String block, InputMarker inputMarker) { - if (isAsync && isKafkaBrokerUp) { // Send asynchronously - producer.send(new ProducerRecord(topic, block), new KafkaCallBack(this, block, inputMarker, ++messageCount)); - return true; - } else { // Send synchronously - try { - // Not using key. Let it round robin - RecordMetadata metadata = producer.send(new ProducerRecord(topic, block)).get(); - if (metadata != null) { - statMetric.value++; - writeBytesMetric.value += block.length(); - } - if (!isKafkaBrokerUp) { - LOG.info("Started writing to kafka. 
" + getShortDescription()); - isKafkaBrokerUp = true; - } - return true; - } catch (InterruptedException e) { - isKafkaBrokerUp = false; - String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_INTERRUPT"; - LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "InterruptedException-Error sending message to Kafka", e, LOG, - Level.ERROR); - } catch (ExecutionException e) { - isKafkaBrokerUp = false; - String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_EXECUTION"; - LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "ExecutionException-Error sending message to Kafka", e, LOG, - Level.ERROR); - } catch (Throwable t) { - isKafkaBrokerUp = false; - String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_WRITE_ERROR"; - LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "GenericException-Error sending message to Kafka", t, LOG, - Level.ERROR); - } - } - return false; - } - - @Override - public String getShortDescription() { - return "output:destination=kafka,topic=" + topic; - } - - class KafkaCallBack implements Callback { - - private long thisMessageNumber; - private OutputKafka output = null; - private String message; - private InputMarker inputMarker; - - public KafkaCallBack(OutputKafka output, String message, InputMarker inputMarker, long messageCount) { - this.thisMessageNumber = messageCount; - this.output = output; - this.inputMarker = inputMarker; - this.message = message; - } - - public void onCompletion(RecordMetadata metadata, Exception exception) { - if (metadata != null) { - if (!output.isKafkaBrokerUp) { - LOG.info("Started writing to kafka. " + output.getShortDescription()); - output.isKafkaBrokerUp = true; - } - output.incrementStat(1); - output.writeBytesMetric.value += message.length(); - } else { - output.isKafkaBrokerUp = false; - String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_ASYNC_ERROR"; - LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "Error sending message to Kafka. Async Callback", exception, LOG, - Level.ERROR); - - output.failedMessages.add(this); - } - } - } - - @Override - public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException { - throw new UnsupportedOperationException("copyFile method is not yet supported for output=kafka"); - } - - @Override - public String getOutputType() { - throw new IllegalStateException("This method should be overriden if the Output wants to monitor the configuration"); - } - - @Override - public Long getPendingCount() { - return 0L; - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputLineFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputLineFilter.java deleted file mode 100644 index 04600a3d374..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputLineFilter.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.plugin.input.cache.LRUCache; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; - - -/** - * Filter for outputs based on input configs, which can drop lines if the filter applies. - */ -public class OutputLineFilter { - - private static final Logger LOG = LoggerFactory.getLogger(OutputLineFilter.class); - - /** - * Applies filter based on input cache (on service log only). - * Get the message and in-memory timestamp for log line. If both are not empty, evaluate that log line needs to be filtered out or not. - */ - public Boolean apply(Map lineMap, Input input) { - boolean isLogFilteredOut = false; - LRUCache inputLruCache = input.getCache(); - if (inputLruCache != null && "service".equals(input.getInputDescriptor().getRowtype())) { - String logMessage = (String) lineMap.get(input.getCacheKeyField()); - Long timestamp = null; - if (lineMap.containsKey((LogFeederConstants.IN_MEMORY_TIMESTAMP))) { - timestamp = (Long) lineMap.get(LogFeederConstants.IN_MEMORY_TIMESTAMP); - } - if (logMessage != null && timestamp != null) { - isLogFilteredOut = !inputLruCache.isEntryReplaceable(logMessage, timestamp); - if (!isLogFilteredOut) { - inputLruCache.put(logMessage, timestamp); - } else { - LOG.debug("Log line filtered out: {} (file: {}, dedupInterval: {}, lastDedupEnabled: {})", - logMessage, inputLruCache.getFileName(), inputLruCache.getDedupInterval(), inputLruCache.isLastDedupEnabled()); - } - } - } - if (lineMap.containsKey(LogFeederConstants.IN_MEMORY_TIMESTAMP)) { - lineMap.remove(LogFeederConstants.IN_MEMORY_TIMESTAMP); - } - return isLogFilteredOut; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManagerImpl.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManagerImpl.java deleted file mode 100644 index 390a770e844..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManagerImpl.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.output; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.hash.Hashing; -import org.apache.ambari.logfeeder.common.IdGeneratorHelper; -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.loglevelfilter.LogLevelFilterHandler; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logsearch.config.api.OutputConfigMonitor; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import javax.inject.Inject; -import java.io.File; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; - -public class OutputManagerImpl extends OutputManager { - private static final Logger LOG = Logger.getLogger(OutputManagerImpl.class); - - private static final int MAX_OUTPUT_SIZE = 32765; // 32766-1 - - private List outputs = new ArrayList<>(); - - private boolean addMessageMD5 = true; - - private static long docCounter = 0; - private MetricData messageTruncateMetric = new MetricData(null, false); - - @Inject - private LogLevelFilterHandler logLevelFilterHandler; - - @Inject - private LogFeederProps logFeederProps; - - private OutputLineFilter outputLineFilter = new OutputLineFilter(); - - public List getOutputs() { - return outputs; - } - - public List getOutputsToMonitor() { - List outputsToMonitor = new ArrayList<>(); - for (Output output : outputs) { - if (output.monitorConfigChanges()) { - outputsToMonitor.add(output); - } - } - return outputsToMonitor; - } - - public void add(Output output) { - this.outputs.add(output); - } - - @Override - public void init() throws Exception { - for (Output output : outputs) { - output.init(logFeederProps); - } - } - - public void write(Map jsonObj, InputMarker inputMarker) { - Input input = inputMarker.getInput(); - - // Update the block with the context fields - for (Map.Entry entry : input.getInputDescriptor().getAddFields().entrySet()) { - if (jsonObj.get(entry.getKey()) == null || entry.getKey().equals("cluster") && "null".equals(jsonObj.get(entry.getKey()))) { - jsonObj.put(entry.getKey(), entry.getValue()); - } - } - - // TODO: Ideally most of the overrides should be configurable - - LogFeederUtil.fillMapWithFieldDefaults(jsonObj, inputMarker, true); - jsonObj.putIfAbsent("level", LogFeederConstants.LOG_LEVEL_UNKNOWN); - - if (input.isUseEventMD5() || input.isGenEventMD5()) { - String prefix = ""; - Object logtimeObj = jsonObj.get("logtime"); - if (logtimeObj != null) { - if (logtimeObj instanceof Date) { - prefix = "" + ((Date) logtimeObj).getTime(); - } else { - prefix = logtimeObj.toString(); - } - } - - - byte[] bytes = LogFeederUtil.getGson().toJson(jsonObj).getBytes(); - Long eventMD5 = Hashing.md5().hashBytes(bytes).asLong(); - if (input.isGenEventMD5()) { - jsonObj.put("event_md5", prefix + eventMD5.toString()); - } - if (input.isUseEventMD5()) { - jsonObj.put("id", prefix + eventMD5.toString()); - } - } - - jsonObj.put("seq_num", new Long(docCounter++)); - if (jsonObj.get("event_count") == null) { - 
jsonObj.put("event_count", new Integer(1)); - } - if (StringUtils.isNotBlank(input.getInputDescriptor().getGroup())) { - jsonObj.put("group", input.getInputDescriptor().getGroup()); - } - if (inputMarker.getAllProperties().containsKey("line_number") && - (Integer) inputMarker.getAllProperties().get("line_number") > 0) { - jsonObj.put("logfile_line_number", inputMarker.getAllProperties().get("line_number")); - } - if (jsonObj.containsKey("log_message")) { - // TODO: Let's check size only for log_message for now - String logMessage = (String) jsonObj.get("log_message"); - logMessage = truncateLongLogMessage(jsonObj, input, logMessage); - if (addMessageMD5) { - jsonObj.put("message_md5", "" + Hashing.md5().hashBytes(logMessage.getBytes()).asLong()); - } - } - List defaultLogLevels = getDefaultLogLevels(input); - if (logLevelFilterHandler.isAllowed(jsonObj, inputMarker, defaultLogLevels) - && !outputLineFilter.apply(jsonObj, inputMarker.getInput())) { - List outputList = input.getOutputList(); - for (Output output : outputList) { - try { - if (jsonObj.get("id") == null) { - jsonObj.put("id", IdGeneratorHelper.generateUUID(jsonObj, output.getIdFields())); - } - output.write(jsonObj, inputMarker); - } catch (Exception e) { - LOG.error("Error writing. to " + output.getShortDescription(), e); - } - } - } - } - - private List getDefaultLogLevels(Input input) { - List defaultLogLevels = logFeederProps.getIncludeDefaultLogLevels(); - List overrideDefaultLogLevels = input.getInputDescriptor().getDefaultLogLevels(); - if (CollectionUtils.isNotEmpty(overrideDefaultLogLevels)) { - return overrideDefaultLogLevels; - } else { - return defaultLogLevels; - } - } - - @SuppressWarnings("unchecked") - private String truncateLongLogMessage(Map jsonObj, Input input, String logMessage) { - if (logMessage != null && logMessage.getBytes().length > MAX_OUTPUT_SIZE) { - messageTruncateMetric.value++; - String logMessageKey = this.getClass().getSimpleName() + "_MESSAGESIZE"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Message is too big. size=" + logMessage.getBytes().length + - ", input=" + input.getShortDescription() + ". Truncating to " + MAX_OUTPUT_SIZE + ", first upto 100 characters=" + - StringUtils.abbreviate(logMessage, 100), null, LOG, Level.WARN); - logMessage = new String(logMessage.getBytes(), 0, MAX_OUTPUT_SIZE); - jsonObj.put("log_message", logMessage); - List tagsList = (List) jsonObj.get("tags"); - if (tagsList == null) { - tagsList = new ArrayList(); - jsonObj.put("tags", tagsList); - } - tagsList.add("error_message_truncated"); - } - return logMessage; - } - - public void write(String jsonBlock, InputMarker inputMarker) { - List defaultLogLevels = getDefaultLogLevels(inputMarker.getInput()); - if (logLevelFilterHandler.isAllowed(jsonBlock, inputMarker, defaultLogLevels)) { - List outputList = inputMarker.getInput().getOutputList(); - for (Output output : outputList) { - try { - output.write(jsonBlock, inputMarker); - } catch (Exception e) { - LOG.error("Error writing. to " + output.getShortDescription(), e); - } - } - } - } - - public void copyFile(File inputFile, InputMarker inputMarker) { - Input input = inputMarker.getInput(); - List outputList = input.getOutputList(); - for (Output output : outputList) { - try { - output.copyFile(inputFile, inputMarker); - }catch (Exception e) { - LOG.error("Error coyping file . 
to " + output.getShortDescription(), e); - } - } - } - - public void logStats() { - for (Output output : outputs) { - output.logStat(); - } - LogFeederUtil.logStatForMetric(messageTruncateMetric, "Stat: Messages Truncated", ""); - } - - public void addMetricsContainers(List metricsList) { - metricsList.add(messageTruncateMetric); - for (Output output : outputs) { - output.addMetricsContainers(metricsList); - } - } - - public void close() { - LOG.info("Close called for outputs ..."); - for (Output output : outputs) { - try { - output.setDrain(true); - output.close(); - } catch (Exception e) { - // Ignore - } - } - - // Need to get this value from property - int iterations = 30; - int waitTimeMS = 1000; - for (int i = 0; i < iterations; i++) { - boolean allClosed = true; - for (Output output : outputs) { - if (!output.isClosed()) { - try { - allClosed = false; - LOG.warn("Waiting for output to close. " + output.getShortDescription() + ", " + (iterations - i) + " more seconds"); - Thread.sleep(waitTimeMS); - } catch (Throwable t) { - // Ignore - } - } - } - if (allClosed) { - LOG.info("All outputs are closed. Iterations=" + i); - return; - } - } - - LOG.warn("Some outpus were not closed after " + iterations + " iterations"); - for (Output output : outputs) { - if (!output.isClosed()) { - LOG.warn("Output not closed. Will ignore it." + output.getShortDescription() + ", pendingCound=" + output.getPendingCount()); - } - } - } - - public LogLevelFilterHandler getLogLevelFilterHandler() { - return logLevelFilterHandler; - } - - public void setLogLevelFilterHandler(LogLevelFilterHandler logLevelFilterHandler) { - this.logLevelFilterHandler = logLevelFilterHandler; - } - - public LogFeederProps getLogFeederProps() { - return logFeederProps; - } - - @VisibleForTesting - public void setLogFeederProps(LogFeederProps logFeederProps) { - this.logFeederProps = logFeederProps; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java deleted file mode 100644 index fc64d4b7abc..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.output; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.output.spool.LogSpooler; -import org.apache.ambari.logfeeder.output.spool.LogSpoolerContext; -import org.apache.ambari.logfeeder.output.spool.RolloverCondition; -import org.apache.ambari.logfeeder.output.spool.RolloverHandler; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logfeeder.util.S3Util; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigGson; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputConfigImpl; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputDescriptorImpl; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputS3FileDescriptorImpl; -import org.apache.log4j.Logger; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - - -/** - * Write log file into s3 bucket. - * - * This class supports two modes of upload: - *
 - * <ul>
 - * <li>A one time upload of files matching a pattern</li>
 - * <li>A batch mode, asynchronous, periodic upload of files</li>
 - * </ul>
- */ -public class OutputS3File extends OutputFile implements RolloverCondition, RolloverHandler { - private static final Logger LOG = Logger.getLogger(OutputS3File.class); - - public static final String GLOBAL_CONFIG_S3_PATH_SUFFIX = "global.config.json"; - - private LogSpooler logSpooler; - private S3OutputConfiguration s3OutputConfiguration; - private S3Uploader s3Uploader; - private LogFeederProps logFeederProps; - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - this.logFeederProps = logFeederProps; - s3OutputConfiguration = S3OutputConfiguration.fromConfigBlock(this); - } - - private static boolean uploadedGlobalConfig = false; - - /** - * Copy local log files and corresponding config to S3 bucket one time. - * @param inputFile The file to be copied - * @param inputMarker Contains information about the configuration to be uploaded. - */ - @Override - public void copyFile(File inputFile, InputMarker inputMarker) { - String type = inputMarker.getInput().getInputDescriptor().getType(); - S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, type); - String resolvedPath = s3Uploader.uploadFile(inputFile, inputMarker.getInput().getInputDescriptor().getType()); - - uploadConfig(inputMarker, type, s3OutputConfiguration, resolvedPath); - } - - private void uploadConfig(InputMarker inputMarker, String type, S3OutputConfiguration s3OutputConfiguration, - String resolvedPath) { - - ArrayList filters = new ArrayList<>(); - addFilters(filters, inputMarker.getInput().getFirstFilter()); - InputS3FileDescriptor inputS3FileDescriptorOriginal = (InputS3FileDescriptor) inputMarker.getInput().getInputDescriptor(); - InputS3FileDescriptorImpl inputS3FileDescriptor = InputConfigGson.gson.fromJson( - InputConfigGson.gson.toJson(inputS3FileDescriptorOriginal), InputS3FileDescriptorImpl.class); - String s3CompletePath = LogFeederConstants.S3_PATH_START_WITH + s3OutputConfiguration.getS3BucketName() + - LogFeederConstants.S3_PATH_SEPARATOR + resolvedPath; - inputS3FileDescriptor.setPath(s3CompletePath); - - ArrayList inputConfigList = new ArrayList<>(); - inputConfigList.add(inputS3FileDescriptor); - // set source s3_file - // remove global config from input config - removeS3GlobalConfig(inputS3FileDescriptor); - // write config into s3 file - InputConfigImpl inputConfig = new InputConfigImpl(); - inputConfig.setInput(inputConfigList); - - writeConfigToS3(inputConfig, getComponentConfigFileName(type), s3OutputConfiguration); - // write global config - writeGlobalConfig(s3OutputConfiguration); - } - - private void addFilters(ArrayList filters, Filter filter) { - if (filter != null) { - FilterDescriptor filterDescriptorOriginal = filter.getFilterDescriptor(); - FilterDescriptor filterDescriptor = InputConfigGson.gson.fromJson( - InputConfigGson.gson.toJson(filterDescriptorOriginal), filterDescriptorOriginal.getClass()); - filters.add(filterDescriptor); - if (filter.getNextFilter() != null) { - addFilters(filters, filter.getNextFilter()); - } - } - } - - private void writeConfigToS3(Object config, String s3KeySuffix, S3OutputConfiguration s3OutputConfiguration) { - String configJson = InputConfigGson.gson.toJson(config); - - String s3ResolvedKey = new S3LogPathResolver().getResolvedPath(getStringValue("s3_config_dir"), s3KeySuffix, - s3OutputConfiguration.getCluster()); - - S3Util.writeIntoS3File(configJson, s3OutputConfiguration.getS3BucketName(), s3ResolvedKey, - s3OutputConfiguration.getS3AccessKey(), s3OutputConfiguration.getS3SecretKey()); - } - - private 
String getComponentConfigFileName(String componentName) { - return "input.config-" + componentName + ".json"; - } - - private void removeS3GlobalConfig(InputS3FileDescriptorImpl inputS3FileDescriptor) { - inputS3FileDescriptor.setSource(null); - inputS3FileDescriptor.setCopyFile(null); - inputS3FileDescriptor.setProcessFile(null); - inputS3FileDescriptor.setTail(null); - inputS3FileDescriptor.getAddFields().remove("ip"); - inputS3FileDescriptor.getAddFields().remove("host"); - inputS3FileDescriptor.getAddFields().remove("bundle_id"); - } - - /** - * write global config in s3 file Invoke only once - */ - @SuppressWarnings("unchecked") - private synchronized void writeGlobalConfig(S3OutputConfiguration s3OutputConfiguration) { - if (!uploadedGlobalConfig) { - Map globalConfig = new HashMap<>(); - //updating global config before write to s3 - globalConfig.put("source", "s3_file"); - globalConfig.put("copy_file", false); - globalConfig.put("process_file", true); - globalConfig.put("tail", false); - Map addFields = (Map) globalConfig.get("add_fields"); - if (addFields == null) { - addFields = new HashMap<>(); - } - addFields.put("ip", LogFeederUtil.ipAddress); - addFields.put("host", LogFeederUtil.hostName); - // add bundle id same as cluster if its not there - String bundle_id = (String) addFields.get("bundle_id"); - if (bundle_id == null || bundle_id.isEmpty()) { - String cluster = (String) addFields.get("cluster"); - if (cluster != null && !cluster.isEmpty()) { - addFields.put("bundle_id", bundle_id); - } - } - globalConfig.put("add_fields", addFields); - Map config = new HashMap(); - config.put("global", globalConfig); - writeConfigToS3(config, GLOBAL_CONFIG_S3_PATH_SUFFIX, s3OutputConfiguration); - uploadedGlobalConfig = true; - } - } - - /** - * Write a log line to local file, to upload to S3 bucket asynchronously. - * - * This method uses a {@link LogSpooler} to spool the log lines to a local file. - - * @param block The log event to upload - * @param inputMarker Contains information about the log file feeding the lines. - * @throws Exception - */ - @Override - public void write(String block, InputFileMarker inputMarker) throws Exception { - if (logSpooler == null) { - if (inputMarker.getInput().getClass().isAssignableFrom(InputFile.class)) { - InputFile input = (InputFile) inputMarker.getInput(); - logSpooler = createSpooler(input.getFilePath()); - s3Uploader = createUploader(input.getInputDescriptor().getType()); - logSpooler.add(block); - } else { - LOG.error("Cannot write from non local file..."); - } - } - } - - @VisibleForTesting - protected S3Uploader createUploader(String logType) { - S3Uploader uploader = new S3Uploader(s3OutputConfiguration, true, logType); - uploader.startUploaderThread(); - return uploader; - } - - @VisibleForTesting - protected LogSpooler createSpooler(String filePath) { - String spoolDirectory = logFeederProps.getTmpDir() + "/s3/service"; - LOG.info(String.format("Creating spooler with spoolDirectory=%s, filePath=%s", spoolDirectory, filePath)); - return new LogSpooler(spoolDirectory, new File(filePath).getName()+"-", this, this, - s3OutputConfiguration.getRolloverTimeThresholdSecs()); - } - - /** - * Check whether the locally spooled file should be rolled over, based on file size. - * - * @param currentSpoolerContext {@link LogSpoolerContext} that holds state about the file being checked - * for rollover. 
- * @return true if sufficient size has been reached based on {@link S3OutputConfiguration#getRolloverSizeThresholdBytes()}, - * false otherwise - */ - @Override - public boolean shouldRollover(LogSpoolerContext currentSpoolerContext) { - File spoolFile = currentSpoolerContext.getActiveSpoolFile(); - long currentSize = spoolFile.length(); - boolean result = (currentSize >= s3OutputConfiguration.getRolloverSizeThresholdBytes()); - if (result) { - LOG.info(String.format("Rolling over %s, current size %d, threshold size %d", spoolFile, currentSize, - s3OutputConfiguration.getRolloverSizeThresholdBytes())); - } - return result; - } - - /** - * Stops dependent objects that consume resources. - */ - @Override - public void close() { - if (s3Uploader != null) { - s3Uploader.stopUploaderThread(); - } - if (logSpooler != null) { - logSpooler.close(); - } - } - - /** - * Adds the locally spooled file to the {@link S3Uploader} to be uploaded asynchronously. - * - * @param rolloverFile The file that has been rolled over. - */ - @Override - public void handleRollover(File rolloverFile) { - s3Uploader.addFileForUpload(rolloverFile.getAbsolutePath()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java deleted file mode 100644 index 350986e3771..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java +++ /dev/null @@ -1,512 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.common.IdGeneratorHelper; -import org.apache.ambari.logfeeder.common.LogFeederSolrClientFactory; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logfeeder.util.DateUtil; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.response.SolrPingResponse; -import org.apache.solr.client.solrj.response.UpdateResponse; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; - -import java.io.File; -import java.io.IOException; -import java.net.MalformedURLException; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Collection;; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -public class OutputSolr extends Output { - - private static final Logger LOG = Logger.getLogger(OutputSolr.class); - - private static final int SHARDS_WAIT_MS = 10000; - - private static final int DEFAULT_MAX_BUFFER_SIZE = 5000; - private static final int DEFAULT_MAX_INTERVAL_MS = 3000; - private static final int DEFAULT_NUMBER_OF_WORKERS = 1; - private static final boolean DEFAULT_SKIP_LOGTIME = false; - - private static final int RETRY_INTERVAL = 30; - - private static final String JAVA_SECURITY_AUTH_LOGIN_CONFIG = "java.security.auth.login.config"; - private static final String SOLR_HTTPCLIENT_BUILDER_FACTORY = "solr.httpclient.builder.factory"; - - private String type; - private String collection; - private String splitMode; - private int splitInterval; - private String zkConnectString; - private String[] solrUrls = null; - private int maxIntervalMS; - private int workers; - private int maxBufferSize; - private boolean implicitRouting = false; - private int lastSlotByMin = -1; - private boolean skipLogtime = false; - private List idFields = new ArrayList<>(); - - private BlockingQueue outgoingBuffer = null; - private List workerThreadList = new ArrayList<>(); - - private LogFeederProps logFeederProps; - - @Override - public boolean monitorConfigChanges() { - return true; - }; - - @Override - public String getOutputType() { - return type; - } - - @Override - public String getStatMetricName() { - return "output.solr.write_logs"; - } - - @Override - public String getWriteBytesMetricName() { - return "output.solr.write_bytes"; - } - - @Override - public void init(LogFeederProps logFeederProps) throws Exception { - this.logFeederProps = logFeederProps; - initParams(logFeederProps); - setupSecurity(); - createOutgoingBuffer(); - createSolrWorkers(); - } - - private void initParams(LogFeederProps logFeederProps) throws Exception { - type = getStringValue("type"); - - zkConnectString = getStringValue("zk_connect_string"); 
- List solrUrlsList = getListValue("solr_urls"); - - if (StringUtils.isBlank(zkConnectString) && CollectionUtils.isEmpty(solrUrlsList)) { - throw new Exception("For solr output the zk_connect_string or solr_urls property need to be set"); - } - - if (CollectionUtils.isNotEmpty(solrUrlsList)) { - solrUrls = solrUrlsList.toArray(new String[0]); - } - - idFields = getListValue("id_fields", new ArrayList<>()); - - skipLogtime = getBooleanValue("skip_logtime", DEFAULT_SKIP_LOGTIME); - - maxIntervalMS = getIntValue("idle_flush_time_ms", DEFAULT_MAX_INTERVAL_MS); - workers = getIntValue("workers", DEFAULT_NUMBER_OF_WORKERS); - - splitInterval = 0; - splitMode = getStringValue("split_interval", "none"); - if (!splitMode.equals("none")) { - splitInterval = Integer.parseInt(splitMode); - } - - collection = getStringValue("collection"); - if (StringUtils.isEmpty(collection)) { - throw new IllegalStateException("Collection property is mandatory"); - } - - maxBufferSize = getIntValue("flush_size", DEFAULT_MAX_BUFFER_SIZE); - if (maxBufferSize < 1) { - LOG.warn("maxBufferSize is less than 1. Making it 1"); - maxBufferSize = 1; - } - - LOG.info(String.format("Config: Number of workers=%d, splitMode=%s, splitInterval=%d." - + getShortDescription(), workers, splitMode, splitInterval)); - - implicitRouting = logFeederProps.isSolrImplicitRouting(); // TODO: in the future, load it from output config (can be a use case to use different routing for audit/service logs) - if (implicitRouting) { - LOG.info("Config: Use implicit routing globally for adding docs to Solr."); - } else { - LOG.info("Config: Use compositeId globally for adding docs to Solr."); - } - } - - private void setupSecurity() { - boolean securityEnabled = logFeederProps.getLogFeederSecurityConfig().isSolrKerberosEnabled(); - if (securityEnabled) { - String javaSecurityConfig = System.getProperty(JAVA_SECURITY_AUTH_LOGIN_CONFIG); - String solrHttpBuilderFactory = System.getProperty(SOLR_HTTPCLIENT_BUILDER_FACTORY); - LOG.info("setupSecurity() called for kerberos configuration, jaas file: " - + javaSecurityConfig + ", solr http client factory: " + solrHttpBuilderFactory); - } - } - - private void createOutgoingBuffer() { - int bufferSize = maxBufferSize * (workers + 3); - LOG.info("Creating blocking queue with bufferSize=" + bufferSize); - outgoingBuffer = new LinkedBlockingQueue(bufferSize); - } - - private void createSolrWorkers() throws Exception, MalformedURLException { - for (int count = 0; count < workers; count++) { - SolrClient solrClient = getSolrClient(count); - createSolrWorkerThread(count, solrClient); - } - } - - private SolrClient getSolrClient(int count) throws Exception, MalformedURLException { - SolrClient solrClient = new LogFeederSolrClientFactory().createSolrClient(zkConnectString, solrUrls, collection); - pingSolr(count, solrClient); - return solrClient; - } - - private void pingSolr(int count, SolrClient solrClient) { - try { - LOG.info("Pinging Solr server."); - SolrPingResponse response = solrClient.ping(); - if (response.getStatus() == 0) { - LOG.info("Ping to Solr server is successful for worker=" + count); - } else { - LOG.warn( - String.format("Ping to Solr server failed. It would check again. worker=%d, collection=%s, " + - "response=%s", count, collection, response)); - } - } catch (Throwable t) { - LOG.warn(String.format( - "Ping to Solr server failed. It would check again. 
worker=%d, collection=%s", count, collection), t); - } - } - - private void createSolrWorkerThread(int count, SolrClient solrClient) { - SolrWorkerThread solrWorkerThread = new SolrWorkerThread(solrClient); - solrWorkerThread.setName(getNameForThread() + "," + collection + ",worker=" + count); - solrWorkerThread.setDaemon(true); - solrWorkerThread.start(); - workerThreadList.add(solrWorkerThread); - } - - @Override - public void write(Map jsonObj, InputMarker inputMarker) throws Exception { - try { - trimStrValue(jsonObj); - useActualDateIfNeeded(jsonObj); - outgoingBuffer.put(new OutputData(jsonObj, inputMarker)); - } catch (InterruptedException e) { - // ignore - } - } - - private void useActualDateIfNeeded(Map jsonObj) { - if (skipLogtime) { - jsonObj.put("logtime", DateUtil.getActualDateStr()); - if (jsonObj.get("evtTime") != null) { - jsonObj.put("evtTime", DateUtil.getActualDateStr()); - } - } - } - - public void flush() { - LOG.info("Flush called..."); - setDrain(true); - - int wrapUpTimeSecs = 30; - // Give wrapUpTimeSecs seconds to wrap up - boolean isPending = false; - for (int i = 0; i < wrapUpTimeSecs; i++) { - for (SolrWorkerThread solrWorkerThread : workerThreadList) { - if (solrWorkerThread.isDone()) { - try { - solrWorkerThread.interrupt(); - } catch (Throwable t) { - // ignore - } - } else { - isPending = true; - } - } - if (isPending) { - try { - LOG.info("Will give " + (wrapUpTimeSecs - i) + " seconds to wrap up"); - Thread.sleep(1000); - } catch (InterruptedException e) { - // ignore - } - } - isPending = false; - } - } - - @Override - public void setDrain(boolean drain) { - super.setDrain(drain); - } - - @Override - public Long getPendingCount() { - long pendingCount = 0; - for (SolrWorkerThread solrWorkerThread : workerThreadList) { - pendingCount += solrWorkerThread.localBuffer.size(); - } - return pendingCount; - } - - @Override - public void close() { - LOG.info("Closing Solr client..."); - flush(); - - LOG.info("Closed Solr client"); - super.close(); - } - - @Override - public String getShortDescription() { - return "output:destination=solr,collection=" + collection; - } - - class SolrWorkerThread extends Thread { - private static final String ROUTER_FIELD = "_router_field_"; - - private final SolrClient solrClient; - private final Collection localBuffer = new ArrayList<>(); - private final Map latestInputMarkers = new HashMap<>(); - - private long localBufferBytesSize = 0; - - public SolrWorkerThread(SolrClient solrClient) { - this.solrClient = solrClient; - } - - @Override - public void run() { - LOG.info("SolrWorker thread started"); - long lastDispatchTime = System.currentTimeMillis(); - - while (true) { - long currTimeMS = System.currentTimeMillis(); - OutputData outputData = null; - try { - long nextDispatchDuration = maxIntervalMS - (currTimeMS - lastDispatchTime); - outputData = getOutputData(nextDispatchDuration); - - if (outputData != null) { - createSolrDocument(outputData); - } else { - if (isDrain() && outgoingBuffer.isEmpty()) { - break; - } - } - - if (!localBuffer.isEmpty() && - (outputData == null && isDrain() || nextDispatchDuration <= 0 || localBuffer.size() >= maxBufferSize) - ) { - boolean response = sendToSolr(outputData); - if (isDrain() && !response) { - //Since sending to Solr response failed and it is in draining mode, let's break; - LOG.warn("In drain mode and sending to Solr failed. So exiting. 
output=" + getShortDescription()); - break; - } - } - if (localBuffer.isEmpty()) { - //If localBuffer is empty, then reset the timer - lastDispatchTime = currTimeMS; - } - } catch (InterruptedException e) { - // Handle thread exiting - } catch (Throwable t) { - String logMessageKey = this.getClass().getSimpleName() + "_SOLR_MAINLOOP_EXCEPTION"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception in main loop. " + outputData, t, LOG, - Level.ERROR); - } - } - - closeSolrClient(); - - resetLocalBuffer(); - LOG.info("Exiting Solr worker thread. output=" + getShortDescription()); - } - - /** - * This will loop till Solr is available and LogFeeder is - * successfully able to write to the collection or shard. It will block till - * it can write. The outgoingBuffer is a BlockingQueue and when it is full, it - * will automatically stop parsing the log files. - */ - private boolean sendToSolr(OutputData outputData) { - boolean result = false; - while (!isDrain()) { - try { - if (implicitRouting) { - // Compute the current router value - addRouterField(); - } - addToSolr(outputData); - resetLocalBuffer(); - //Send successful, will return - result = true; - break; - } catch (IOException | SolrException exception) { - // Transient error, lets block till it is available - try { - LOG.warn("Solr is not reachable. Going to retry after " + RETRY_INTERVAL + " seconds. " + "output=" - + getShortDescription(), exception); - Thread.sleep(RETRY_INTERVAL * 1000); - } catch (Throwable t) { - // ignore - } - } catch (Throwable serverException) { - // Something unknown happened. Let's not block because of this error. - // Clear the buffer - String logMessageKey = this.getClass().getSimpleName() + "_SOLR_UPDATE_EXCEPTION"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error sending log message to server. Dropping logs", - serverException, LOG, Level.ERROR); - resetLocalBuffer(); - break; - } - } - return result; - } - - private OutputData getOutputData(long nextDispatchDuration) throws InterruptedException { - OutputData outputData = outgoingBuffer.poll(); - if (outputData == null && !isDrain() && nextDispatchDuration > 0) { - outputData = outgoingBuffer.poll(nextDispatchDuration, TimeUnit.MILLISECONDS); - } - if (outputData != null && outputData.jsonObj.get("id") == null) { - outputData.jsonObj.put("id", IdGeneratorHelper.generateUUID(outputData.jsonObj, idFields)); - } - return outputData; - } - - private void createSolrDocument(OutputData outputData) { - SolrInputDocument document = new SolrInputDocument(); - for (String name : outputData.jsonObj.keySet()) { - Object obj = outputData.jsonObj.get(name); - document.addField(name, obj); - try { - localBufferBytesSize += obj.toString().length(); - } catch (Throwable t) { - String logMessageKey = this.getClass().getSimpleName() + "_BYTE_COUNT_ERROR"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error calculating byte size. 
object=" + obj, t, LOG, - Level.ERROR); - } - } - Object fileKey = outputData.inputMarker.getAllProperties().get("file_key"); - if (fileKey != null) { - latestInputMarkers.put(fileKey.toString(), outputData.inputMarker); - } - localBuffer.add(document); - } - - private void addRouterField() { - ZkStateReader reader = ((CloudSolrClient) solrClient).getZkStateReader(); - DocCollection docCollection = reader.getClusterState().getCollection(collection); - Collection slices = docCollection.getSlices(); - List shards = slices.stream().map(Slice::getName).collect(Collectors.toList()); - - Calendar cal = Calendar.getInstance(); - int weekDay = cal.get(Calendar.DAY_OF_WEEK); - int currHour = cal.get(Calendar.HOUR_OF_DAY); - int currMin = cal.get(Calendar.MINUTE); - - int minOfWeek = (weekDay - 1) * 24 * 60 + currHour * 60 + currMin; - int slotByMin = minOfWeek / splitInterval % shards.size(); - - String shard = shards.get(slotByMin); - - if (lastSlotByMin != slotByMin) { - LOG.info("Switching to shard " + shard + ", output=" + getShortDescription()); - lastSlotByMin = slotByMin; - } - - for (SolrInputDocument solrInputDocument : localBuffer) { - solrInputDocument.setField(ROUTER_FIELD, shard); - } - } - - private void addToSolr(OutputData outputData) throws SolrServerException, IOException { - UpdateResponse response = solrClient.add(localBuffer); - if (response.getStatus() != 0) { - String logMessageKey = this.getClass().getSimpleName() + "_SOLR_UPDATE_ERROR"; - LogFeederUtil.logErrorMessageByInterval(logMessageKey, - String.format("Error writing to Solr. response=%s, log=%s", response, outputData), null, LOG, Level.ERROR); - } - statMetric.value += localBuffer.size(); - writeBytesMetric.value += localBufferBytesSize; - for (InputMarker inputMarker : latestInputMarkers.values()) { - inputMarker.getInput().checkIn(inputMarker); - } - } - - private void closeSolrClient() { - if (solrClient != null) { - try { - solrClient.close(); - } catch (IOException e) { - // Ignore - } - } - } - - public void resetLocalBuffer() { - localBuffer.clear(); - localBufferBytesSize = 0; - latestInputMarkers.clear(); - } - - public boolean isDone() { - return localBuffer.isEmpty(); - } - } - - @Override - public void write(String block, InputMarker inputMarker) throws Exception { - } - - @Override - public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException { - throw new UnsupportedOperationException("copyFile method is not yet supported for output=solr"); - } - - @Override - public List getIdFields() { - return idFields; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java deleted file mode 100644 index 8c544cff2fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.util.LogFeederUtil; -import org.apache.ambari.logfeeder.util.PlaceholderUtil; - -import java.util.HashMap; - -/** - * A utility class that resolves variables like hostname, IP address and cluster name in S3 paths. - */ -public class S3LogPathResolver { - - /** - * Construct a full S3 path by resolving variables in the path name including hostname, IP address - * and cluster name - * @param baseKeyPrefix The prefix which can contain the variables. - * @param keySuffix The suffix appended to the prefix after variable expansion - * @param cluster The name of the cluster - * @return full S3 path. - */ - public String getResolvedPath(String baseKeyPrefix, String keySuffix, String cluster) { - HashMap contextParam = buildContextParam(cluster); - String resolvedKeyPrefix = PlaceholderUtil.replaceVariables(baseKeyPrefix, contextParam); - return resolvedKeyPrefix + LogFeederConstants.S3_PATH_SEPARATOR + keySuffix; - } - - private HashMap buildContextParam(String cluster) { - HashMap contextParam = new HashMap<>(); - contextParam.put("host", LogFeederUtil.hostName); - contextParam.put("ip", LogFeederUtil.ipAddress); - contextParam.put("cluster", cluster); - return contextParam; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java deleted file mode 100644 index a2d76926adb..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.plugin.common.ConfigItem; - -import java.util.HashMap; -import java.util.Map; - -/** - * Holds all configuration relevant for S3 upload. - */ -public class S3OutputConfiguration { - - public static final String SPOOL_DIR_KEY = "spool_dir"; - public static final String ROLLOVER_SIZE_THRESHOLD_BYTES_KEY = "rollover_size_threshold_bytes"; - public static final Long DEFAULT_ROLLOVER_SIZE_THRESHOLD_BYTES = 10 * 1024 * 1024L; - public static final String ROLLOVER_TIME_THRESHOLD_SECS_KEY = "rollover_time_threshold_secs"; - public static final Long DEFAULT_ROLLOVER_TIME_THRESHOLD_SECS = 3600L; - public static final String S3_BUCKET_NAME_KEY = "s3_bucket"; - public static final String S3_LOG_DIR_KEY = "s3_log_dir"; - public static final String S3_ACCESS_KEY = "s3_access_key"; - public static final String S3_SECRET_KEY = "s3_secret_key"; - public static final String COMPRESSION_ALGO_KEY = "compression_algo"; - public static final String ADDITIONAL_FIELDS_KEY = "add_fields"; - public static final String CLUSTER_KEY = "cluster"; - - private Map configs; - - S3OutputConfiguration(Map configs) { - this.configs = configs; - } - - public String getS3BucketName() { - return (String) configs.get(S3_BUCKET_NAME_KEY); - } - - public String getS3Path() { - return (String) configs.get(S3_LOG_DIR_KEY); - } - - public String getS3AccessKey() { - return (String) configs.get(S3_ACCESS_KEY); - } - - public String getS3SecretKey() { - return (String) configs.get(S3_SECRET_KEY); - } - - public String getCompressionAlgo() { - return (String) configs.get(COMPRESSION_ALGO_KEY); - } - - public Long getRolloverSizeThresholdBytes() { - return (Long) configs.get(ROLLOVER_SIZE_THRESHOLD_BYTES_KEY); - } - - public Long getRolloverTimeThresholdSecs() { - return (Long) configs.get(ROLLOVER_TIME_THRESHOLD_SECS_KEY); - } - - @SuppressWarnings("unchecked") - public String getCluster() { - return ((Map) configs.get(ADDITIONAL_FIELDS_KEY)).get(CLUSTER_KEY); - } - - public static S3OutputConfiguration fromConfigBlock(ConfigItem configItem) { - Map configs = new HashMap<>(); - String[] stringValuedKeysToCopy = new String[] { - SPOOL_DIR_KEY, S3_BUCKET_NAME_KEY, S3_LOG_DIR_KEY, - S3_ACCESS_KEY, S3_SECRET_KEY, COMPRESSION_ALGO_KEY - }; - - for (String key : stringValuedKeysToCopy) { - String value = configItem.getStringValue(key); - if (value != null) { - configs.put(key, value); - } - } - - String[] longValuedKeysToCopy = new String[] { - ROLLOVER_SIZE_THRESHOLD_BYTES_KEY, ROLLOVER_TIME_THRESHOLD_SECS_KEY - }; - - Long[] defaultValuesForLongValuedKeys = new Long[] { - DEFAULT_ROLLOVER_SIZE_THRESHOLD_BYTES, DEFAULT_ROLLOVER_TIME_THRESHOLD_SECS - }; - - for (int i = 0; i < longValuedKeysToCopy.length; i++) { - configs.put(longValuedKeysToCopy[i], configItem.getLongValue(longValuedKeysToCopy[i], defaultValuesForLongValuedKeys[i])); - } - - configs.put(ADDITIONAL_FIELDS_KEY, configItem.getNVList(ADDITIONAL_FIELDS_KEY)); - - return new S3OutputConfiguration(configs); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java 
b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java deleted file mode 100644 index ddf3995d3f3..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.transfer.TransferManager; -import com.amazonaws.services.s3.transfer.Upload; -import com.google.common.annotations.VisibleForTesting; -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.util.CompressionUtil; -import org.apache.ambari.logfeeder.util.S3Util; -import org.apache.log4j.Logger; - -import java.io.File; -import java.util.Date; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * A class that handles the uploading of files to S3. - * - * This class can be used to upload a file one time, or start a daemon thread that can - * be used to upload files added to a queue one after the other. When used to upload - * files via a queue, one instance of this class is created for each file handled in - * {@link org.apache.ambari.logfeeder.input.InputFile}. - */ -public class S3Uploader implements Runnable { - private static final Logger LOG = Logger.getLogger(S3Uploader.class); - - public static final String POISON_PILL = "POISON-PILL"; - - private final S3OutputConfiguration s3OutputConfiguration; - private final boolean deleteOnEnd; - private final String logType; - private final BlockingQueue fileContextsToUpload; - private final AtomicBoolean stopRunningThread = new AtomicBoolean(false); - - public S3Uploader(S3OutputConfiguration s3OutputConfiguration, boolean deleteOnEnd, String logType) { - this.s3OutputConfiguration = s3OutputConfiguration; - this.deleteOnEnd = deleteOnEnd; - this.logType = logType; - this.fileContextsToUpload = new LinkedBlockingQueue<>(); - } - - /** - * Starts a thread that can be used to upload files from a queue. - * - * Add files to be uploaded using the method {@link #addFileForUpload(String)}. - * If this thread is started, it must be stopped using the method {@link #stopUploaderThread()}. - */ - void startUploaderThread() { - Thread s3UploaderThread = new Thread(this, "s3-uploader-thread-"+logType); - s3UploaderThread.setDaemon(true); - s3UploaderThread.start(); - } - - /** - * Stops the thread used to upload files from a queue. - * - * This method must be called to cleanly free up resources, typically on shutdown of the process. - * Note that this method does not drain any remaining files, and instead stops the thread - * as soon as any file being currently uploaded is complete. - */ - void stopUploaderThread() { - stopRunningThread.set(true); - boolean offerStatus = fileContextsToUpload.offer(POISON_PILL); - if (!offerStatus) { - LOG.warn("Could not add poison pill to interrupt uploader thread."); - } - } - - /** - * Add a file to a queue to upload asynchronously. - * @param fileToUpload Full path to the local file which must be uploaded. 
- */ - void addFileForUpload(String fileToUpload) { - boolean offerStatus = fileContextsToUpload.offer(fileToUpload); - if (!offerStatus) { - LOG.error("Could not add file " + fileToUpload + " for upload."); - } - } - - @Override - public void run() { - while (!stopRunningThread.get()) { - try { - String fileNameToUpload = fileContextsToUpload.take(); - if (POISON_PILL.equals(fileNameToUpload)) { - LOG.warn("Found poison pill while waiting for files to upload, exiting"); - return; - } - uploadFile(new File(fileNameToUpload), logType); - } catch (InterruptedException e) { - LOG.error("Interrupted while waiting for elements from fileContextsToUpload", e); - return; - } - } - } - - /** - * Upload the given file to S3. - * - * The file which should be available locally, is first compressed using the compression - * method specified by {@link S3OutputConfiguration#getCompressionAlgo()}. This compressed - * file is what is uploaded to S3. - * @param fileToUpload the file to upload - * @param logType the name of the log which is used in the S3 path constructed. - * @return - */ - String uploadFile(File fileToUpload, String logType) { - String bucketName = s3OutputConfiguration.getS3BucketName(); - String s3AccessKey = s3OutputConfiguration.getS3AccessKey(); - String s3SecretKey = s3OutputConfiguration.getS3SecretKey(); - String compressionAlgo = s3OutputConfiguration.getCompressionAlgo(); - - String keySuffix = fileToUpload.getName() + "." + compressionAlgo; - String s3Path = new S3LogPathResolver().getResolvedPath( - s3OutputConfiguration.getS3Path() + LogFeederConstants.S3_PATH_SEPARATOR + logType, keySuffix, - s3OutputConfiguration.getCluster()); - LOG.info(String.format("keyPrefix=%s, keySuffix=%s, s3Path=%s", s3OutputConfiguration.getS3Path(), keySuffix, s3Path)); - File sourceFile = createCompressedFileForUpload(fileToUpload, compressionAlgo); - - LOG.info("Starting S3 upload " + sourceFile + " -> " + bucketName + ", " + s3Path); - uploadFileToS3(bucketName, s3Path, sourceFile, s3AccessKey, s3SecretKey); - - // delete local compressed file - sourceFile.delete(); - if (deleteOnEnd) { - LOG.info("Deleting input file as required"); - if (!fileToUpload.delete()) { - LOG.error("Could not delete file " + fileToUpload.getAbsolutePath() + " after upload to S3"); - } - } - return s3Path; - } - - @VisibleForTesting - protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) { - TransferManager transferManager = S3Util.getTransferManager(accessKey, secretKey); - try { - Upload upload = transferManager.upload(bucketName, s3Key, localFile); - upload.waitForUploadResult(); - } catch (AmazonClientException | InterruptedException e) { - LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(), e); - } finally { - S3Util.shutdownTransferManager(transferManager); - } - } - - @VisibleForTesting - protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) { - File outputFile = new File(fileToUpload.getParent(), fileToUpload.getName() + "_" + new Date().getTime() + - "." 
+ compressionAlgo); - outputFile = CompressionUtil.compressFile(fileToUpload, outputFile, compressionAlgo); - return outputFile; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java deleted file mode 100644 index 7fc47a9bc4f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output.spool; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.ambari.logfeeder.util.DateUtil; -import org.apache.log4j.Logger; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.Date; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * A class that manages local storage of log events before they are uploaded to the output destinations. - * - * This class should be used by any {@link org.apache.ambari.logfeeder.plugin.output.Output}s that wish to upload log files to an - * output destination on a periodic batched basis. Log events should be added to an instance - * of this class to be stored locally. This class determines when to - * rollover using calls to an interface {@link RolloverCondition}. Likewise, it uses an interface - * {@link RolloverHandler} to trigger the handling of the rolled over file. - */ -public class LogSpooler { - - private static final Logger LOG = Logger.getLogger(LogSpooler.class); - public static final long TIME_BASED_ROLLOVER_DISABLED_THRESHOLD = 0; - static final String fileDateFormat = "yyyy-MM-dd-HH-mm-ss"; - - private String spoolDirectory; - private String sourceFileNamePrefix; - private RolloverCondition rolloverCondition; - private RolloverHandler rolloverHandler; - private PrintWriter currentSpoolBufferedWriter; - private File currentSpoolFile; - private LogSpoolerContext currentSpoolerContext; - private Timer rolloverTimer; - private AtomicBoolean rolloverInProgress = new AtomicBoolean(false); - - /** - * Create an instance of the LogSpooler. - * @param spoolDirectory The directory under which spooler files are created. - * Should be unique per instance of {@link org.apache.ambari.logfeeder.plugin.output.Output} - * @param sourceFileNamePrefix The prefix with which the locally spooled files are created. - * @param rolloverCondition An object of type {@link RolloverCondition} that will be used to - * determine when to rollover. 
- * @param rolloverHandler An object of type {@link RolloverHandler} that will be called when - * there should be a rollover. - */ - public LogSpooler(String spoolDirectory, String sourceFileNamePrefix, RolloverCondition rolloverCondition, - RolloverHandler rolloverHandler) { - this(spoolDirectory, sourceFileNamePrefix, rolloverCondition, rolloverHandler, - TIME_BASED_ROLLOVER_DISABLED_THRESHOLD); - } - - /** - * Create an instance of the LogSpooler. - * @param spoolDirectory The directory under which spooler files are created. - * Should be unique per instance of {@link org.apache.ambari.logfeeder.plugin.output.Output} - * @param sourceFileNamePrefix The prefix with which the locally spooled files are created. - * @param rolloverCondition An object of type {@link RolloverCondition} that will be used to - * determine when to rollover. - * @param rolloverHandler An object of type {@link RolloverHandler} that will be called when - * there should be a rollover. - * @param rolloverTimeThresholdSecs Setting a non-zero value enables time based rollover of - * spool files. Sending a 0 value disables this functionality. - */ - public LogSpooler(String spoolDirectory, String sourceFileNamePrefix, RolloverCondition rolloverCondition, - RolloverHandler rolloverHandler, long rolloverTimeThresholdSecs) { - this.spoolDirectory = spoolDirectory; - this.sourceFileNamePrefix = sourceFileNamePrefix; - this.rolloverCondition = rolloverCondition; - this.rolloverHandler = rolloverHandler; - if (rolloverTimeThresholdSecs != TIME_BASED_ROLLOVER_DISABLED_THRESHOLD) { - rolloverTimer = new Timer("log-spooler-timer-" + sourceFileNamePrefix, true); - rolloverTimer.scheduleAtFixedRate(new LogSpoolerRolloverTimerTask(), - rolloverTimeThresholdSecs*1000, rolloverTimeThresholdSecs*1000); - } - initializeSpoolState(); - } - - private void initializeSpoolDirectory() { - File spoolDir = new File(spoolDirectory); - if (!spoolDir.exists()) { - LOG.info("Creating spool directory: " + spoolDir); - boolean result = spoolDir.mkdirs(); - if (!result) { - throw new LogSpoolerException("Could not create spool directory: " + spoolDirectory); - } - } - } - - private void initializeSpoolState() { - initializeSpoolDirectory(); - currentSpoolFile = initializeSpoolFile(); - try { - currentSpoolBufferedWriter = initializeSpoolWriter(currentSpoolFile); - } catch (IOException e) { - throw new LogSpoolerException("Could not create buffered writer for spool file: " + currentSpoolFile - + ", error message: " + e.getLocalizedMessage(), e); - } - currentSpoolerContext = new LogSpoolerContext(currentSpoolFile); - LOG.info("Initialized spool file at path: " + currentSpoolFile); - } - - @VisibleForTesting - protected File initializeSpoolFile() { - return new File(spoolDirectory, getCurrentFileName()); - } - - @VisibleForTesting - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - return new PrintWriter(new BufferedWriter(new FileWriter(spoolFile))); - } - - /** - * Add an event for spooling. - * - * This method adds the event to the current spool file's buffer. On completion, it - * calls the {@link RolloverCondition#shouldRollover(LogSpoolerContext)} method to determine if - * it is ready to rollover the file. - * @param logEvent The log event to spool. 
- */ - public synchronized void add(String logEvent) { - currentSpoolBufferedWriter.println(logEvent); - currentSpoolerContext.logEventSpooled(); - if (rolloverCondition.shouldRollover(currentSpoolerContext)) { - LOG.info("Trying to rollover based on rollover condition"); - tryRollover(); - } - } - - /** - * Trigger a rollover of the current spool file. - * - * This method manages the rollover of the spool file, and then invokes the - * {@link RolloverHandler#handleRollover(File)} to handle what should be done with the - * rolled over file. - */ - public void rollover() { - LOG.info("Rollover condition detected, rolling over file: " + currentSpoolFile); - currentSpoolBufferedWriter.flush(); - if (currentSpoolFile.length()==0) { - LOG.info("No data in file " + currentSpoolFile + ", not doing rollover"); - } else { - currentSpoolBufferedWriter.close(); - rolloverHandler.handleRollover(currentSpoolFile); - LOG.info("Invoked rollover handler with file: " + currentSpoolFile); - initializeSpoolState(); - } - boolean status = rolloverInProgress.compareAndSet(true, false); - if (!status) { - LOG.error("Should have reset rollover flag!!"); - } - } - - private synchronized void tryRollover() { - if (rolloverInProgress.compareAndSet(false, true)) { - rollover(); - } else { - LOG.warn("Ignoring rollover call as rollover already in progress for file " + - currentSpoolFile); - } - } - - private String getCurrentFileName() { - Date currentDate = new Date(); - String dateStr = DateUtil.dateToString(currentDate, fileDateFormat); - return sourceFileNamePrefix + dateStr; - } - - /** - * Cancel's any time based rollover task, if started. - */ - public void close() { - if (rolloverTimer != null) { - rolloverTimer.cancel(); - } - } - - private class LogSpoolerRolloverTimerTask extends TimerTask { - @Override - public void run() { - LOG.info("Trying rollover based on time"); - tryRollover(); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java deleted file mode 100644 index 616300f6081..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output.spool; - -import java.io.File; -import java.util.Date; - -/** - * A class that holds the state of an spool file. - * - * The state in this class can be used by a {@link RolloverCondition} to determine - * if an active spool file should be rolled over. 
- */ -public class LogSpoolerContext { - - private File activeSpoolFile; - private long numEventsSpooled; - private Date activeLogCreationTime; - - /** - * Create a new LogSpoolerContext - * @param activeSpoolFile the spool file for which to hold state - */ - public LogSpoolerContext(File activeSpoolFile) { - this.activeSpoolFile = activeSpoolFile; - this.numEventsSpooled = 0; - this.activeLogCreationTime = new Date(); - } - - /** - * Increment number of spooled events by one. - */ - public void logEventSpooled() { - numEventsSpooled++; - } - - public File getActiveSpoolFile() { - return activeSpoolFile; - } - - public long getNumEventsSpooled() { - return numEventsSpooled; - } - - public Date getActiveLogCreationTime() { - return activeLogCreationTime; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - LogSpoolerContext that = (LogSpoolerContext) o; - - if (numEventsSpooled != that.numEventsSpooled) return false; - if (!activeSpoolFile.equals(that.activeSpoolFile)) return false; - return activeLogCreationTime.equals(that.activeLogCreationTime); - - } - - @Override - public int hashCode() { - int result = activeSpoolFile.hashCode(); - result = 31 * result + (int) (numEventsSpooled ^ (numEventsSpooled >>> 32)); - result = 31 * result + activeLogCreationTime.hashCode(); - return result; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java deleted file mode 100644 index 14bb1394568..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output.spool; - -public class LogSpoolerException extends RuntimeException { - public LogSpoolerException(String message, Exception cause) { - super(message, cause); - } - - public LogSpoolerException(String message) { - super(message); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java deleted file mode 100644 index 48ace11cca8..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output.spool; - -/** - * An interface that is used to determine whether a rollover of a locally spooled log file should be triggered. - */ -public interface RolloverCondition { - - /** - * Check if the active spool file should be rolled over. - * - * If this returns true, the {@link LogSpooler} will initiate activities related - * to rollover of the file - * @param currentSpoolerContext {@link LogSpoolerContext} that holds state about the file being checked - * for rollover. - * @return true if active spool file should be rolled over, false otherwise - */ - boolean shouldRollover(LogSpoolerContext currentSpoolerContext); -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java deleted file mode 100644 index 2ec27083865..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output.spool; - -import java.io.File; - -/** - * An interface that is used to trigger the handling of a rolled over file. - * - * Implementations of this interface will typically upload the rolled over file to - * a target destination, like HDFS. - */ -public interface RolloverHandler { - /** - * Handle a rolled over file. - * - * This method is called inline from the {@link LogSpooler#rollover()} method. - * Hence implementations should either complete the handling fast, or do so - * asynchronously. The cleanup of the file is left to implementors, but should - * typically be done once the upload the file to the target destination is complete. - * @param rolloverFile The file that has been rolled over. - */ - void handleRollover(File rolloverFile); -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java deleted file mode 100644 index f814a92df1c..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
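To make the spooling contract above concrete, here is a minimal sketch (not taken from this patch; the threshold, paths and handler body are illustrative) of an event-count RolloverCondition and a RolloverHandler wired into a LogSpooler:

package org.apache.ambari.logfeeder.output.spool;

public class SpoolerWiringExample {
  public static void main(String[] args) {
    // Roll over once 10,000 events have been written to the active spool file;
    // LogSpoolerContext tracks the count via logEventSpooled().
    RolloverCondition byCount = context -> context.getNumEventsSpooled() >= 10_000;

    // handleRollover(File) is invoked inline from LogSpooler.rollover(), so keep it cheap
    // (e.g. just hand the file to an uploader queue) or do the real work asynchronously.
    RolloverHandler handler = rolledOverFile ->
        System.out.println("ready for upload: " + rolledOverFile.getAbsolutePath());

    // Time-based rollover every hour on top of the count-based condition.
    LogSpooler spooler = new LogSpooler("/var/lib/ambari-logfeeder/spool", "service-logs-",
        byCount, handler, 3600L);
    spooler.add("{\"logtime\":\"2018-03-01T12:00:00.000Z\",\"log_message\":\"example event\"}");
    spooler.close();
  }
}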
- */ -package org.apache.ambari.logfeeder.util; - -import org.apache.log4j.Logger; - -import com.amazonaws.auth.AWSCredentials; -import com.amazonaws.auth.BasicAWSCredentials; - -public class AWSUtil { - private static final Logger LOG = Logger.getLogger(AWSUtil.class); - - private AWSUtil() { - throw new UnsupportedOperationException(); - } - - public static AWSCredentials createAWSCredentials(String accessKey, String secretKey) { - if (accessKey != null && secretKey != null) { - LOG.debug("Creating aws client as per new accesskey and secretkey"); - AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); - return awsCredentials; - } else { - return null; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java deleted file mode 100644 index c460ab39b46..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.util; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import org.apache.commons.compress.compressors.CompressorOutputStream; -import org.apache.commons.compress.compressors.CompressorStreamFactory; -import org.apache.commons.compress.utils.IOUtils; -import org.apache.log4j.Logger; - -public class CompressionUtil { - - private static final Logger LOG = Logger.getLogger(CompressionUtil.class); - - public static File compressFile(File inputFile, File outputFile, String algoName) { - CompressorOutputStream cos = null; - FileInputStream ios = null; - try { - if (!inputFile.exists()) { - throw new IllegalArgumentException("Input File:" + inputFile.getAbsolutePath() + " is not exist."); - } - if (inputFile.isDirectory()) { - throw new IllegalArgumentException("Input File:" + inputFile.getAbsolutePath() + " is a directory."); - } - File parent = outputFile.getParentFile(); - if (parent != null && !parent.exists()) { - boolean isParentCreated = parent.mkdirs(); - if (!isParentCreated) { - throw new IllegalAccessException( "User does not have permission to create parent directory :" + parent.getAbsolutePath()); - } - } - OutputStream out = new FileOutputStream(outputFile); - cos = new CompressorStreamFactory().createCompressorOutputStream(algoName, out); - ios = new FileInputStream(inputFile); - IOUtils.copy(ios, cos); - } catch (Exception e) { - LOG.error(e); - } finally { - if (cos != null) { - try { - cos.close(); - } catch (IOException e) { - LOG.error(e); - } - } - if (ios != null) { - try { - ios.close(); - } catch (IOException e) { - LOG.error(e); - } - } - } - return outputFile; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java deleted file mode 100644 index 6321e1728be..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
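A brief usage sketch for the compressFile helper above (the paths are made up): commons-compress identifies gzip by the algorithm name "gz", and whatever compression_algo the S3 output is configured with is passed straight through to this call.

package org.apache.ambari.logfeeder.util;

import java.io.File;

public class CompressionUtilExample {
  public static void main(String[] args) {
    // The input must be an existing regular file; the output's parent directory is created if missing.
    File input = new File("/var/lib/ambari-logfeeder/spool/service-logs-2018-03-01-12-00-00");
    File output = new File(input.getParent(), input.getName() + ".gz");
    // "gz" is the CompressorStreamFactory name for gzip; errors are logged and the output file
    // (possibly empty in that case) is still returned.
    File compressed = CompressionUtil.compressFile(input, output, "gz");
    System.out.println("compressed to " + compressed.getAbsolutePath());
  }
}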
- */ -package org.apache.ambari.logfeeder.util; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.TimeZone; - -import org.apache.log4j.Logger; - -public class DateUtil { - private static final Logger LOG = Logger.getLogger(DateUtil.class); - - private DateUtil() { - throw new UnsupportedOperationException(); - } - - public static String dateToString(Date date, String dateFormat) { - if (date == null || dateFormat == null || dateFormat.isEmpty()) { - return ""; - } - try { - SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); - return formatter.format(date).toString(); - } catch (Exception e) { - LOG.error("Error in coverting dateToString format :" + dateFormat, e); - } - return ""; - } - - private final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - private static ThreadLocal dateFormatter = new ThreadLocal() { - @Override - protected SimpleDateFormat initialValue() { - SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - return sdf; - } - }; - - public static String getDate(String timeStampStr) { - try { - return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr))); - } catch (Exception ex) { - LOG.error(ex); - return null; - } - } - - public static String getActualDateStr() { - try { - return dateFormatter.get().format(new Date()); - } catch (Exception ex) { - LOG.error(ex); - return null; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java deleted file mode 100644 index 3270d29625f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
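For reference (illustrative, not from the patch), the thread-local formatter above always emits UTC timestamps in the Solr format yyyy-MM-dd'T'HH:mm:ss.SSS'Z':

package org.apache.ambari.logfeeder.util;

import java.util.Date;

public class DateUtilExample {
  public static void main(String[] args) {
    // Current time in the Solr format, e.g. 2018-03-01T12:34:56.789Z
    System.out.println(DateUtil.getActualDateStr());
    // Epoch milliseconds, passed as a string, rendered in the same UTC format
    System.out.println(DateUtil.getDate("0")); // 1970-01-01T00:00:00.000Z
    // Arbitrary patterns go through dateToString; this one matches LogSpooler's file-name suffix
    System.out.println(DateUtil.dateToString(new Date(), "yyyy-MM-dd-HH-mm-ss"));
  }
}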
- */ - -package org.apache.ambari.logfeeder.util; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.io.FileUtils; -import org.apache.tools.ant.DirectoryScanner; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.type.TypeReference; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FileUtil { - private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class); - private static final String FOLDER_SEPARATOR = "/"; - - private FileUtil() { - throw new UnsupportedOperationException(); - } - - public static List getAllFileFromDir(File directory, String extension, boolean checkInSubDir) { - if (!directory.exists()) { - LOG.error(directory.getAbsolutePath() + " is not exists "); - } else if (!directory.isDirectory()) { - LOG.error(directory.getAbsolutePath() + " is not Directory "); - } else { - return (List) FileUtils.listFiles(directory, new String[]{extension}, checkInSubDir); - } - return new ArrayList(); - } - - - public static Object getFileKey(File file) { - try { - Path fileFullPath = Paths.get(file.getAbsolutePath()); - if (fileFullPath != null) { - BasicFileAttributes basicAttr = Files.readAttributes(fileFullPath, BasicFileAttributes.class); - return basicAttr.fileKey(); - } - } catch (Throwable ex) { - LOG.error("Error getting file attributes for file=" + file, ex); - } - return file.toString(); - } - - public static File getFileFromClasspath(String filename) { - URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(filename); - LOG.debug("File Complete URI :" + fileCompleteUrl); - File file = null; - try { - file = new File(fileCompleteUrl.toURI()); - } catch (Exception exception) { - LOG.debug(exception.getMessage(), exception.getCause()); - } - return file; - } - - public static HashMap readJsonFromFile(File jsonFile) { - ObjectMapper mapper = new ObjectMapper(); - try { - HashMap jsonmap = mapper.readValue(jsonFile, new TypeReference>() {}); - return jsonmap; - } catch (IOException e) { - LOG.error("{}", e); - } - return new HashMap(); - } - - public static File[] getInputFilesByPattern(String searchPath) { - File searchFile = new File(searchPath); - if (searchFile.isFile()) { - return new File[]{searchFile}; - } else { - if (searchPath.contains("*")) { - try { - String folderBeforeRegex = getLogDirNameBeforeWildCard(searchPath); - String fileNameAfterLastFolder = searchPath.substring(folderBeforeRegex.length()); - - DirectoryScanner scanner = new DirectoryScanner(); - scanner.setIncludes(new String[]{fileNameAfterLastFolder}); - scanner.setBasedir(folderBeforeRegex); - scanner.setCaseSensitive(true); - scanner.scan(); - String[] fileNames = scanner.getIncludedFiles(); - - if (fileNames != null && fileNames.length > 0) { - File[] files = new File[fileNames.length]; - for (int i = 0; i < fileNames.length; i++) { - files[i] = new File(folderBeforeRegex + fileNames[i]); - } - return files; - } - } catch (Exception e) { - LOG.info("Input file was not found by pattern (exception thrown); {}, message: {}", searchPath, e.getMessage()); - } - - } else { - LOG.info("Input file config was not found by pattern; {}", searchPath); - } - return new 
File[]{}; - } - } - - public static Map> getFoldersForFiles(File[] inputFiles) { - Map> foldersMap = new HashMap<>(); - if (inputFiles != null && inputFiles.length > 0) { - for (File inputFile : inputFiles) { - File folder = inputFile.getParentFile(); - if (folder.exists()) { - if (foldersMap.containsKey(folder.getAbsolutePath())) { - foldersMap.get(folder.getAbsolutePath()).add(inputFile); - } else { - List fileList = new ArrayList<>(); - fileList.add(inputFile); - foldersMap.put(folder.getAbsolutePath(), fileList); - } - } - } - } - if (!foldersMap.isEmpty()) { - for (Map.Entry> entry : foldersMap.entrySet()) { - Collections.sort(entry.getValue(), Collections.reverseOrder()); - } - } - return foldersMap; - } - - private static String getLogDirNameBeforeWildCard(String pattern) { - String[] splitByFirstRegex = pattern.split("\\*"); - String beforeRegex = splitByFirstRegex[0]; - if (beforeRegex.contains(FOLDER_SEPARATOR)) { - int endIndex = beforeRegex.lastIndexOf(FOLDER_SEPARATOR); - String parentFolder = beforeRegex; - if (endIndex != -1) { - parentFolder = beforeRegex.substring(0, endIndex) + FOLDER_SEPARATOR; - } - return parentFolder; - } else { - return beforeRegex; - } - } - - public static void move(File source, File target) throws IOException { - Path sourcePath = Paths.get(source.getAbsolutePath()); - Path targetPath = Paths.get(target.getAbsolutePath()); - Files.move(sourcePath, targetPath, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING); - } - - public static boolean isFileTooOld(File file, long diffMin) { - return (System.currentTimeMillis() - file.lastModified()) > diffMin * 1000 * 60; - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederHDFSUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederHDFSUtil.java deleted file mode 100644 index 4248ae184cf..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederHDFSUtil.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
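A short sketch of how the wildcard resolution above behaves (the path is hypothetical): everything before the first '*' up to the last '/' becomes the DirectoryScanner base directory, and the remainder is used as the include pattern.

package org.apache.ambari.logfeeder.util;

import java.io.File;
import java.util.List;
import java.util.Map;

public class FileUtilExample {
  public static void main(String[] args) {
    // Base dir: /var/log/hadoop/hdfs/   include pattern: hadoop-hdfs-namenode-*.log
    File[] matches = FileUtil.getInputFilesByPattern("/var/log/hadoop/hdfs/hadoop-hdfs-namenode-*.log");
    // Group matches by parent folder; within each folder the files come back sorted in reverse path order.
    Map<String, List<File>> byFolder = FileUtil.getFoldersForFiles(matches);
    byFolder.forEach((folder, files) -> System.out.println(folder + " -> " + files.size() + " file(s)"));
  }
}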
- */ -package org.apache.ambari.logfeeder.util; - -import java.io.IOException; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.log4j.Logger; - -public class LogFeederHDFSUtil { - private static final Logger LOG = Logger.getLogger(LogFeederHDFSUtil.class); - - private LogFeederHDFSUtil() { - throw new UnsupportedOperationException(); - } - - public static boolean copyFromLocal(String sourceFilepath, String destFilePath, FileSystem fileSystem, boolean overwrite, - boolean delSrc) { - Path src = new Path(sourceFilepath); - Path dst = new Path(destFilePath); - boolean isCopied = false; - try { - LOG.info("copying localfile := " + sourceFilepath + " to hdfsPath := " + destFilePath); - fileSystem.copyFromLocalFile(delSrc, overwrite, src, dst); - isCopied = true; - } catch (Exception e) { - LOG.error("Error copying local file :" + sourceFilepath + " to hdfs location : " + destFilePath, e); - } - return isCopied; - } - - public static FileSystem buildFileSystem(String hdfsHost, String hdfsPort) { - try { - Configuration configuration = buildHdfsConfiguration(hdfsHost, hdfsPort); - FileSystem fs = FileSystem.get(configuration); - return fs; - } catch (Exception e) { - LOG.error("Exception is buildFileSystem :", e); - } - return null; - } - - private static Configuration buildHdfsConfiguration(String hdfsHost, String hdfsPort) { - String url = "hdfs://" + hdfsHost + ":" + hdfsPort + "/"; - Configuration configuration = new Configuration(); - configuration.set("fs.default.name", url); - return configuration; - } - - public static void closeFileSystem(FileSystem fileSystem) { - if (fileSystem != null) { - try { - fileSystem.close(); - } catch (IOException e) { - LOG.error(e.getLocalizedMessage(), e.getCause()); - } - } - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java deleted file mode 100644 index 9b0b0e889df..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
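An illustrative round trip through the HDFS helper above (host, port and paths are placeholders, not values from this patch):

package org.apache.ambari.logfeeder.util;

import org.apache.hadoop.fs.FileSystem;

public class LogFeederHDFSUtilExample {
  public static void main(String[] args) {
    // Builds a Configuration with fs.default.name = hdfs://nn.example.com:8020/ and opens the FileSystem.
    FileSystem fs = LogFeederHDFSUtil.buildFileSystem("nn.example.com", "8020");
    if (fs != null) {
      boolean copied = LogFeederHDFSUtil.copyFromLocal(
          "/tmp/service.log.gz",        // local source
          "/logfeeder/service.log.gz",  // HDFS destination
          fs,
          true,                         // overwrite an existing destination
          false);                       // keep the local source file
      System.out.println("copied=" + copied);
      LogFeederHDFSUtil.closeFileSystem(fs);
    }
  }
}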
- */ -package org.apache.ambari.logfeeder.util; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.reflect.TypeToken; -import org.apache.ambari.logfeeder.input.InputFile; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; - -import java.lang.reflect.Type; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.HashMap; -import java.util.Hashtable; -import java.util.Map; - -public class LogFeederUtil { - private static final Logger LOG = Logger.getLogger(LogFeederUtil.class); - - private final static String GSON_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - private static Gson gson = new GsonBuilder().setDateFormat(GSON_DATE_FORMAT).create(); - - public static Gson getGson() { - return gson; - } - - public static String hostName = null; - public static String ipAddress = null; - - static{ - try { - InetAddress ip = InetAddress.getLocalHost(); - ipAddress = ip.getHostAddress(); - String getHostName = ip.getHostName(); - String getCanonicalHostName = ip.getCanonicalHostName(); - if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) { - LOG.info("Using getCanonicalHostName()=" + getCanonicalHostName); - hostName = getCanonicalHostName; - } else { - LOG.info("Using getHostName()=" + getHostName); - hostName = getHostName; - } - LOG.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName + ", getCanonicalHostName=" + getCanonicalHostName + - ", hostName=" + hostName); - } catch (UnknownHostException e) { - LOG.error("Error getting hostname.", e); - } - } - - public static void logStatForMetric(MetricData metric, String prefixStr, String postFix) { - long currStat = metric.value; - long currMS = System.currentTimeMillis(); - if (currStat > metric.prevLogValue) { - LOG.info(prefixStr + ": total_count=" + metric.value + ", duration=" + (currMS - metric.prevLogTime) / 1000 + - " secs, count=" + (currStat - metric.prevLogValue) + postFix); - } - metric.prevLogValue = currStat; - metric.prevLogTime = currMS; - } - - public static Map cloneObject(Map map) { - if (map == null) { - return null; - } - String jsonStr = gson.toJson(map); - Type type = new TypeToken>() {}.getType(); - return gson.fromJson(jsonStr, type); - } - - public static Map toJSONObject(String jsonStr) { - if (StringUtils.isBlank(jsonStr)) { - return new HashMap(); - } - Type type = new TypeToken>() {}.getType(); - return gson.fromJson(jsonStr, type); - } - - public static int objectToInt(Object objValue, int retValue, String errMessage) { - if (objValue == null) { - return retValue; - } - String strValue = objValue.toString(); - if (StringUtils.isNotEmpty(strValue)) { - try { - retValue = Integer.parseInt(strValue); - } catch (Throwable t) { - LOG.error("Error parsing integer value. 
str=" + strValue + ", " + errMessage); - } - } - return retValue; - } - - private static class LogHistory { - private long lastLogTime = 0; - private int counter = 0; - } - - private static Map logHistoryList = new Hashtable<>(); - - public static boolean logErrorMessageByInterval(String key, String message, Throwable e, Logger callerLogger, Level level) { - LogFeederUtil.LogHistory log = logHistoryList.get(key); - if (log == null) { - log = new LogFeederUtil.LogHistory(); - logHistoryList.put(key, log); - } - - if ((System.currentTimeMillis() - log.lastLogTime) > 30 * 1000) { - log.lastLogTime = System.currentTimeMillis(); - if (log.counter > 0) { - message += ". Messages suppressed before: " + log.counter; - } - log.counter = 0; - callerLogger.log(level, message, e); - - return true; - } else { - log.counter++; - return false; - } - } - - public static void fillMapWithFieldDefaults(Map jsonObj, InputMarker inputMarker, boolean force) { - if (inputMarker != null && inputMarker.getInput() != null && (force || inputMarker.getInput().isInitDefaultFields())) { - if (jsonObj.get("type") == null) { - jsonObj.put("type", inputMarker.getInput().getInputDescriptor().getType()); - } - if (inputMarker.getInput() instanceof InputFile) { - if (jsonObj.get("path") == null && ((InputFile)inputMarker.getInput()).getFilePath() != null) { - jsonObj.put("path", ((InputFile)inputMarker.getInput()).getFilePath()); - } - } - if (jsonObj.get("path") == null && inputMarker.getInput().getInputDescriptor().getPath() != null) { - jsonObj.put("path", inputMarker.getInput().getInputDescriptor().getPath()); - } - if (jsonObj.get("host") == null && hostName != null) { - jsonObj.put("host", hostName); - } - if (jsonObj.get("ip") == null && ipAddress != null) { - jsonObj.put("ip", ipAddress); - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java deleted file mode 100644 index 13f286516cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.logfeeder.util; - -import java.util.HashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.apache.commons.lang3.StringUtils; - -public class PlaceholderUtil { - private PlaceholderUtil() { - throw new UnsupportedOperationException(); - } - - private static final Pattern placeHolderPattern = Pattern.compile("\\$\\s*(\\w+)"); - - public static String replaceVariables(String inputStr, HashMap contextParam) { - Matcher m = placeHolderPattern.matcher(inputStr); - String output = new String(inputStr); - while (m.find()) { - String placeholder = m.group(); - if (placeholder != null && !placeholder.isEmpty()) { - String key = placeholder.replace("$","").toLowerCase();// remove brace - String replacement = getFromContext(contextParam, placeholder, key); - output = output.replace(placeholder, replacement); - } - } - return output; - } - - private static String getFromContext(HashMap contextParam, String defaultValue, String key) { - String returnValue = defaultValue; // by default set default value as a return - if (contextParam != null) { - String value = contextParam.get(key); - if (StringUtils.isNotBlank(value)) { - returnValue = value; - } - } - return returnValue; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java deleted file mode 100644 index 31a38d0abcf..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
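A possible use of PlaceholderUtil.replaceVariables() above, for orientation. The HashMap parameter's type arguments appear to have been stripped in this listing and are assumed to be <String, String> here; the $host and $path placeholders and the PlaceholderExample class are invented for the example. Keys are looked up lower-cased, and placeholders with no matching key are left in the string unchanged.

import java.util.HashMap;

import org.apache.ambari.logfeeder.util.PlaceholderUtil;

public class PlaceholderExample {
  public static void main(String[] args) {
    // Context keys must be lower case, because replaceVariables() lower-cases the lookup key.
    HashMap<String, String> context = new HashMap<>();
    context.put("host", "node-1.example.com");

    String out = PlaceholderUtil.replaceVariables("logs from $host at $path", context);
    // "$host" is resolved from the context, "$path" has no entry and stays as-is:
    System.out.println(out); // -> logs from node-1.example.com at $path
  }
}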
- */ -package org.apache.ambari.logfeeder.util; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.zip.GZIPInputStream; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.commons.io.IOUtils; -import org.apache.log4j.Logger; - -import com.amazonaws.AmazonClientException; -import com.amazonaws.auth.AWSCredentials; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; -import com.amazonaws.services.s3.model.GetObjectRequest; -import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.transfer.TransferManager; - -/** - * Utility to connect to s3 - */ -public class S3Util { - private static final Logger LOG = Logger.getLogger(S3Util.class); - - private S3Util() { - throw new UnsupportedOperationException(); - } - - public static AmazonS3 getS3Client(String accessKey, String secretKey) { - AWSCredentials awsCredentials = AWSUtil.createAWSCredentials(accessKey, secretKey); - AmazonS3 s3client; - if (awsCredentials != null) { - s3client = new AmazonS3Client(awsCredentials); - } else { - s3client = new AmazonS3Client(); - } - return s3client; - } - - public static TransferManager getTransferManager(String accessKey, String secretKey) { - AWSCredentials awsCredentials = AWSUtil.createAWSCredentials(accessKey, secretKey); - TransferManager transferManager; - if (awsCredentials != null) { - transferManager = new TransferManager(awsCredentials); - } else { - transferManager = new TransferManager(); - } - return transferManager; - } - - public static void shutdownTransferManager(TransferManager transferManager) { - if (transferManager != null) { - transferManager.shutdownNow(); - } - } - - public static String getBucketName(String s3Path) { - String bucketName = null; - // s3path - if (s3Path != null) { - String[] s3PathParts = s3Path.replace(LogFeederConstants.S3_PATH_START_WITH, "").split(LogFeederConstants.S3_PATH_SEPARATOR); - bucketName = s3PathParts[0]; - } - return bucketName; - } - - public static String getS3Key(String s3Path) { - StringBuilder s3Key = new StringBuilder(); - if (s3Path != null) { - String[] s3PathParts = s3Path.replace(LogFeederConstants.S3_PATH_START_WITH, "").split(LogFeederConstants.S3_PATH_SEPARATOR); - ArrayList s3PathList = new ArrayList(Arrays.asList(s3PathParts)); - s3PathList.remove(0);// remove bucketName - for (int index = 0; index < s3PathList.size(); index++) { - if (index > 0) { - s3Key.append(LogFeederConstants.S3_PATH_SEPARATOR); - } - s3Key.append(s3PathList.get(index)); - } - } - return s3Key.toString(); - } - - /** - * Get the buffer reader to read s3 file as a stream - */ - public static BufferedReader getReader(String s3Path, String accessKey, String secretKey) throws IOException { - // TODO error handling - // Compression support - // read header and decide the compression(auto detection) - // For now hard-code GZIP compression - String s3Bucket = getBucketName(s3Path); - String s3Key = getS3Key(s3Path); - S3Object fileObj = getS3Client(accessKey, secretKey).getObject(new GetObjectRequest(s3Bucket, s3Key)); - try { - GZIPInputStream objectInputStream = new GZIPInputStream(fileObj.getObjectContent()); - BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(objectInputStream)); - return 
bufferedReader; - } catch (IOException e) { - LOG.error("Error in creating stream reader for s3 file :" + s3Path, e.getCause()); - throw e; - } - } - - public static void writeIntoS3File(String data, String bucketName, String s3Key, String accessKey, String secretKey) { - InputStream in = null; - try { - in = IOUtils.toInputStream(data, "UTF-8"); - } catch (IOException e) { - LOG.error(e); - } - - if (in != null) { - TransferManager transferManager = getTransferManager(accessKey, secretKey); - try { - if (transferManager != null) { - transferManager.upload(new PutObjectRequest(bucketName, s3Key, in, new ObjectMetadata())).waitForUploadResult(); - LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :" + bucketName); - } - } catch (AmazonClientException | InterruptedException e) { - LOG.error(e); - } finally { - try { - shutdownTransferManager(transferManager); - in.close(); - } catch (IOException e) { - // ignore - } - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json deleted file mode 100644 index 229a9b609b8..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "input": { - "file": { - "klass": "org.apache.ambari.logfeeder.input.InputFile" - }, - "s3_file": { - "klass": "org.apache.ambari.logfeeder.input.InputS3File" - }, - "simulate": { - "klass": "org.apache.ambari.logfeeder.input.InputSimulate" - }, - "socket": { - "klass": "org.apache.ambari.logfeeder.input.InputSocket" - } - }, - "filter": { - "json": { - "klass": "org.apache.ambari.logfeeder.filter.FilterJSON" - }, - "keyvalue": { - "klass": "org.apache.ambari.logfeeder.filter.FilterKeyValue" - }, - "grok": { - "klass": "org.apache.ambari.logfeeder.filter.FilterGrok" - } - }, - "mapper": { - "map_date": { - "klass": "org.apache.ambari.logfeeder.mapper.MapperDate" - }, - "map_field_copy": { - "klass": "org.apache.ambari.logfeeder.mapper.MapperFieldCopy" - }, - "map_field_name": { - "klass": "org.apache.ambari.logfeeder.mapper.MapperFieldName" - }, - "map_field_value": { - "klass": "org.apache.ambari.logfeeder.mapper.MapperFieldValue" - }, - "map_anonymize": { - "klass": "org.apache.ambari.logfeeder.mapper.MapperAnonymize" - } - }, - "output": { - "solr": { - "klass": "org.apache.ambari.logfeeder.output.OutputSolr" - }, - "file": { - "klass": "org.apache.ambari.logfeeder.output.OutputFile" - }, - "kafka": { - "klass": "org.apache.ambari.logfeeder.output.OutputKafka" - }, - "dev_null": { - "klass": "org.apache.ambari.logfeeder.output.OutputDevNull" - }, - "s3_file": { - "klass": "org.apache.ambari.logfeeder.output.OutputS3File" - }, - "hdfs": { - "klass": "org.apache.ambari.logfeeder.output.OutputHDFSFile" - } - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/filters.config.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/filters.config.json deleted file mode 100644 index d9006da0332..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/filters.config.json +++ /dev/null @@ -1,626 +0,0 @@ -{ - "filter":[ - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "accumulo_master" - ] - - } - - }, - "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - 
"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "comment":"This one has one extra space after LEVEL", - "conditions":{ - "fields":{ - "type":[ - "accumulo_gc", - "accumulo_monitor", - "accumulo_tracer", - "accumulo_tserver" - ] - - } - - }, - "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "atlas_app", - "falcon_app" - ] - - } - - }, - "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "ams_collector" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %p %c: %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "ams_hbase_master", - "ams_hbase_regionserver", - "hbase_master", - "hbase_regionserver" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "ambari_agent" - ] - - } - - }, - "log4j_format":"", - "multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - }, - "level":{ - "map_field_value":{ - "pre_value":"WARNING", - "post_value":"WARN" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "ambari_server" - ] - - } - - }, - "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n", - "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})", - "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"dd MMM yyyy HH:mm:ss" - } - - } - - } - - }, - { - "filter":"grok", - 
"conditions":{ - "fields":{ - "type":[ - "hdfs_datanode", - "hdfs_journalnode", - "hdfs_secondarynamenode", - "hdfs_namenode", - "hdfs_zkfc", - "knox_gateway", - "knox_cli", - "knox_ldap", - "mapred_historyserver", - "yarn_historyserver", - "yarn_jobsummary", - "yarn_nodemanager", - "yarn_resourcemanager", - "yarn_timelineserver" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "hive_hiveserver2", - "hive_metastore" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "kafka_controller", - "kafka_request", - "kafka_logcleaner" - ] - - } - - }, - "log4j_format":"[%d] %p %m (%c)%n", - "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])", - "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed", - "conditions":{ - "fields":{ - "type":[ - "kafka_server", - "kafka_statechange" - ] - - } - - }, - "log4j_format":"[%d] %p %m (%c)%n", - "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])", - "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "oozie_app" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "logsearch_app", - "logsearch_feeder", - "logsearch_perf", - "ranger_admin", - "ranger_dbpatch" - ] - - } - - }, - "log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - 
"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "ranger_kms" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "ranger_usersync" - ] - - } - - }, - "log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n", - "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})", - "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"dd MMM yyyy HH:mm:ss" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "storm_drpc", - "storm_logviewer", - "storm_nimbus", - "storm_supervisor", - "storm_ui", - "storm_worker" - ] - - } - - }, - "log4j_format":"", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss.SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "zookeeper" - ] - - } - - }, - "log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "hdfs_audit" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "evtTime":{ - "map_date":{ - "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"keyvalue", - "sort_order":1, - "conditions":{ - "fields":{ - "type":[ - "hdfs_audit" - ] - - } - - }, - "source_field":"log_message", - "value_split":"=", - "field_split":"\t", - "post_map_values":{ - "src":{ - "map_field_name":{ - "new_field_name":"resource" - } - - }, - "ip":{ - "map_field_name":{ - "new_field_name":"cliIP" - } - - }, - "allowed":[ - { - "map_field_value":{ - "pre_value":"true", - "post_value":"1" - } - - }, - { - "map_field_value":{ - "pre_value":"false", - "post_value":"0" - } - - }, - { - "map_field_name":{ - "new_field_name":"result" - } - - } - - ], - "cmd":{ - "map_field_name":{ - "new_field_name":"action" - } - - 
}, - "proto":{ - "map_field_name":{ - "new_field_name":"cliType" - } - - }, - "callerContext":{ - "map_field_name":{ - "new_field_name":"req_caller_id" - } - - } - - } - - }, - { - "filter":"grok", - "sort_order":2, - "source_field":"ugi", - "remove_source_field":"false", - "conditions":{ - "fields":{ - "type":[ - "hdfs_audit" - ] - - } - - }, - "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}", - "post_map_values":{ - "user":{ - "map_field_name":{ - "new_field_name":"reqUser" - } - - }, - "x_user":{ - "map_field_name":{ - "new_field_name":"reqUser" - } - - }, - "p_user":{ - "map_field_name":{ - "new_field_name":"reqUser" - } - - }, - "k_user":{ - "map_field_name":{ - "new_field_name":"proxyUsers" - } - - }, - "p_authType":{ - "map_field_name":{ - "new_field_name":"authType" - } - - }, - "k_authType":{ - "map_field_name":{ - "new_field_name":"proxyAuthType" - } - - } - - } - - } - - ] -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/grok-patterns b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/grok-patterns deleted file mode 100644 index 0f1d9775bfa..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/grok-patterns +++ /dev/null @@ -1,149 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#Updated JAVACLASS to be same as JAVAFILE. Because if class doesn't have package, then it doesn't work. -JAVACLASS (?:[A-Za-z$0-9_. -]+) -#JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* -#JAVACLASS (?:[a-zA-Z0-9-]+\.)+[A-Za-z0-9$]+ - -#Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' -JAVAFILE (?:[A-Za-z0-9_. -]+) -#Allow special or method -JAVAMETHOD (?:()|()|[a-zA-Z$_][a-zA-Z$_0-9]*) -#Line number is optional in special cases 'Native method' or 'Unknown source' -JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\) -# Java Logs -JAVATHREAD (?:[A-Z]{2}-Processor[\d]+) - -JAVASTACKTRACEPART at %{JAVACLASS:class}\.%{WORD:method}\(%{JAVAFILE:file}:%{NUMBER:line}\) -JAVALOGMESSAGE (.*) -# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM -CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM) -# yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800 -TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE} -CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage} -# 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something compeletely unexpected happened... 
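Referring back to the hdfs_audit entries in filters.config.json above: the keyvalue filter splits log_message on tabs and '=', and post_map_values then renames the extracted keys (src to resource, ip to cliIP, cmd to action, allowed to a 0/1 result, and so on). Below is a simplified stand-in for that behaviour with a made-up audit line; HdfsAuditKeyValueSketch is not part of the Log Feeder, and the ugi value is left untouched because the sort_order-2 grok filter above is what decomposes it into reqUser/proxyUsers.

import java.util.HashMap;
import java.util.Map;

public class HdfsAuditKeyValueSketch {
  public static void main(String[] args) {
    // Illustrative audit line only; field_split "\t" and value_split "=" mirror the config above.
    String logMessage = "allowed=true\tugi=bob (auth:SIMPLE)\tip=/10.0.0.1\tcmd=open\tsrc=/tmp/data.csv\tproto=rpc";

    Map<String, String> rename = new HashMap<>();
    rename.put("src", "resource");
    rename.put("ip", "cliIP");
    rename.put("cmd", "action");
    rename.put("proto", "cliType");
    rename.put("callerContext", "req_caller_id");

    Map<String, String> fields = new HashMap<>();
    for (String pair : logMessage.split("\t")) {
      int eq = pair.indexOf('=');
      if (eq <= 0) {
        continue;
      }
      String key = pair.substring(0, eq);
      String value = pair.substring(eq + 1);
      if ("allowed".equals(key)) {
        // map_field_value turns true/false into 1/0, then map_field_name renames it to "result".
        fields.put("result", "true".equals(value) ? "1" : "0");
      } else {
        fields.put(rename.getOrDefault(key, key), value);
      }
    }
    // Prints (order may vary): {result=1, ugi=bob (auth:SIMPLE), cliIP=/10.0.0.1,
    // action=open, resource=/tmp/data.csv, cliType=rpc}
    System.out.println(fields);
  }
}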
-TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage} - -USERNAME [a-zA-Z0-9._-]+ -USER %{USERNAME} -EMAILLOCALPART [a-zA-Z][a-zA-Z0-9_.+-=:]+ -EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME} -HTTPDUSER %{EMAILADDRESS}|%{USER} -INT (?:[+-]?(?:[0-9]+)) -BASE10NUM (?[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+))) -NUMBER (?:%{BASE10NUM}) -BASE16NUM (?(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)) -UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12} - -# Networking -MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC}) -CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4}) -WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2}) -COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}) -IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)? -IPV4 (?[A-Za-z]+:|\\)(?:\\[^\\?*]*)+ -URIPROTO [A-Za-z]+(\+[A-Za-z+]+)? -URIHOST %{IPORHOST}(?::%{POSINT:port})? -# uripath comes loosely from RFC1738, but mostly from what Firefox -# doesn't turn into %XX -URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+ -#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)? -URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]* -URIPATHPARAM %{URIPATH}(?:%{URIPARAM})? -URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})? - -# Months: January, Feb, 3, 03, 12, December -MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May?|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b -MONTHNUM (?:0?[1-9]|1[0-2]) -MONTHNUM2 (?:0[1-9]|1[0-2]) -MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) - -# Days: Monday, Tue, Thu, etc... -DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?) - -# Years? -YEAR (?>\d\d){1,2} -HOUR (?:2[0123]|[01]?[0-9]) -MINUTE (?:[0-5][0-9]) -# '60' is a leap second in most time standards and thus is valid. -SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) -TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) -# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it) -DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR} -DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR} -DATE_EU2 %{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY} -ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE})) -ISO8601_SECOND (?:%{SECOND}|60) -TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? 
-DATE %{DATE_US}|%{DATE_EU}|%{DATE_EU2} -DATESTAMP %{DATE}[- ]%{TIME} -TZ (?:[PMCE][SD]T|UTC) -DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ} -DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE} -DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR} -DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND} -HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} - -# Syslog Dates: Month Day HH:MM:SS -SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME} -PROG [\x21-\x5a\x5c\x5e-\x7e]+ -SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])? -SYSLOGHOST %{IPORHOST} -SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}> -HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT} - -# Shortcuts -QS %{QUOTEDSTRING} - -# Log formats -SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}: -COMMONAPACHELOG %{IPORHOST:clientip} %{HTTPDUSER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-) -COMBINEDAPACHELOG %{COMMONAPACHELOG} %{QS:referrer} %{QS:agent} -HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:errormsg} -HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{WORD:module}:%{LOGLEVEL:loglevel}\] \[pid %{POSINT:pid}:tid %{NUMBER:tid}\]( \(%{POSINT:proxy_errorcode}\)%{DATA:proxy_errormessage}:)?( \[client %{IPORHOST:client}:%{POSINT:clientport}\])? %{DATA:errorcode}: %{GREEDYDATA:message} -HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG} - - -# Log Levels -LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo|INFO|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?) 
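Every multiline_pattern in the filter configurations above anchors on one of these timestamp patterns: a line that begins with a timestamp opens a new event, and anything else is folded into the previous one. A rough boundary check in that spirit, using a hand-written simplification of TIMESTAMP_ISO8601 rather than the exact expansion defined in this pattern file.

import java.util.regex.Pattern;

public class MultilineBoundarySketch {
  // Simplified stand-in for %{TIMESTAMP_ISO8601}: date, 'T' or space, time, optional fraction.
  private static final Pattern NEW_EVENT =
      Pattern.compile("^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}([.,]\\d+)?");

  public static boolean startsNewEvent(String line) {
    return NEW_EVENT.matcher(line).find();
  }

  public static void main(String[] args) {
    System.out.println(startsNewEvent("2016-04-08 15:55:23,548 INFO This is a test")); // true
    System.out.println(startsNewEvent("    at org.example.Foo.bar(Foo.java:42)"));     // false, appended to previous event
  }
}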
- - -# Custom -USER_SYNC_DATE %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} -SPARK_DATESTAMP %{YEAR}[/-]%{MONTHNUM2}[/-]%{MONTHDAY} %{TIME} -CUSTOM_DATESTAMP %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} -CUSTOM_SEPARATOR %{SPACE}\|%{SPACE} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/.gitignore b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/.gitignore deleted file mode 100644 index dfb10d62d2b..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -logs/*.log -shipper-conf/input.config-*.json -!shipper-conf/input.config-sample.json - diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/logs/service_sample.txt b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/logs/service_sample.txt deleted file mode 100644 index 21048ac0f81..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/logs/service_sample.txt +++ /dev/null @@ -1,3 +0,0 @@ -2016-07-13 10:45:49,640 [WARN] Sample log line 1 - warn level -2016-07-13 10:45:49,640 [ERROR] Sample log line 2 - error level -2016-07-13 10:45:50,351 [INFO] Sample log line 3 - info level \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/global.config.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/global.config.json deleted file mode 100644 index 6b8602c6e44..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/global.config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "global":{ - "add_fields":{ - "cluster":"cl1" - }, - "source":"file", - "tail":"true", - "gen_event_md5":"true" - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/input.config-sample.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/input.config-sample.json deleted file mode 100644 index 690bb29bb99..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/input.config-sample.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "input": [ - { - "type": "service_sample", - "rowtype": "service", - "path": "target/classes/log-samples/logs/service_sample.txt", - "group": "Ambari" - }, - { - "type": "service_socket", - "rowtype": "service", - "port": 61999, - "protocol" : "tcp", - "source" : "socket", - "log4j": "true" - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "service_sample" - ] - } - }, - "log4j_format": "", - "multiline_pattern": "^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern": "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "logtime": { - "map_date": { - "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS" - } - } - } - }, - { - "filter": "json", - "conditions": { - "fields": { - "type": [ - "service_socket" - ] - } - } - } - ] -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/output.config-sample.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/output.config-sample.json deleted file mode 100644 index 4f1ad6d7d9d..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log-samples/shipper-conf/output.config-sample.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "output": [ - { - "comment": "Output to solr for service logs", - "is_enabled": "true", - "collection": "hadoop_logs", - "destination": "solr", - "zk_connect_string": "localhost:2181", - "type": "service", - "skip_logtime": "true", - "conditions": { - "fields": { - "rowtype": [ - "service" - ] - } - } - }, - { - "comment": "Output to solr for audit records", - "is_enabled": "true", - "collection": "audit_logs", - "destination": "solr", - "zk_connect_string": "localhost:2181", - "type": "audit", - "skip_logtime": "true", - "conditions": { - "fields": { - "rowtype": [ - "audit" - ] - } - } - } - ] -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml deleted file mode 100644 index d01160c7b8f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml +++ /dev/null @@ -1,67 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties deleted file mode 100644 index 0fb1058505d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties +++ /dev/null @@ -1,40 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
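The two output blocks in output.config-sample.json above differ only in their conditions on rowtype, so routing reduces to a single field check. A minimal sketch of that rule, with the collection names copied from the sample config; OutputRoutingSketch is an illustrative class name, not Log Feeder code.

import java.util.HashMap;
import java.util.Map;

public class OutputRoutingSketch {
  // An output block applies when every field in its "conditions" matches; here that is just rowtype.
  public static String collectionFor(Map<String, Object> event) {
    Object rowtype = event.get("rowtype");
    if ("service".equals(rowtype)) {
      return "hadoop_logs";
    }
    if ("audit".equals(rowtype)) {
      return "audit_logs";
    }
    return null; // no configured output matches
  }

  public static void main(String[] args) {
    Map<String, Object> event = new HashMap<>();
    event.put("rowtype", "service");
    System.out.println(collectionFor(event)); // -> hadoop_logs
  }
}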
- -cluster.name=cl1 -logsearch.config.zk_connect_string=localhost:2181 - -logfeeder.metrics.collector.hosts= -logfeeder.checkpoint.folder=${LOGFEEDER_RELATIVE_LOCATION:}target/checkpoints -logfeeder.config.dir=${LOGFEEDER_RELATIVE_LOCATION:}target/classes/log-samples/shipper-conf/ -logfeeder.config.files=${LOGFEEDER_RELATIVE_LOCATION:}target/classes/log-samples/shipper-conf/global.config.json,\ - ${LOGFEEDER_RELATIVE_LOCATION:}target/classes/log-samples/shipper-conf/output.config-sample.json - -logfeeder.log.filter.enable=true -logfeeder.include.default.level=FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN - -logfeeder.solr.zk_connect_string=localhost:2181 - -logfeeder.cache.enabled=true -logfeeder.cache.size=100 -logfeeder.cache.key.field=log_message -logfeeder.cache.dedup.interval=1000 -logfeeder.cache.last.dedup.enabled=true - -#logfeeder tmp dir -logfeeder.tmp.dir=${LOGFEEDER_RELATIVE_LOCATION:}target/tmp - -#logfeeder.configs.local.enabled=true -#logfeeder.configs.filter.solr.enabled=true diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/logfeeder-env.sh b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/logfeeder-env.sh deleted file mode 100755 index 37e99306217..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/logfeeder-env.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Log Feeder extra options -export LOGFEEDER_JAVA_OPTS=${LOGFEEDER_JAVA_OPTS:-""} - -# Log Feeder debug options -# export LOGFEEDER_DEBUG=true -# exoprt LOGFEEDER_DEBUG_SUSPEND=n -export LOGFEEDER_DEBUG_PORT=5006 - -# Log Feeder memory -# export LOGFEEDER_JAVA_MEM="-Xmx512m" - -# export LOG_PATH=/var/log/ambari-logsearch-logfeeder/ -# export LOG_FILE=logfeeder.log - -# Pid file of the application -# export LOGFEEDER_PID_DIR=/var/run/ambari-logsearch-logfeeder -# export LOGFEEDER_PID_FILE=logfeeder.pid - -# SSL settings" -# export LOGFEEDER_SSL="true" -# export LOGFEEDER_KEYSTORE_LOCATION="/my/path/keystore.jks" -# export LOGFEEDER_KEYSTORE_TYPE="jks" -# export LOGFEEDER_TRUSTSTORE_LOCATION="/my/path/trutstore.jks" -# export LOGFEEDER_TRUSTSTORE_TYPE="jks" \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/logfeeder.sh b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/logfeeder.sh deleted file mode 100755 index 7f1d8ec59da..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/logfeeder.sh +++ /dev/null @@ -1,313 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "$LOGFEEDER_JAVA_MEM" = "" ]; then - LOGFEEDER_JAVA_MEM="-Xmx512m" -fi - -readlinkf(){ - # get real path on mac OSX - perl -MCwd -e 'print Cwd::abs_path shift' "$1"; -} - -if [ "$(uname -s)" = 'Linux' ]; then - SCRIPT_DIR="`dirname "$(readlink -f "$0")"`" -else - SCRIPT_DIR="`dirname "$(readlinkf "$0")"`" -fi - -LOGFEEDER_ROOT_DIR="`dirname \"$SCRIPT_DIR\"`" -LOGFEEDER_LIBS_DIR="$LOGFEEDER_ROOT_DIR/libs" - -if [ "$LOGFEEDER_CONF_DIR" = "" ]; then - LOGFEEDER_CONF_DIR="/usr/lib/ambari-logsearch-logfeeder/conf" - if [ ! -d $LOGFEEDER_CONF_DIR ]; then - if [ -d "$LOGFEEDER_ROOT_DIR/conf" ]; then - LOGFEEDER_CONF_DIR="$LOGFEEDER_ROOT_DIR/conf" - fi - fi -fi - -if [ -f "$LOGFEEDER_CONF_DIR/logfeeder-env.sh" ]; then - source $LOGFEEDER_CONF_DIR/logfeeder-env.sh -fi - -JVM="java" - -if [ -x $JAVA_HOME/bin/java ]; then - JVM=$JAVA_HOME/bin/java -fi - -if [ ! -z "$LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE" ]; then - source $LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE -fi - -if [ -z "$LOGFEEDER_PID_FILE" ]; then - LOGFEEDER_DEFAULT_PID_DIR="/var/run/ambari-logsearch-logfeeder" - if [ -d "$LOGFEEDER_DEFAULT_PID_DIR" ]; then - LOGFEEDER_PID_DIR=$LOGFEEDER_DEFAULT_PID_DIR - else - LOGFEEDER_PID_DIR=$HOME - fi - export LOGFEEDER_PID_FILE=$LOGFEEDER_PID_DIR/logfeeder.pid -fi - -if [ -z "$LOG_FILE" ]; then - export LOG_FILE="logfeeder.log" -fi - -LOGFEEDER_GC_LOGFILE="logfeeder_gc.log" - -if [ -z "$LOG_PATH" ]; then - LOG_FILE="$HOME/$LOG_FILE" - LOGFEEDER_GC_LOGFILE="$HOME/$LOGFEEDER_GC_LOGFILE" -else - LOG_PATH_WITHOUT_SLASH=${LOG_PATH%/} - LOG_FILE="$LOG_PATH_WITHOUT_SLASH/$LOG_FILE" - LOGFEEDER_GC_LOGFILE="$LOG_PATH_WITHOUT_SLASH/$LOGFEEDER_GC_LOGFILE" -fi - -LOGFEEDER_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$LOGFEEDER_GC_LOGFILE" - -function print_usage() { - cat << EOF - - Usage: [] [] - - commands: - start Start Log Feeder - stop Stop Log Feeder - status Check Log Feeder status (pid file) - checkpoints Checkpoint operations - test Test Log Feeder shipper configs - help Print usage - - - start command arguments: - -d, --debug Start java process in debug mode - -f, --foreground Start java process in foreground - - test command arguments: - -h, --help Print usage - -tle, --test-log-entry Log entry to test if it's parseable (required) - -tsc, --test-shipper-config Shipper configuration file for testing if log entry is parseable (required) - -tgc, --test-global-config Global configuration files (comma separated list) for testing if log entry is parseable - -tli, --test-log-id The id of the log to test - - checkpoints command arguments: - -l, --list Print checkpoints - -cf, --checkpoints-folder Checkpoints folder location - -c, --clean Remove a checkpoint file (by key/log type or use on all) - -k, --file-key Filter on file key (for list and clean) - -lt, --log-type Filter on log type (for list and clean) - -a, --all Flag all checkpoints to be deleted by clean command - -EOF -} - -function spinner() { - local pid=$1 - local 
delay=0.5 - local spinstr='|/-\' - while [ "$(ps aux | awk '{print $2}' | grep -w $pid)" ]; do - local temp=${spinstr#?} - printf " [%c] " "$spinstr" - local spinstr=$temp${spinstr%"$temp"} - sleep $delay - printf "\b\b\b\b\b\b" - done - printf " \b\b\b\b" -} - -function status() { - echo "Checking Log Feeder status ..." >&2 - if [ -f "$LOGFEEDER_PID_FILE" ]; then - LOGFEEDER_PID=`cat "$LOGFEEDER_PID_FILE"` - else - echo "Log Feeder pid not exists. (probably the process is not running)" >&2 - return 1 - fi - - if ps -p $LOGFEEDER_PID > /dev/null - then - echo "Log Feeder process is running. (pid: $LOGFEEDER_PID)" >&2 - return 0 - else - echo "Log Feeder process is not running." >&2 - return 1 - fi -} - -function start() { - exit_status=$(status; echo $?) - if [ "$exit_status" = "0" ]; then - echo "Skipping start process." - exit 0 - fi - - FG="false" - LOGFEEDER_DEBUG_SUSPEND=${LOGFEEDER_DEBUG_SUSPEND:-n} - LOGFEEDER_DEBUG_PORT=${LOGFEEDER_DEBUG_PORT:-"5006"} - - if [ "$LOGFEEDER_DEBUG" = "true" ]; then - LOGFEEDER_JAVA_OPTS="$LOGFEEDER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$LOGFEEDER_DEBUG_PORT,server=y,suspend=$LOGFEEDER_DEBUG_SUSPEND " - fi - - if [ "$LOGFEEDER_SSL" = "true" ]; then - LOGFEEDER_JAVA_OPTS="$LOGFEEDER_JAVA_OPTS -Djavax.net.ssl.keyStore=$LOGFEEDER_KEYSTORE_LOCATION -Djavax.net.ssl.keyStoreType=$LOGFEEDER_KEYSTORE_TYPE -Djavax.net.ssl.trustStore=$LOGFEEDER_TRUSTSTORE_LOCATION -Djavax.net.ssl.trustStoreType=$LOGFEEDER_TRUSTSTORE_TYPE" - fi - - if [ "$LOGFEEDER_JMX" = "true" ]; then - LOGFEEDER_JAVA_OPTS="$LOGFEEDER_JAVA_OPTS -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=2098" - fi - - if [ $# -gt 0 ]; then - while true; do - case "$1" in - -f|--foreground) - FG="true" - shift - ;; - -d|--debug) - if [ "$LOGFEEDER_DEBUG" != "true" ]; then - LOGFEEDER_JAVA_OPTS="$LOGFEEDER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$LOGFEEDER_DEBUG_PORT,server=y,suspend=$LOGFEEDER_DEBUG_SUSPEND " - fi - shift - ;; - *) - if [ "${1:0:2}" == "-D" ]; then - # pass thru any opts that begin with -D (java system props) - LOGFEEDER_JAVA_OPTS+=("$1") - echo "$LOGFEEDER_JAVA_OPTS" - shift - else - if [ "$1" != "" ]; then - print_usage - exit 1 - else - break - fi - fi - ;; - esac - done - fi - - if [ $FG == "true" ]; then - echo "Starting logfeeder... (foreground) pid_file=$LOGFEEDER_PID_FILE" - echo "Run command $JVM -cp '$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*' $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_OPTS $LOGFEEDER_JAVA_MEM org.apache.ambari.logfeeder.LogFeeder" - $JVM -cp "$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*" $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_OPTS $LOGFEEDER_JAVA_MEM org.apache.ambari.logfeeder.LogFeeder - else - echo "Starting logfeeder... Output file=$LOG_FILE pid_file=$LOGFEEDER_PID_FILE" - echo "Run command nohup $JVM -cp '$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*' $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_OPTS $LOGFEEDER_JAVA_MEM org.apache.ambari.logfeeder.LogFeeder" - nohup $JVM -cp "$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*" $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_OPTS $LOGFEEDER_JAVA_MEM org.apache.ambari.logfeeder.LogFeeder > $LOG_FILE 2>&1 & - fi -} - -function stop() { - LOGFEEDER_STOP_WAIT=3 - if [ -f "$LOGFEEDER_PID_FILE" ]; then - LOGFEEDER_PID=`cat "$LOGFEEDER_PID_FILE"` - fi - - if [ "$LOGFEEDER_PID" != "" ]; then - echo -e "Sending stop command to Log Feeder... Checking PID: $LOGFEEDER_PID." 
- kill $LOGFEEDER_PID - (loops=0 - while true - do - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $LOGFEEDER_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - slept=$((loops * 2)) - if [ $slept -lt $LOGFEEDER_STOP_WAIT ]; then - sleep 2 - loops=$[$loops+1] - else - exit # subshell! - fi - else - exit # subshell! - fi - done) & - spinner $! - rm -f "$LOGFEEDER_PID_FILE" - else - echo -e "No LogFeeder process found to stop." - exit 0 - fi - - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $LOGFEEDER_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - echo -e "Log Feeder process $LOGFEEDER_PID is still running; forcefully killing it now." - kill -9 $LOGFEEDER_PID - echo "Killed process $LOGFEEDER_PID" - rm -f "$LOGFEEDER_PID_FILE" - sleep 1 - else - echo "Log Feeder is stopped." - fi - - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $LOGFEEDER_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - echo "ERROR: Failed to kill Log Feeder Java process $LOGFEEDER_PID ... script fails." - exit 1 - fi -} - -function test() { - echo "Running command: $JVM -cp "$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*" org.apache.ambari.logfeeder.LogFeederCommandLine --test ${@}" - $JVM -cp "$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*" $LOGFEEDER_JAVA_OPTS org.apache.ambari.logfeeder.LogFeederCommandLine --test ${@} -} - -function checkpoints() { - echo "Running command: $JVM -cp "$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*" org.apache.ambari.logfeeder.LogFeederCommandLine --checkpoints ${@}" - $JVM -cp "$LOGFEEDER_CONF_DIR:$LOGFEEDER_LIBS_DIR/*" $LOGFEEDER_JAVA_OPTS org.apache.ambari.logfeeder.LogFeederCommandLine --checkpoints ${@} -} - -if [ $# -gt 0 ]; then - SCRIPT_CMD="$1" - shift -else - print_usage - exit 1 -fi - -case $SCRIPT_CMD in - start) - start ${1+"$@"} - ;; - stop) - stop - ;; - status) - status - ;; - test) - test ${1+"$@"} - ;; - checkpoints) - checkpoints ${1+"$@"} - ;; - help) - print_usage - exit 0 - ;; - *) - print_usage - exit 1 - ;; - -esac \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/common/IdGeneratorHelperTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/common/IdGeneratorHelperTest.java deleted file mode 100644 index ae43ac04f6d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/common/IdGeneratorHelperTest.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
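For reference, the usage block above means a shipper configuration can be checked without starting the daemon, for example logfeeder.sh test -tle '2016-07-13 10:45:49,640 [WARN] Sample log line 1 - warn level' -tsc <shipper config file> (the sample line is the one shown earlier in log-samples/logs/service_sample.txt), while logfeeder.sh checkpoints -l prints the recorded checkpoints. Both subcommands simply hand their remaining arguments to org.apache.ambari.logfeeder.LogFeederCommandLine with the --test or --checkpoints flag, as the test() and checkpoints() functions above show.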
- */ -package org.apache.ambari.logfeeder.common; - -import org.junit.Test; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class IdGeneratorHelperTest { - - @Test - public void testGenerateRandomUUID() { - // GIVEN - Map fieldKeyMap = new HashMap<>(); - List fields = new ArrayList<>(); - // WHEN - String uuid1 = IdGeneratorHelper.generateUUID(fieldKeyMap, fields); - String uuid2 = IdGeneratorHelper.generateUUID(fieldKeyMap, fields); - // THEN - assertFalse(uuid1.equals(uuid2)); - } - - @Test - public void testUUIDFromFields() { - // GIVEN - Map fieldKeyMap1 = new HashMap<>(); - fieldKeyMap1.put("one-field", "1"); - Map fieldKeyMap2 = new HashMap<>(); - fieldKeyMap2.put("one-field", "1"); - List fields = new ArrayList<>(); - fields.add("one-field"); - // WHEN - String uuid1 = IdGeneratorHelper.generateUUID(fieldKeyMap1, fields); - String uuid2 = IdGeneratorHelper.generateUUID(fieldKeyMap2, fields); - // THEN - assertTrue(uuid1.equals(uuid2)); - } - - @Test - public void testUUIDFromFieldsWithMultipleFields() { - // GIVEN - Map fieldKeyMap1 = new HashMap<>(); - fieldKeyMap1.put("one-field", "1"); - fieldKeyMap1.put("two-field", "2"); - Map fieldKeyMap2 = new HashMap<>(); - fieldKeyMap2.put("one-field", "1"); - fieldKeyMap2.put("two-field", "2"); - List fields = new ArrayList<>(); - fields.add("one-field"); - fields.add("two-field"); - // WHEN - String uuid1 = IdGeneratorHelper.generateUUID(fieldKeyMap1, fields); - String uuid2 = IdGeneratorHelper.generateUUID(fieldKeyMap2, fields); - // THEN - assertTrue(uuid1.equals(uuid2)); - } - - @Test - public void testUUIDFromFieldsDifferentNumberOfFields() { - // GIVEN - Map fieldKeyMap1 = new HashMap<>(); - fieldKeyMap1.put("one-field", "1"); - Map fieldKeyMap2 = new HashMap<>(); - fieldKeyMap2.put("one-field", "1"); - fieldKeyMap2.put("two-field", "2"); - List fields = new ArrayList<>(); - fields.add("one-field"); - fields.add("two-field"); - // WHEN - String uuid1 = IdGeneratorHelper.generateUUID(fieldKeyMap1, fields); - String uuid2 = IdGeneratorHelper.generateUUID(fieldKeyMap2, fields); - // THEN - assertFalse(uuid1.equals(uuid2)); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java deleted file mode 100644 index 8d3967ba4d7..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
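The four tests above pin down the contract of IdGeneratorHelper.generateUUID() without its body appearing in this diff: an empty field list must produce a fresh random UUID, and identical values for the listed fields must produce identical UUIDs. The sketch below merely satisfies that contract under those assumptions and is not the deleted implementation.

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.UUID;

public class DeterministicIdSketch {
  public static String generateUUID(Map<String, Object> doc, List<String> idFields) {
    if (idFields.isEmpty()) {
      // No id fields configured: fall back to a random UUID, as the first test expects.
      return UUID.randomUUID().toString();
    }
    // Concatenate the selected field values in order and derive a name-based UUID,
    // so equal inputs give equal ids and extra fields change the result.
    StringBuilder sb = new StringBuilder();
    for (String field : idFields) {
      sb.append(doc.get(field)).append('|');
    }
    return UUID.nameUUIDFromBytes(sb.toString().getBytes(StandardCharsets.UTF_8)).toString();
  }
}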
- */ - -package org.apache.ambari.logfeeder.filter; - -import java.util.Map; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.FilterGrokDescriptorImpl; -import org.apache.log4j.Logger; -import org.easymock.Capture; -import org.easymock.CaptureType; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public class FilterGrokTest { - private static final Logger LOG = Logger.getLogger(FilterGrokTest.class); - - private FilterGrok filterGrok; - private OutputManager mockOutputManager; - private Capture> capture; - - public void init(FilterGrokDescriptor filterGrokDescriptor) throws Exception { - mockOutputManager = EasyMock.strictMock(OutputManager.class); - capture = EasyMock.newCapture(CaptureType.LAST); - - filterGrok = new FilterGrok(); - filterGrok.loadConfig(filterGrokDescriptor); - filterGrok.setOutputManager(mockOutputManager); - filterGrok.setInput(EasyMock.mock(Input.class)); - filterGrok.init(new LogFeederProps()); - } - - @Test - public void testFilterGrok_parseMessage() throws Exception { - LOG.info("testFilterGrok_parseMessage()"); - - FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl(); - filterGrokDescriptor.setMessagePattern("(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}"); - filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})"); - init(filterGrokDescriptor); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputFileMarker(null, null, 0)); - filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertNotNull(jsonParams); - assertEquals("Incorrect parsing: log time", "2016-04-08 15:55:23,548", jsonParams.remove("logtime")); - assertEquals("Incorrect parsing: log level", "INFO", jsonParams.remove("level")); - assertEquals("Incorrect parsing: log message", "This is a test", jsonParams.remove("log_message")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - @Test - public void testFilterGrok_parseMultiLineMessage() throws Exception { - LOG.info("testFilterGrok_parseMultiLineMessage()"); - - FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl(); - filterGrokDescriptor.setMessagePattern("(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}"); - filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})"); - init(filterGrokDescriptor); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - String multiLineMessage = "This is a multiline test message\r\n" + "having multiple lines\r\n" - + "as one may expect"; - 
String[] messageLines = multiLineMessage.split("\r\n"); - for (int i = 0; i < messageLines.length; i++) - filterGrok.apply((i == 0 ? "2016-04-08 15:55:23,548 INFO " : "") + messageLines[i], new InputFileMarker(null, null, 0)); - filterGrok.flush(); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertNotNull(jsonParams); - assertEquals("Incorrect parsing: log time", "2016-04-08 15:55:23,548", jsonParams.remove("logtime")); - assertEquals("Incorrect parsing: log level", "INFO", jsonParams.remove("level")); - assertEquals("Incorrect parsing: log message", multiLineMessage, jsonParams.remove("log_message")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - @Test - public void testFilterGrok_notMatchingMesagePattern() throws Exception { - LOG.info("testFilterGrok_notMatchingMesagePattern()"); - - FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl(); - filterGrokDescriptor.setMessagePattern("(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}"); - filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})"); - init(filterGrokDescriptor); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall().anyTimes(); - EasyMock.replay(mockOutputManager); - - filterGrok.apply("04/08/2016 15:55:23,548 INFO This is a test", new InputFileMarker(null, null, 0)); - filterGrok.apply("04/08/2016 15:55:24,548 WARN Next message", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - assertFalse("Something was captured!", capture.hasCaptured()); - } - - @Test - public void testFilterGrok_noMesagePattern() throws Exception { - LOG.info("testFilterGrok_noMesagePattern()"); - - FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl(); - filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})"); - init(filterGrokDescriptor); - - EasyMock.replay(mockOutputManager); - - filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputFileMarker(null, null, 0)); - filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - assertFalse("Something was captured", capture.hasCaptured()); - } - - @After - public void cleanUp() { - capture.reset(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java deleted file mode 100644 index a328eb812ed..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.filter; - -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Map; -import java.util.TimeZone; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.common.LogFeederException; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.FilterJsonDescriptorImpl; -import org.apache.log4j.Logger; -import org.easymock.Capture; -import org.easymock.CaptureType; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -public class FilterJSONTest { - private static final Logger LOG = Logger.getLogger(FilterJSONTest.class); - - private FilterJSON filterJson; - private OutputManager mockOutputManager; - private Capture> capture; - - public void init(FilterJsonDescriptorImpl filterJsonDescriptor) throws Exception { - mockOutputManager = EasyMock.strictMock(OutputManager.class); - capture = EasyMock.newCapture(CaptureType.LAST); - - filterJson = new FilterJSON(); - filterJson.loadConfig(filterJsonDescriptor); - filterJson.setOutputManager(mockOutputManager); - filterJson.init(new LogFeederProps()); - } - - @Test - public void testJSONFilterCode_convertFields() throws Exception { - LOG.info("testJSONFilterCode_convertFields()"); - - init(new FilterJsonDescriptorImpl()); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - Date d = new Date(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - String dateString = sdf.format(d); - filterJson.apply("{ logtime: '" + d.getTime() + "', line_number: 100 }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertEquals("Incorrect decoding: log time", dateString, jsonParams.remove("logtime")); - assertEquals("Incorrect decoding: in memory timestamp", d.getTime(), jsonParams.remove(LogFeederConstants.IN_MEMORY_TIMESTAMP)); - assertEquals("Incorrect decoding: line number", 100l, jsonParams.remove("line_number")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - @Test - public void testJSONFilterCode_logTimeOnly() throws Exception { - LOG.info("testJSONFilterCode_logTimeOnly()"); - - init(new FilterJsonDescriptorImpl()); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - Date d = new Date(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - String dateString = sdf.format(d); - filterJson.apply("{ 
logtime: '" + d.getTime() + "', some_field: 'abc' }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertEquals("Incorrect decoding: log time", dateString, jsonParams.remove("logtime")); - assertEquals("Incorrect decoding: in memory timestamp", d.getTime(), jsonParams.remove(LogFeederConstants.IN_MEMORY_TIMESTAMP)); - assertEquals("Incorrect decoding: some field", "abc", jsonParams.remove("some_field")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - @Test - public void testJSONFilterCode_lineNumberOnly() throws Exception { - LOG.info("testJSONFilterCode_lineNumberOnly()"); - - init(new FilterJsonDescriptorImpl()); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - filterJson.apply("{ line_number: 100, some_field: 'abc' }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertEquals("Incorrect decoding: line number", 100l, jsonParams.remove("line_number")); - assertEquals("Incorrect decoding: some field", "abc", jsonParams.remove("some_field")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - - @Test - public void testJSONFilterCode_invalidJson() throws Exception { - LOG.info("testJSONFilterCode_invalidJson()"); - - init(new FilterJsonDescriptorImpl()); - - String inputStr = "invalid json"; - try{ - filterJson.apply(inputStr,new InputFileMarker(null, null, 0)); - fail("Expected LogFeederException was not occured"); - } catch(LogFeederException logFeederException) { - assertEquals("Json parsing failed for inputstr = " + inputStr, logFeederException.getLocalizedMessage()); - } - } - - @After - public void cleanUp() { - capture.reset(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java deleted file mode 100644 index efda7e2307e..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java +++ /dev/null @@ -1,150 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.logfeeder.filter; - -import java.util.Map; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.plugin.manager.OutputManager; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.FilterKeyValueDescriptorImpl; -import org.apache.log4j.Logger; -import org.easymock.Capture; -import org.easymock.CaptureType; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class FilterKeyValueTest { - private static final Logger LOG = Logger.getLogger(FilterKeyValueTest.class); - - private FilterKeyValue filterKeyValue; - private OutputManager mockOutputManager; - private Capture> capture; - - public void init(FilterKeyValueDescriptor filterKeyValueDescriptor) throws Exception { - mockOutputManager = EasyMock.strictMock(OutputManager.class); - capture = EasyMock.newCapture(CaptureType.LAST); - - filterKeyValue = new FilterKeyValue(); - filterKeyValue.loadConfig(filterKeyValueDescriptor); - filterKeyValue.setOutputManager(mockOutputManager); - filterKeyValue.init(new LogFeederProps()); - } - - @Test - public void testFilterKeyValue_extraction() throws Exception { - LOG.info("testFilterKeyValue_extraction()"); - - FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl(); - filterKeyValueDescriptor.setSourceField("keyValueField"); - filterKeyValueDescriptor.setFieldSplit("&"); - init(filterKeyValueDescriptor); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertEquals("Original missing!", "name1=value1&name2=value2", jsonParams.remove("keyValueField")); - assertEquals("Incorrect extraction: name1", "value1", jsonParams.remove("name1")); - assertEquals("Incorrect extraction: name2", "value2", jsonParams.remove("name2")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - @Test - public void testFilterKeyValue_extractionWithBorders() throws Exception { - LOG.info("testFilterKeyValue_extractionWithBorders()"); - - FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl(); - filterKeyValueDescriptor.setSourceField("keyValueField"); - filterKeyValueDescriptor.setFieldSplit("&"); - filterKeyValueDescriptor.setValueBorders("()"); - init(filterKeyValueDescriptor); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall(); - EasyMock.replay(mockOutputManager); - - filterKeyValue.apply("{ keyValueField: 'name1(value1)&name2(value2)' }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertEquals("Original missing!", "name1(value1)&name2(value2)", jsonParams.remove("keyValueField")); - assertEquals("Incorrect extraction: name1", "value1", jsonParams.remove("name1")); - assertEquals("Incorrect extraction: name2", "value2", jsonParams.remove("name2")); - assertTrue("jsonParams 
are not empty!", jsonParams.isEmpty()); - } - - @Test - public void testFilterKeyValue_missingSourceField() throws Exception { - LOG.info("testFilterKeyValue_missingSourceField()"); - - FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl(); - filterKeyValueDescriptor.setFieldSplit("&"); - init(filterKeyValueDescriptor); - - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall().anyTimes(); - EasyMock.replay(mockOutputManager); - - filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - assertFalse("Something was captured!", capture.hasCaptured()); - } - - @Test - public void testFilterKeyValue_noSourceFieldPresent() throws Exception { - LOG.info("testFilterKeyValue_noSourceFieldPresent()"); - - FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl(); - filterKeyValueDescriptor.setSourceField("keyValueField"); - filterKeyValueDescriptor.setFieldSplit("&"); - init(filterKeyValueDescriptor); - - // using default value split: = - mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputFileMarker.class)); - EasyMock.expectLastCall().anyTimes(); - EasyMock.replay(mockOutputManager); - - filterKeyValue.apply("{ otherField: 'name1=value1&name2=value2' }", new InputFileMarker(null, null, 0)); - - EasyMock.verify(mockOutputManager); - Map jsonParams = capture.getValue(); - - assertEquals("Original missing!", "name1=value1&name2=value2", jsonParams.remove("otherField")); - assertTrue("jsonParams are not empty!", jsonParams.isEmpty()); - } - - @After - public void cleanUp() { - capture.reset(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java deleted file mode 100644 index 0a953427df1..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.logfeeder.input; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.List; - -import org.apache.ambari.logfeeder.conf.LogEntryCacheConfig; -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.file.checkpoint.FileCheckpointManager; -import org.apache.ambari.logfeeder.plugin.filter.Filter; -import org.apache.ambari.logfeeder.plugin.input.InputMarker; -import org.apache.ambari.logfeeder.plugin.manager.CheckpointManager; -import org.apache.ambari.logfeeder.plugin.manager.InputManager; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputFileDescriptorImpl; -import org.apache.commons.io.FileUtils; -import org.apache.log4j.Logger; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.junit.Assert.assertEquals; - -public class InputFileTest { - private static final Logger LOG = Logger.getLogger(InputFileTest.class); - - private static final String TEST_DIR_NAME = "/logfeeder_test_dir/"; - private static final File TEST_DIR = new File(FileUtils.getTempDirectoryPath() + TEST_DIR_NAME); - - private static final String TEST_LOG_FILE_CONTENT = "2016-03-10 14:09:38,278 INFO datanode.DataNode (DataNode.java:(418)) - File descriptor passing is enabled.\n" - + "2016-03-10 14:09:38,278 INFO datanode.DataNode (DataNode.java:(429)) - Configured hostname is c6401.ambari.apache.org\n" - + "2016-03-10 14:09:38,294 INFO datanode.DataNode (DataNode.java:startDataNode(1127)) - Starting DataNode with maxLockedMemory = 0\n" - + "2016-03-10 14:09:38,340 INFO datanode.DataNode (DataNode.java:initDataXceiver(921)) - Opened streaming server at /0.0.0.0:50010\n" - + "2016-03-10 14:09:38,343 INFO datanode.DataNode (DataXceiverServer.java:(76)) - Balancing bandwith is 6250000 bytes/s\n" - + "2016-03-10 14:09:38,343 INFO datanode.DataNode (DataXceiverServer.java:(77)) - Number threads for balancing is 5\n" - + "2016-03-10 14:09:38,345 INFO datanode.DataNode (DataXceiverServer.java:(76)) - Balancing bandwith is 6250000 bytes/s\n" - + "2016-03-10 14:09:38,346 INFO datanode.DataNode (DataXceiverServer.java:(77)) - Number threads for balancing is 5\n"; - - private static final String[] TEST_LOG_FILE_ROWS = TEST_LOG_FILE_CONTENT.split("\n"); - private InputFile inputFile; - private List rows = new ArrayList<>(); - - private InputFileMarker testInputMarker; - - private LogFeederProps logFeederProps; - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @BeforeClass - public static void initDir() throws IOException { - if (!TEST_DIR.exists()) { - TEST_DIR.mkdir(); - } - FileUtils.cleanDirectory(TEST_DIR); - } - - @AfterClass - public static void deleteDir() throws IOException { - if (TEST_DIR.exists()) { - FileUtils.deleteDirectory(TEST_DIR); - } - } - - @Before - public void setUp() throws Exception { - logFeederProps = new LogFeederProps(); - LogEntryCacheConfig logEntryCacheConfig = new LogEntryCacheConfig(); - logEntryCacheConfig.setCacheEnabled(false); - logEntryCacheConfig.setCacheLastDedupEnabled(false); - logEntryCacheConfig.setCacheSize(10); - logFeederProps.setLogEntryCacheConfig(logEntryCacheConfig); - logFeederProps.setCheckpointFolder("process3_checkpoint"); - testInputMarker = new 
InputFileMarker(inputFile, "", 0); - } - - public void init(String path) throws Exception { - InputFileDescriptorImpl inputFileDescriptor = new InputFileDescriptorImpl(); - inputFileDescriptor.setSource("file"); - inputFileDescriptor.setTail(true); - inputFileDescriptor.setGenEventMd5(true); - inputFileDescriptor.setType("hdfs_datanode"); - inputFileDescriptor.setRowtype("service"); - inputFileDescriptor.setPath(path); - - Filter capture = new Filter() { - @Override - public void init(LogFeederProps logFeederProps) { - } - - @Override - public String getShortDescription() { - return null; - } - - @Override - public void apply(String inputStr, InputMarker inputMarker) { - rows.add(inputStr); - if (rows.size() % 3 == 0) - inputFile.setDrain(true); - } - }; - - inputFile = new InputFile(); - inputFile.loadConfig(inputFileDescriptor); - inputFile.addFilter(capture); - inputFile.init(logFeederProps); - } - - @Test - public void testInputFile_process3Rows() throws Exception { - LOG.info("testInputFile_process3Rows()"); - - File testFile = createFile("process3.log"); - - init(testFile.getAbsolutePath()); - - InputManager inputManager = EasyMock.createStrictMock(InputManager.class); - CheckpointManager checkpointManager = new FileCheckpointManager(); - EasyMock.expect(inputManager.getCheckpointHandler()).andReturn(checkpointManager); - EasyMock.replay(inputManager); - inputFile.setInputManager(inputManager); - - inputFile.isReady(); - inputFile.start(); - - assertEquals("Amount of the rows is incorrect", rows.size(), 3); - for (int row = 0; row < 3; row++) - assertEquals("Row #" + (row + 1) + " not correct", TEST_LOG_FILE_ROWS[row], rows.get(row)); - - EasyMock.verify(inputManager); - } - - @Test - public void testInputFile_noLogPath() throws Exception { - LOG.info("testInputFile_noLogPath()"); - - expectedException.expect(NullPointerException.class); - - init(null); - inputFile.isReady(); - } - - @After - public void tearDown() throws Exception { - rows.clear(); - } - - @AfterClass - public static void cleanUp() throws Exception { - FileUtils.deleteDirectory(TEST_DIR); - } - - private File createFile(String filename) throws IOException { - File newFile = new File(FileUtils.getTempDirectoryPath() + TEST_DIR_NAME + filename); - FileUtils.writeStringToFile(newFile, TEST_LOG_FILE_CONTENT, Charset.defaultCharset()); - return newFile; - } - - private File createCheckpointDir(String dirname) { - File newDir = new File(TEST_DIR + "/" + dirname); - if (!newDir.exists()) { - newDir.mkdir(); - } - return newDir; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java deleted file mode 100644 index 574fa355423..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.input; - -import static org.easymock.EasyMock.*; -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.junit.Test; - -public class InputManagerTest { - - @Test - public void testInputManager_addAndRemoveInputs() { - Input input1 = strictMock(Input.class); - Input input2 = strictMock(Input.class); - Input input3 = strictMock(Input.class); - Input input4 = strictMock(Input.class); - - expect(input3.getShortDescription()).andReturn("").times(2); - expect(input4.getShortDescription()).andReturn("").once(); - - replay(input1, input2, input3, input4); - - InputManagerImpl manager = new InputManagerImpl(); - manager.add("serviceName", input1); - manager.add("serviceName", input2); - manager.add("serviceName", input3); - - manager.removeInput(input3); - manager.removeInput(input4); - - verify(input1, input2, input3, input4); - - List inputList = manager.getInputList("serviceName"); - assertEquals(inputList.size(), 2); - assertEquals(inputList.get(0), input1); - assertEquals(inputList.get(1), input2); - } - - @Test - public void testInputManager_monitor() throws Exception { - Input input1 = strictMock(Input.class); - Input input2 = strictMock(Input.class); - Input input3 = strictMock(Input.class); - - LogFeederProps logFeederProps = new LogFeederProps(); - - input1.init(logFeederProps); expectLastCall(); - input2.init(logFeederProps); expectLastCall(); - input3.init(logFeederProps); expectLastCall(); - - expect(input1.isReady()).andReturn(true); - expect(input2.isReady()).andReturn(true); - expect(input3.isReady()).andReturn(false); - - expect(input1.monitor()).andReturn(false); - expect(input2.monitor()).andReturn(false); - expect(input3.getShortDescription()).andReturn("").once(); - - replay(input1, input2, input3); - - InputManagerImpl manager = new InputManagerImpl(); - manager.setLogFeederProps(logFeederProps); - manager.add("serviceName", input1); - manager.add("serviceName", input2); - manager.add("serviceName", input3); - - manager.startInputs("serviceName"); - - verify(input1, input2, input3); - } - - - @Test - public void testInputManager_addMetricsContainers() throws Exception { - List metrics = new ArrayList(); - - Input input1 = strictMock(Input.class); - Input input2 = strictMock(Input.class); - Input input3 = strictMock(Input.class); - - input1.addMetricsContainers(metrics); expectLastCall(); - input2.addMetricsContainers(metrics); expectLastCall(); - input3.addMetricsContainers(metrics); expectLastCall(); - - expect(input1.isReady()).andReturn(true); - expect(input2.isReady()).andReturn(true); - expect(input3.isReady()).andReturn(false); - - replay(input1, input2, input3); - - InputManagerImpl manager = new InputManagerImpl(); - manager.add("serviceName", input1); - manager.add("serviceName", input2); - manager.add("serviceName", input3); - - manager.addMetricsContainers(metrics); - - verify(input1, 
input2, input3); - } - - @Test - public void testInputManager_logStat() throws Exception { - Input input1 = strictMock(Input.class); - Input input2 = strictMock(Input.class); - Input input3 = strictMock(Input.class); - - input1.logStat(); expectLastCall(); - input2.logStat(); expectLastCall(); - input3.logStat(); expectLastCall(); - - expect(input1.isReady()).andReturn(true); - expect(input2.isReady()).andReturn(true); - expect(input3.isReady()).andReturn(false); - - replay(input1, input2, input3); - - InputManagerImpl manager = new InputManagerImpl(); - manager.add("serviceName", input1); - manager.add("serviceName", input2); - manager.add("serviceName", input3); - - manager.logStats(); - - verify(input1, input2, input3); - } - - @Test - public void testInputManager_checkInAll() throws Exception { - Input input1 = strictMock(Input.class); - Input input2 = strictMock(Input.class); - Input input3 = strictMock(Input.class); - - input1.lastCheckIn(); expectLastCall(); - input2.lastCheckIn(); expectLastCall(); - input3.lastCheckIn(); expectLastCall(); - - replay(input1, input2, input3); - - InputManagerImpl manager = new InputManagerImpl(); - manager.add("serviceName", input1); - manager.add("serviceName", input2); - manager.add("serviceName", input3); - - manager.checkInAll(); - - verify(input1, input2, input3); - } - - @Test - public void testInputManager_close() throws Exception { - Input input1 = strictMock(Input.class); - Input input2 = strictMock(Input.class); - Input input3 = strictMock(Input.class); - - input1.setDrain(true); expectLastCall(); - input2.setDrain(true); expectLastCall(); - input3.setDrain(true); expectLastCall(); - - expect(input1.isClosed()).andReturn(true); - expect(input2.isClosed()).andReturn(true); - expect(input3.isClosed()).andReturn(true); - - replay(input1, input2, input3); - - InputManagerImpl manager = new InputManagerImpl(); - manager.add("serviceName", input1); - manager.add("serviceName", input2); - manager.add("serviceName", input3); - - manager.close(); - - verify(input1, input2, input3); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/cache/LRUCacheTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/cache/LRUCacheTest.java deleted file mode 100644 index 4ff818a0a83..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/cache/LRUCacheTest.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.input.cache; - -import org.apache.ambari.logfeeder.plugin.input.cache.LRUCache; -import org.joda.time.DateTime; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -public class LRUCacheTest { - - private LRUCache underTest; - - @Before - public void setUp() { - underTest = new LRUCache(4, "/mypath", Long.parseLong("1000"), true); - } - - @Test - public void testLruCachePut() { - // GIVEN - // WHEN - underTest.put("mymessage1", 1000L); - underTest.put("mymessage2", 1000L); - underTest.put("mymessage3", 1000L); - underTest.put("mymessage4", 1000L); - underTest.put("mymessage5", 1000L); - underTest.put("mymessage1", 1500L); - underTest.put("mymessage1", 3500L); - underTest.put("mymessage5", 1700L); - // THEN - assertEquals((Long) 1500L, underTest.get("mymessage1")); - assertEquals((Long) 1000L, underTest.get("mymessage5")); - assertEquals(underTest.getMRUKey(), "mymessage5"); - assertEquals(4, underTest.size()); - assertFalse(underTest.containsKey("mymessage2")); - } - - @Test - public void testLruCacheFilterMruKeys() { - // GIVEN - // WHEN - underTest.put("mymessage1", 1000L); - underTest.put("mymessage1", 3000L); - underTest.put("mymessage1", 5000L); - underTest.put("mymessage1", 7000L); - // THEN - assertEquals((Long) 1000L, underTest.get("mymessage1")); - } - - @Test - public void testLruCacheDoNotFilterMruKeysIfLastDedupDisabled() { - // GIVEN - underTest = new LRUCache(4, "/mypath", 1000, false); - // WHEN - underTest.put("mymessage1", 1000L); - underTest.put("mymessage1", 3000L); - // THEN - assertEquals((Long) 3000L, underTest.get("mymessage1")); - } - - @Test - public void testLruCacheFilterByDedupInterval() { - // GIVEN - // WHEN - underTest.put("mymessage1", 1000L); - underTest.put("mymessage2", 1000L); - underTest.put("mymessage1", 1250L); - underTest.put("mymessage2", 1500L); - underTest.put("mymessage1", 1500L); - underTest.put("mymessage2", 2100L); - // THEN - assertEquals((Long) 1000L, underTest.get("mymessage1")); - assertEquals((Long) 2100L, underTest.get("mymessage2")); - } - - @Test - public void testLruCacheWithDates() { - // GIVEN - DateTime firstDate = DateTime.now(); - DateTime secondDate = firstDate.plusMillis(500); - // WHEN - underTest.put("mymessage1", firstDate.toDate().getTime()); - underTest.put("mymessage2", firstDate.toDate().getTime()); - underTest.put("mymessage1", secondDate.toDate().getTime()); - // THEN - assertEquals((Long) firstDate.toDate().getTime(), underTest.get("mymessage1")); - assertEquals((Long) firstDate.toDate().getTime(), underTest.get("mymessage2")); - } - - @Test - public void testLruCacheWithDatesReachDedupInterval() { - // GIVEN - DateTime firstDate = DateTime.now(); - DateTime secondDate = firstDate.plusMillis(1500); - // WHEN - underTest.put("mymessage1", firstDate.toDate().getTime()); - underTest.put("mymessage2", firstDate.toDate().getTime()); - underTest.put("mymessage1", secondDate.toDate().getTime()); - // THEN - assertEquals((Long) secondDate.toDate().getTime(), underTest.get("mymessage1")); - assertEquals((Long) firstDate.toDate().getTime(), underTest.get("mymessage2")); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperAnonymizeTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperAnonymizeTest.java deleted file mode 100644 index c22f31269f4..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperAnonymizeTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.mapper; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.MapAnonymizeDescriptorImpl; -import org.apache.log4j.Logger; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class MapperAnonymizeTest { - private static final Logger LOG = Logger.getLogger(MapperAnonymizeTest.class); - - @Test - public void testMapperAnonymize_anonymize() { - LOG.info("testMapperAnonymize_anonymize()"); - - MapAnonymizeDescriptorImpl mapAnonymizeDescriptorImpl = new MapAnonymizeDescriptorImpl(); - mapAnonymizeDescriptorImpl.setPattern("secret / is here"); - - MapperAnonymize mapperAnonymize = new MapperAnonymize(); - assertTrue("Could not initialize!", mapperAnonymize.init(null, "someField", null, mapAnonymizeDescriptorImpl)); - - Map jsonObj = new HashMap<>(); - mapperAnonymize.apply(jsonObj, "something else secret SECRET1 / SECRET2 is here something else 2"); - - assertEquals("Field wasnt anonymized", "something else secret ******* / ******* is here something else 2", jsonObj.remove("someField")); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperAnonymize_anonymize2() { - LOG.info("testMapperAnonymize_anonymize2()"); - - MapAnonymizeDescriptorImpl mapAnonymizeDescriptorImpl = new MapAnonymizeDescriptorImpl(); - mapAnonymizeDescriptorImpl.setPattern(" / is the secret"); - mapAnonymizeDescriptorImpl.setHideChar('X'); - - MapperAnonymize mapperAnonymize = new MapperAnonymize(); - assertTrue("Could not initialize!", mapperAnonymize.init(null, "someField", null, mapAnonymizeDescriptorImpl)); - - Map jsonObj = new HashMap<>(); - mapperAnonymize.apply(jsonObj, "something else SECRET1 / SECRET2 is the secret something else 2"); - - assertEquals("Field wasnt anonymized", "something else XXXXXXX / XXXXXXX is the secret something else 2", jsonObj.remove("someField")); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperAnonymize_noPattern() { - LOG.info("testMapperAnonymize_noPattern()"); - - MapAnonymizeDescriptorImpl mapAnonymizeDescriptorImpl = new MapAnonymizeDescriptorImpl(); - - MapperAnonymize mapperAnonymize = new MapperAnonymize(); - assertFalse("Was not able to initialize!", mapperAnonymize.init(null, "someField", null, mapAnonymizeDescriptorImpl)); - } -} diff --git 
a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java deleted file mode 100644 index 5e94996eb09..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.mapper; - -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.MapDateDescriptorImpl; -import org.apache.commons.lang3.time.DateUtils; -import org.apache.log4j.Logger; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class MapperDateTest { - private static final Logger LOG = Logger.getLogger(MapperDateTest.class); - - @Test - public void testMapperDate_epoch() { - LOG.info("testMapperDate_epoch()"); - - MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl(); - mapDateDescriptor.setTargetDatePattern("epoch"); - - MapperDate mapperDate = new MapperDate(); - assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor)); - - Map jsonObj = new HashMap<>(); - - Date d = DateUtils.truncate(new Date(), Calendar.SECOND); - Object mappedValue = mapperDate.apply(jsonObj, Long.toString(d.getTime() / 1000)); - - assertEquals("Value wasn't matched properly", d, mappedValue); - assertEquals("Value wasn't put into jsonObj", d, jsonObj.remove("someField")); - assertEquals("Value wasn't put into jsonObj", d.getTime(), jsonObj.remove(LogFeederConstants.IN_MEMORY_TIMESTAMP)); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperDate_pattern() throws Exception { - LOG.info("testMapperDate_pattern()"); - - MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl(); - mapDateDescriptor.setTargetDatePattern("yyyy-MM-dd HH:mm:ss.SSS"); - - MapperDate mapperDate = new MapperDate(); - assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor)); - - Map jsonObj = new HashMap<>(); - String dateString = "2016-04-08 15:55:23.548"; - Object mappedValue = mapperDate.apply(jsonObj, dateString); - - Date d = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(dateString); - - assertEquals("Value wasn't matched properly", d, mappedValue); - assertEquals("Value wasn't put into 
jsonObj", d, jsonObj.remove("someField")); - assertEquals("Value wasn't put into jsonObj", d.getTime(), jsonObj.remove(LogFeederConstants.IN_MEMORY_TIMESTAMP)); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperDate_noDatePattern() { - LOG.info("testMapperDate_noDatePattern()"); - - MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl(); - - MapperDate mapperDate = new MapperDate(); - assertFalse("Was not able to initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor)); - } - - @Test - public void testMapperDate_notParsableDatePattern() { - LOG.info("testMapperDate_notParsableDatePattern()"); - - MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl(); - mapDateDescriptor.setTargetDatePattern("not_parsable_content"); - - MapperDate mapperDate = new MapperDate(); - assertFalse("Was not able to initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor)); - } - - @Test - public void testMapperDate_invalidEpochValue() { - LOG.info("testMapperDate_invalidEpochValue()"); - - MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl(); - mapDateDescriptor.setTargetDatePattern("epoch"); - - MapperDate mapperDate = new MapperDate(); - assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor)); - - Map jsonObj = new HashMap<>(); - String invalidValue = "abc"; - Object mappedValue = mapperDate.apply(jsonObj, invalidValue); - - assertEquals("Invalid value wasn't returned as it is", invalidValue, mappedValue); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperDate_invalidDateStringValue() { - LOG.info("testMapperDate_invalidDateStringValue()"); - - MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl(); - mapDateDescriptor.setTargetDatePattern("yyyy-MM-dd HH:mm:ss.SSS"); - - MapperDate mapperDate = new MapperDate(); - assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor)); - - Map jsonObj = new HashMap<>(); - String invalidValue = "abc"; - Object mappedValue = mapperDate.apply(jsonObj, invalidValue); - - assertEquals("Invalid value wasn't returned as it is", invalidValue, mappedValue); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java deleted file mode 100644 index 5c6cc93771f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.mapper; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.MapFieldCopyDescriptorImpl; -import org.apache.log4j.Logger; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class MapperFieldCopyTest { - private static final Logger LOG = Logger.getLogger(MapperFieldCopyTest.class); - - @Test - public void testMapperFieldCopy_copyField() { - LOG.info("testMapperFieldCopy_copyField()"); - - MapFieldCopyDescriptorImpl mapFieldCopyDescriptor = new MapFieldCopyDescriptorImpl(); - mapFieldCopyDescriptor.setCopyName("someOtherField"); - - MapperFieldCopy mapperFieldCopy = new MapperFieldCopy(); - assertTrue("Could not initialize!", mapperFieldCopy.init(null, "someField", null, mapFieldCopyDescriptor)); - - Map<String, Object> jsonObj = new HashMap<>(); - jsonObj.put("someField", "someValue"); - - mapperFieldCopy.apply(jsonObj, "someValue"); - - assertEquals("Old field name wasn't removed", "someValue", jsonObj.remove("someField")); - assertEquals("New field wasn't put", "someValue", jsonObj.remove("someOtherField")); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperFieldCopy_noNewFieldName() { - LOG.info("testMapperFieldCopy_noNewFieldName()"); - - MapFieldCopyDescriptorImpl mapFieldCopyDescriptor = new MapFieldCopyDescriptorImpl(); - - MapperFieldCopy mapperFieldCopy = new MapperFieldCopy(); - assertFalse("Was not able to initialize!", mapperFieldCopy.init(null, "someField", null, mapFieldCopyDescriptor)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java deleted file mode 100644 index f74c9f8f858..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.ambari.logfeeder.mapper; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.MapFieldNameDescriptorImpl; -import org.apache.log4j.Logger; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class MapperFieldNameTest { - private static final Logger LOG = Logger.getLogger(MapperFieldNameTest.class); - - @Test - public void testMapperFieldName_replaceField() { - LOG.info("testMapperFieldName_replaceField()"); - - MapFieldNameDescriptorImpl mapFieldNameDescriptor = new MapFieldNameDescriptorImpl(); - mapFieldNameDescriptor.setNewFieldName("someOtherField"); - - MapperFieldName mapperFieldName = new MapperFieldName(); - assertTrue("Could not initialize!", mapperFieldName.init(null, "someField", null, mapFieldNameDescriptor)); - - Map<String, Object> jsonObj = new HashMap<>(); - jsonObj.put("someField", "someValue"); - - mapperFieldName.apply(jsonObj, "someOtherValue"); - - assertFalse("Old field name wasn't removed", jsonObj.containsKey("someField")); - assertEquals("New field wasn't put", "someOtherValue", jsonObj.remove("someOtherField")); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperFieldName_noNewFieldName() { - LOG.info("testMapperFieldName_noNewFieldName()"); - - MapFieldNameDescriptorImpl mapFieldNameDescriptor = new MapFieldNameDescriptorImpl(); - - MapperFieldName mapperFieldName = new MapperFieldName(); - assertFalse("Was able to initialize!", mapperFieldName.init(null, "someField", null, mapFieldNameDescriptor)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java deleted file mode 100644 index 92befa9e621..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.ambari.logfeeder.mapper; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.MapFieldValueDescriptorImpl; -import org.apache.log4j.Logger; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class MapperFieldValueTest { - private static final Logger LOG = Logger.getLogger(MapperFieldValueTest.class); - - @Test - public void testMapperFieldValue_replaceValue() { - LOG.info("testMapperFieldValue_replaceValue()"); - - MapFieldValueDescriptorImpl mapFieldValueDescriptor = new MapFieldValueDescriptorImpl(); - mapFieldValueDescriptor.setPreValue("someValue"); - mapFieldValueDescriptor.setPostValue("someOtherValue"); - - MapperFieldValue mapperFieldValue = new MapperFieldValue(); - assertTrue("Could not initialize!", mapperFieldValue.init(null, "someField", null, mapFieldValueDescriptor)); - - Map jsonObj = new HashMap<>(); - - Object mappedValue = mapperFieldValue.apply(jsonObj, "someValue"); - - assertEquals("Value wasn't mapped", "someOtherValue", mappedValue); - assertEquals("New field wasn't put into jsonObj", "someOtherValue", jsonObj.remove("someField")); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } - - @Test - public void testMapperFieldValue_noPostValue() { - LOG.info("testMapperFieldValue_noPostValue()"); - - MapFieldValueDescriptorImpl mapFieldValueDescriptor = new MapFieldValueDescriptorImpl(); - - MapperFieldValue mapperFieldValue = new MapperFieldValue(); - assertFalse("Was not able to initialize!", mapperFieldValue.init(null, "someField", null, mapFieldValueDescriptor)); - } - - @Test - public void testMapperFieldValue_noPreValueFound() { - LOG.info("testMapperFieldValue_noPreValueFound()"); - - MapFieldValueDescriptorImpl mapFieldValueDescriptor = new MapFieldValueDescriptorImpl(); - mapFieldValueDescriptor.setPreValue("someValue"); - mapFieldValueDescriptor.setPostValue("someOtherValue"); - - MapperFieldValue mapperFieldValue = new MapperFieldValue(); - assertTrue("Could not initialize!", mapperFieldValue.init(null, "someField", null, mapFieldValueDescriptor)); - - Map jsonObj = new HashMap<>(); - - Object mappedValue = mapperFieldValue.apply(jsonObj, "yetAnotherValue"); - - assertEquals("Value was mapped", "yetAnotherValue", mappedValue); - assertTrue("jsonObj is not empty", jsonObj.isEmpty()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetricsManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetricsManagerTest.java deleted file mode 100644 index 9699156b7fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetricsManagerTest.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.metrics; - -import static org.easymock.EasyMock.*; -import static org.junit.Assert.*; - -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.easymock.Capture; -import org.easymock.CaptureType; -import org.easymock.EasyMock; - -import java.lang.reflect.Field; -import java.util.Arrays; -import java.util.List; -import java.util.TreeMap; - -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; -import org.junit.Before; -import org.junit.Test; - -public class MetricsManagerTest { - - private MetricsManager manager; - private LogFeederAMSClient mockClient; - private Capture capture; - - @Before - public void init() throws Exception { - manager = new MetricsManager(); - - mockClient = strictMock(LogFeederAMSClient.class); - Field f = MetricsManager.class.getDeclaredField("amsClient"); - f.setAccessible(true); - f.set(manager, mockClient); - - EasyMock.expect(mockClient.getCollectorUri(null)).andReturn("null://null:null/null").anyTimes(); - capture = EasyMock.newCapture(CaptureType.FIRST); - mockClient.emitMetrics(EasyMock.capture(capture)); - EasyMock.expectLastCall().andReturn(true).once(); - - replay(mockClient); - manager.setAmsClient(mockClient); - manager.init(); - } - - @Test - public void testMetricManager_pointInTime() throws Exception { - MetricData metricCount1 = new MetricData("metric1", true); - metricCount1.value = 123; - metricCount1.prevPublishValue = 0; - metricCount1.publishCount = 0; - - manager.useMetrics(Arrays.asList(metricCount1)); - - verify(mockClient); - - TimelineMetrics metrics = capture.getValue(); - List metricList = metrics.getMetrics(); - assertEquals(metricList.size(), 1); - - TimelineMetric metric = metricList.get(0); - assertEquals(metric.getAppId(), "logfeeder"); - assertEquals(metric.getMetricName(), "metric1"); - assertEquals(metric.getType(), "Long"); - - TreeMap values = metric.getMetricValues(); - assertEquals(values.size(), 1); - assertEquals(values.firstEntry().getValue(), Double.valueOf(123.0)); - } - - @Test - public void testMetricManager_notPointInTime() throws Exception { - MetricData metricCount1 = new MetricData("metric1", false); - metricCount1.value = 123; - metricCount1.prevPublishValue = 0; - metricCount1.publishCount = 0; - - MetricData metricCount2 = new MetricData("metric1", false); - metricCount2.value = 123; - metricCount2.prevPublishValue = 100; - metricCount2.publishCount = 0; - - MetricData metricCount3 = new MetricData("metric1", false); // not included due to decrease of count - metricCount3.value = 99; - metricCount3.prevPublishValue = 100; - metricCount3.publishCount = 1; - - manager.useMetrics(Arrays.asList(metricCount1, metricCount2, metricCount3)); - - verify(mockClient); - - TimelineMetrics metrics = capture.getValue(); - List metricList = metrics.getMetrics(); - assertEquals(metricList.size(), 1); - - TimelineMetric metric = metricList.get(0); - assertEquals(metric.getAppId(), "logfeeder"); - assertEquals(metric.getMetricName(), "metric1"); - assertEquals(metric.getType(), 
"Long"); - - TreeMap values = metric.getMetricValues(); - assertEquals(values.size(), 1); - assertEquals(values.firstEntry().getValue(), Double.valueOf(146.0)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java deleted file mode 100644 index 1623738f855..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.Future; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.output.OutputKafka.KafkaCallBack; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.log4j.Logger; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class OutputKafkaTest { - private static final Logger LOG = Logger.getLogger(OutputKafkaTest.class); - - private static final String TEST_TOPIC = "test topic"; - - private OutputKafka outputKafka; - - @SuppressWarnings("unchecked") - private KafkaProducer mockKafkaProducer = EasyMock.strictMock(KafkaProducer.class); - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Before - public void init() { - outputKafka = new OutputKafka() { - @Override - protected KafkaProducer creteKafkaProducer(Properties props) { - return mockKafkaProducer; - } - }; - } - - @Test - public void testOutputKafka_uploadData() throws Exception { - LOG.info("testOutputKafka_uploadData()"); - - Map config = new HashMap(); - config.put("broker_list", "some broker list"); - config.put("topic", TEST_TOPIC); - - outputKafka.loadConfig(config); - outputKafka.init(new LogFeederProps()); - - @SuppressWarnings("unchecked") - Future mockFuture = EasyMock.mock(Future.class); - - EasyMock.expect(mockKafkaProducer.send(new ProducerRecord(TEST_TOPIC, "value0"))) - .andReturn(mockFuture); - EasyMock.expect(mockFuture.get()).andReturn(null); - - for (int i = 1; i < 10; i++) - EasyMock.expect(mockKafkaProducer.send(EasyMock.eq(new ProducerRecord(TEST_TOPIC, "value" + i)), - 
EasyMock.anyObject(KafkaCallBack.class))).andReturn(null); - - EasyMock.replay(mockKafkaProducer); - - for (int i = 0; i < 10; i++) { - InputFileMarker inputMarker = new InputFileMarker(EasyMock.mock(Input.class), null, 0); - outputKafka.write("value" + i, inputMarker); - } - - EasyMock.verify(mockKafkaProducer); - } - - @Test - public void testOutputKafka_noBrokerList() throws Exception { - LOG.info("testOutputKafka_noBrokerList()"); - - expectedException.expect(Exception.class); - expectedException.expectMessage("For kafka output, bootstrap broker_list is needed"); - - Map config = new HashMap(); - config.put("topic", TEST_TOPIC); - - outputKafka.loadConfig(config); - outputKafka.init(new LogFeederProps()); - } - - @Test - public void testOutputKafka_noTopic() throws Exception { - LOG.info("testOutputKafka_noBrokerList()"); - - expectedException.expect(Exception.class); - expectedException.expectMessage("For kafka output, topic is needed"); - - Map config = new HashMap(); - config.put("broker_list", "some broker list"); - - outputKafka.loadConfig(config); - outputKafka.init(new LogFeederProps()); - } - - @After - public void cleanUp() { - EasyMock.reset(mockKafkaProducer); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java deleted file mode 100644 index 502641fc8c6..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.common.LogFeederConstants; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.input.cache.LRUCache; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputDescriptorImpl; -import org.easymock.EasyMock; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class OutputLineFilterTest { - - private static final String CACHE_KEY_FIELD = "log_message"; - private static final String DEFAULT_DUMMY_MESSAGE = "myMessage"; - - private OutputLineFilter underTest; - private Input inputMock; - - @Before - public void setUp() { - underTest = new OutputLineFilter(); - inputMock = EasyMock.mock(Input.class); - } - - @Test - public void testApplyWithFilterOutByDedupInterval() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 100L, false)); - EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor()); - EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD); - EasyMock.replay(inputMock); - // WHEN - boolean result = underTest.apply(generateLineMap(), inputMock); - // THEN - EasyMock.verify(inputMock); - assertTrue(result); - } - - @Test - public void testApplyDoNotFilterOutDataByDedupInterval() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 10L, false)); - EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor()); - EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD); - EasyMock.replay(inputMock); - // WHEN - boolean result = underTest.apply(generateLineMap(), inputMock); - // THEN - EasyMock.verify(inputMock); - assertFalse(result); - } - - @Test - public void testApplyWithFilterOutByDedupLast() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 10L, true)); - EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor()); - EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD); - EasyMock.replay(inputMock); - // WHEN - boolean result = underTest.apply(generateLineMap(), inputMock); - // THEN - EasyMock.verify(inputMock); - assertTrue(result); - } - - @Test - public void testApplyDoNotFilterOutDataByDedupLast() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(createLruCache("myMessage2", 10L, true)); - EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor()); - EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD); - EasyMock.replay(inputMock); - // WHEN - boolean result = underTest.apply(generateLineMap(), inputMock); - // THEN - EasyMock.verify(inputMock); - assertFalse(result); - } - - @Test - public void testApplyWithoutLruCache() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(null); - EasyMock.replay(inputMock); - // WHEN - boolean result = underTest.apply(generateLineMap(), inputMock); - // THEN - EasyMock.verify(inputMock); - assertFalse(result); - } - - @Test - public void testApplyWithoutInMemoryTimestamp() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 100L, true)); - 
EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor()); - EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD); - EasyMock.replay(inputMock); - Map lineMap = generateLineMap(); - lineMap.remove(LogFeederConstants.IN_MEMORY_TIMESTAMP); - // WHEN - boolean result = underTest.apply(lineMap, inputMock); - // THEN - EasyMock.verify(inputMock); - assertFalse(result); - } - - @Test - public void testApplyWithoutLogMessage() { - // GIVEN - EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 100L, true)); - EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor()); - EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD); - EasyMock.replay(inputMock); - Map lineMap = generateLineMap(); - lineMap.remove(CACHE_KEY_FIELD); - // WHEN - boolean result = underTest.apply(lineMap, inputMock); - // THEN - EasyMock.verify(inputMock); - assertFalse(result); - } - - private Map generateLineMap() { - Map lineMap = new HashMap<>(); - lineMap.put(CACHE_KEY_FIELD, "myMessage"); - lineMap.put(LogFeederConstants.IN_MEMORY_TIMESTAMP, 150L); - return lineMap; - } - - private InputDescriptor generateInputDescriptor() { - InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {}; - inputDescriptor.setRowtype("service"); - return inputDescriptor; - } - - private LRUCache createLruCache(String defaultKey, long defaultValue, boolean lastDedupEanabled) { - LRUCache lruCache = new LRUCache(4, "myfilepath", 100, lastDedupEanabled); - lruCache.put(defaultKey, defaultValue); - return lruCache; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java deleted file mode 100644 index 9536cf88437..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java +++ /dev/null @@ -1,275 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.logfeeder.output; - -import static org.easymock.EasyMock.*; -import static org.junit.Assert.*; - -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.input.InputFileMarker; -import org.apache.ambari.logfeeder.loglevelfilter.LogLevelFilterHandler; -import org.apache.ambari.logfeeder.plugin.common.MetricData; -import org.apache.ambari.logfeeder.plugin.input.Input; -import org.apache.ambari.logfeeder.plugin.output.Output; -import org.apache.ambari.logsearch.config.json.model.inputconfig.impl.InputDescriptorImpl; -import org.junit.Test; - -public class OutputManagerTest { - - @Test - public void testOutputManager_addOutputs() { - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - replay(output1, output2, output3); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - verify(output1, output2, output3); - - List outputs = manager.getOutputs(); - assertEquals(outputs.size(), 3); - assertEquals(outputs.get(0), output1); - assertEquals(outputs.get(1), output2); - } - - @Test - public void testOutputManager_init() throws Exception { - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - LogFeederProps logFeederProps = new LogFeederProps(); - output1.init(logFeederProps); expectLastCall(); - output2.init(logFeederProps); expectLastCall(); - output3.init(logFeederProps); expectLastCall(); - - replay(output1, output2, output3); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.add(output1); - manager.add(output2); - manager.add(output3); - manager.setLogFeederProps(logFeederProps); - - manager.init(); - - verify(output1, output2, output3); - } - - @Test - public void testOutputManager_write() throws Exception { - Map jsonObj = new HashMap<>(); - jsonObj.put("type", "testType"); - jsonObj.put("path", "testPath"); - jsonObj.put("host", "testHost"); - jsonObj.put("ip", "testIp"); - jsonObj.put("level", "testLevel"); - jsonObj.put("id", "testId"); - - Input mockInput = strictMock(Input.class); - InputFileMarker inputMarker = new InputFileMarker(mockInput, null, 0); - InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {}; - inputDescriptor.setAddFields(Collections. 
emptyMap()); - - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - LogLevelFilterHandler mockFilter = strictMock(LogLevelFilterHandler.class); - - expect(mockInput.getInputDescriptor()).andReturn(inputDescriptor); - expect(mockInput.isUseEventMD5()).andReturn(false).anyTimes(); - expect(mockInput.isGenEventMD5()).andReturn(false).anyTimes(); - expect(mockInput.getInputDescriptor()).andReturn(inputDescriptor).anyTimes(); - expect(mockFilter.isAllowed(jsonObj, inputMarker, null)).andReturn(true).anyTimes(); - expect(mockInput.getCache()).andReturn(null); - expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3)); - - output1.write(jsonObj, inputMarker); expectLastCall(); - output2.write(jsonObj, inputMarker); expectLastCall(); - output3.write(jsonObj, inputMarker); expectLastCall(); - - replay(output1, output2, output3, mockFilter, mockInput); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.setLogFeederProps(new LogFeederProps()); - manager.setLogLevelFilterHandler(mockFilter); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - manager.write(jsonObj, inputMarker); - - verify(output1, output2, output3, mockInput); - } - - @Test - public void testOutputManager_write2() throws Exception { - String jsonString = "{}"; - - Input mockInput = strictMock(Input.class); - InputFileMarker inputMarker = new InputFileMarker(mockInput, null, 0); - InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {}; - - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - LogLevelFilterHandler mockFilter = strictMock(LogLevelFilterHandler.class); - - expect(mockInput.getInputDescriptor()).andReturn(inputDescriptor).anyTimes(); - expect(mockFilter.isAllowed(jsonString, inputMarker, null)).andReturn(true).anyTimes(); - expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3)); - - output1.write(jsonString, inputMarker); expectLastCall(); - output2.write(jsonString, inputMarker); expectLastCall(); - output3.write(jsonString, inputMarker); expectLastCall(); - - replay(output1, output2, output3, mockInput, mockFilter); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.setLogLevelFilterHandler(mockFilter); - manager.setLogFeederProps(new LogFeederProps()); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - manager.write(jsonString, inputMarker); - - verify(output1, output2, output3, mockInput); - } - - @Test - public void testOutputManager_addMetricsContainers() throws Exception { - List metrics = new ArrayList(); - - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - output1.addMetricsContainers(metrics); expectLastCall(); - output2.addMetricsContainers(metrics); expectLastCall(); - output3.addMetricsContainers(metrics); expectLastCall(); - - replay(output1, output2, output3); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - manager.addMetricsContainers(metrics); - - verify(output1, output2, output3); - } - - @Test - public void testOutputManager_logStat() throws Exception { - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - output1.logStat(); 
expectLastCall(); - output2.logStat(); expectLastCall(); - output3.logStat(); expectLastCall(); - - replay(output1, output2, output3); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - manager.logStats(); - - verify(output1, output2, output3); - } - - @Test - public void testOutputManager_copyFile() throws Exception { - File f = new File(""); - - Input mockInput = strictMock(Input.class); - InputFileMarker inputMarker = new InputFileMarker(mockInput, null, 0); - - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3)); - - output1.copyFile(f, inputMarker); expectLastCall(); - output2.copyFile(f, inputMarker); expectLastCall(); - output3.copyFile(f, inputMarker); expectLastCall(); - - replay(output1, output2, output3, mockInput); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - manager.copyFile(f, inputMarker); - - verify(output1, output2, output3, mockInput); - } - - @Test - public void testOutputManager_close() throws Exception { - Output output1 = strictMock(Output.class); - Output output2 = strictMock(Output.class); - Output output3 = strictMock(Output.class); - - output1.setDrain(true); expectLastCall(); - output2.setDrain(true); expectLastCall(); - output3.setDrain(true); expectLastCall(); - - output1.close(); expectLastCall(); - output2.close(); expectLastCall(); - output3.close(); expectLastCall(); - - expect(output1.isClosed()).andReturn(true); - expect(output2.isClosed()).andReturn(true); - expect(output3.isClosed()).andReturn(true); - - replay(output1, output2, output3); - - OutputManagerImpl manager = new OutputManagerImpl(); - manager.add(output1); - manager.add(output2); - manager.add(output3); - - manager.close(); - - verify(output1, output2, output3); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java deleted file mode 100644 index 6674be11be6..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.logfeeder.output; - -import org.apache.ambari.logfeeder.conf.LogFeederProps; -import org.apache.ambari.logfeeder.output.spool.LogSpoolerContext; -import org.junit.Before; -import org.junit.Test; - -import java.io.File; -import java.util.HashMap; -import java.util.Map; - -import static org.easymock.EasyMock.*; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class OutputS3FileTest { - - private Map configMap; - - @Before - public void setupConfiguration() { - configMap = new HashMap<>(); - String[] configKeys = new String[] { - S3OutputConfiguration.SPOOL_DIR_KEY, - S3OutputConfiguration.S3_BUCKET_NAME_KEY, - S3OutputConfiguration.S3_LOG_DIR_KEY, - S3OutputConfiguration.S3_ACCESS_KEY, - S3OutputConfiguration.S3_SECRET_KEY, - S3OutputConfiguration.COMPRESSION_ALGO_KEY, - S3OutputConfiguration.ADDITIONAL_FIELDS_KEY - }; - Map additionalKeys = new HashMap<>(); - additionalKeys.put(S3OutputConfiguration.CLUSTER_KEY, "cl1"); - Object[] configValues = new Object[] { - "/var/ambari-logsearch/logfeeder", - "s3_bucket_name", - "logs", - "ABCDEFGHIJ1234", - "amdfbldkfdlf", - "gz", - additionalKeys - }; - for (int i = 0; i < configKeys.length; i++) { - configMap.put(configKeys[i], configValues[i]); - } - } - - @Test - public void shouldRolloverWhenSufficientSizeIsReached() throws Exception { - - String thresholdSize = Long.toString(15 * 1024 * 1024L); - LogSpoolerContext logSpoolerContext = mock(LogSpoolerContext.class); - File activeSpoolFile = mock(File.class); - expect(activeSpoolFile.length()).andReturn(20*1024*1024L); - expect(logSpoolerContext.getActiveSpoolFile()).andReturn(activeSpoolFile); - replay(logSpoolerContext, activeSpoolFile); - - OutputS3File outputS3File = new OutputS3File(); - configMap.put(S3OutputConfiguration.ROLLOVER_SIZE_THRESHOLD_BYTES_KEY, thresholdSize); - outputS3File.loadConfig(configMap); - outputS3File.init(new LogFeederProps()); - - assertTrue(outputS3File.shouldRollover(logSpoolerContext)); - } - - @Test - public void shouldNotRolloverBeforeSufficientSizeIsReached() throws Exception { - String thresholdSize = Long.toString(15 * 1024 * 1024L); - LogSpoolerContext logSpoolerContext = mock(LogSpoolerContext.class); - File activeSpoolFile = mock(File.class); - expect(activeSpoolFile.length()).andReturn(10*1024*1024L); - expect(logSpoolerContext.getActiveSpoolFile()).andReturn(activeSpoolFile); - replay(logSpoolerContext, activeSpoolFile); - - OutputS3File outputS3File = new OutputS3File(); - configMap.put(S3OutputConfiguration.ROLLOVER_SIZE_THRESHOLD_BYTES_KEY, thresholdSize); - outputS3File.loadConfig(configMap); - outputS3File.init(new LogFeederProps()); - - assertFalse(outputS3File.shouldRollover(logSpoolerContext)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java deleted file mode 100644 index d1376c4bbf0..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.output; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -import org.apache.ambari.logfeeder.util.LogFeederUtil; - -public class S3LogPathResolverTest { - - @Test - public void shouldResolveHostName() { - String resolvedPath = new S3LogPathResolver().getResolvedPath("my_s3_path/$host", "filename.log", "cl1"); - assertEquals("my_s3_path/" + LogFeederUtil.hostName + "/filename.log", resolvedPath); - } - - @Test - public void shouldResolveIpAddress() { - String resolvedPath = new S3LogPathResolver().getResolvedPath("my_s3_path/$ip", "filename.log", "cl1"); - assertEquals("my_s3_path/" + LogFeederUtil.ipAddress + "/filename.log", resolvedPath); - } - - @Test - public void shouldResolveCluster() { - String resolvedPath = new S3LogPathResolver().getResolvedPath("my_s3_path/$cluster", "filename.log", "cl1"); - assertEquals("my_s3_path/cl1/filename.log", resolvedPath); - } - - @Test - public void shouldResolveCombinations() { - String resolvedPath = new S3LogPathResolver().getResolvedPath("my_s3_path/$cluster/$host", "filename.log", "cl1"); - assertEquals("my_s3_path/cl1/"+ LogFeederUtil.hostName + "/filename.log", resolvedPath); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java deleted file mode 100644 index 5477f5cbf9b..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.logfeeder.output; - -import org.junit.Test; - -import java.io.File; -import java.util.HashMap; -import java.util.Map; - -import static org.easymock.EasyMock.*; -import static org.junit.Assert.assertEquals; - -public class S3UploaderTest { - - public static final String TEST_BUCKET = "test_bucket"; - public static final String TEST_PATH = "test_path"; - public static final String GZ = "gz"; - public static final String LOG_TYPE = "hdfs_namenode"; - public static final String ACCESS_KEY_VALUE = "accessKeyValue"; - public static final String SECRET_KEY_VALUE = "secretKeyValue"; - - @Test - public void shouldUploadToS3ToRightBucket() { - File fileToUpload = mock(File.class); - String fileName = "hdfs_namenode.log.123343493473948"; - expect(fileToUpload.getName()).andReturn(fileName); - final File compressedFile = mock(File.class); - Map configs = setupS3Configs(); - - S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs); - expect(compressedFile.delete()).andReturn(true); - expect(fileToUpload.delete()).andReturn(true); - replay(fileToUpload, compressedFile); - - S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) { - @Override - protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) { - return compressedFile; - } - protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) { - } - }; - String resolvedPath = s3Uploader.uploadFile(fileToUpload, LOG_TYPE); - - assertEquals("test_path/hdfs_namenode/hdfs_namenode.log.123343493473948.gz", resolvedPath); - } - - @Test - public void shouldCleanupLocalFilesOnSuccessfulUpload() { - File fileToUpload = mock(File.class); - String fileName = "hdfs_namenode.log.123343493473948"; - expect(fileToUpload.getName()).andReturn(fileName); - final File compressedFile = mock(File.class); - Map configs = setupS3Configs(); - - S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs); - expect(compressedFile.delete()).andReturn(true); - expect(fileToUpload.delete()).andReturn(true); - replay(fileToUpload, compressedFile); - - S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) { - @Override - protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) { - return compressedFile; - } - protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) { - } - }; - s3Uploader.uploadFile(fileToUpload, LOG_TYPE); - - verify(fileToUpload); - verify(compressedFile); - } - - @Test - public void shouldNotCleanupUncompressedFileIfNotRequired() { - File fileToUpload = mock(File.class); - String fileName = "hdfs_namenode.log.123343493473948"; - expect(fileToUpload.getName()).andReturn(fileName); - final File compressedFile = mock(File.class); - Map configs = setupS3Configs(); - - S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs); - expect(compressedFile.delete()).andReturn(true); - replay(fileToUpload, compressedFile); - - S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, LOG_TYPE) { - @Override - protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) { - return compressedFile; - } - protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) { - } - }; - s3Uploader.uploadFile(fileToUpload, LOG_TYPE); - - verify(fileToUpload); - 
verify(compressedFile); - } - - @Test - public void shouldExpandVariablesInPath() { - File fileToUpload = mock(File.class); - String fileName = "hdfs_namenode.log.123343493473948"; - expect(fileToUpload.getName()).andReturn(fileName); - final File compressedFile = mock(File.class); - Map configs = setupS3Configs(); - configs.put(S3OutputConfiguration.S3_LOG_DIR_KEY, "$cluster/"+TEST_PATH); - - - S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs); - expect(compressedFile.delete()).andReturn(true); - expect(fileToUpload.delete()).andReturn(true); - replay(fileToUpload, compressedFile); - - S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) { - @Override - protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) { - return compressedFile; - } - protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) { - } - }; - s3Uploader.uploadFile(fileToUpload, LOG_TYPE); - } - - private Map setupS3Configs() { - Map configs = new HashMap<>(); - configs.put(S3OutputConfiguration.S3_BUCKET_NAME_KEY, TEST_BUCKET); - configs.put(S3OutputConfiguration.S3_LOG_DIR_KEY, TEST_PATH); - configs.put(S3OutputConfiguration.S3_ACCESS_KEY, ACCESS_KEY_VALUE); - configs.put(S3OutputConfiguration.S3_SECRET_KEY, SECRET_KEY_VALUE); - configs.put(S3OutputConfiguration.COMPRESSION_ALGO_KEY, GZ); - Map nameValueMap = new HashMap<>(); - nameValueMap.put(S3OutputConfiguration.CLUSTER_KEY, "cl1"); - configs.put(S3OutputConfiguration.ADDITIONAL_FIELDS_KEY, nameValueMap); - return configs; - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java deleted file mode 100644 index 2cfe9ff6746..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java +++ /dev/null @@ -1,374 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.logfeeder.output.spool; - -import org.easymock.EasyMockRule; -import org.easymock.LogicalOperator; -import org.easymock.Mock; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.Comparator; - -import static org.easymock.EasyMock.*; - -public class LogSpoolerTest { - - @Rule - public TemporaryFolder testFolder = new TemporaryFolder(); - - @Rule - public EasyMockRule mocks = new EasyMockRule(this); - - private String spoolDirectory; - private static final String SOURCE_FILENAME_PREFIX = "hdfs-namenode.log"; - - @Mock - private RolloverCondition rolloverCondition; - - @Mock - private RolloverHandler rolloverHandler; - - @Before - public void setup() { - spoolDirectory = testFolder.getRoot().getAbsolutePath(); - } - - @Test - public void shouldSpoolEventToFile() { - final PrintWriter spoolWriter = mock(PrintWriter.class); - spoolWriter.println("log event"); - - final File mockFile = setupInputFileExpectations(); - LogSpoolerContext logSpoolerContext = new LogSpoolerContext(mockFile); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext, new LogSpoolerFileComparator(), LogicalOperator.EQUAL))). - andReturn(false); - - replay(spoolWriter, rolloverCondition, mockFile); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - return spoolWriter; - } - - @Override - protected File initializeSpoolFile() { - return mockFile; - } - }; - logSpooler.add("log event"); - - verify(spoolWriter); - } - - private File setupInputFileExpectations() { - final File mockFile = mock(File.class); - expect(mockFile.length()).andReturn(10240L); - return mockFile; - } - - @Test - public void shouldIncrementSpooledEventsCount() { - - final PrintWriter spoolWriter = mock(PrintWriter.class); - spoolWriter.println("log event"); - - final File mockFile = setupInputFileExpectations(); - LogSpoolerContext logSpoolerContext = new LogSpoolerContext(mockFile); - logSpoolerContext.logEventSpooled(); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext, new LogSpoolerEventCountComparator(), LogicalOperator.EQUAL))). - andReturn(false); - - replay(spoolWriter, rolloverCondition, mockFile); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - return spoolWriter; - } - - @Override - protected File initializeSpoolFile() { - return mockFile; - } - }; - logSpooler.add("log event"); - - verify(rolloverCondition); - } - - @Test - public void shouldCloseCurrentSpoolFileOnRollOver() { - final PrintWriter spoolWriter = mock(PrintWriter.class); - spoolWriter.println("log event"); - spoolWriter.flush(); - spoolWriter.close(); - - final File mockFile = setupInputFileExpectations(); - LogSpoolerContext logSpoolerContext = new LogSpoolerContext(mockFile); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext, new LogSpoolerFileComparator(), LogicalOperator.EQUAL))). 
- andReturn(true); - rolloverHandler.handleRollover(mockFile); - - replay(spoolWriter, rolloverCondition, rolloverHandler, mockFile); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - return spoolWriter; - } - - @Override - protected File initializeSpoolFile() { - return mockFile; - } - }; - logSpooler.add("log event"); - - verify(spoolWriter); - } - - @Test - public void shouldReinitializeFileOnRollover() { - final PrintWriter spoolWriter1 = mock(PrintWriter.class); - final PrintWriter spoolWriter2 = mock(PrintWriter.class); - spoolWriter1.println("log event1"); - spoolWriter2.println("log event2"); - spoolWriter1.flush(); - spoolWriter1.close(); - - final File mockFile1 = setupInputFileExpectations(); - final File mockFile2 = setupInputFileExpectations(); - - LogSpoolerContext logSpoolerContext1 = new LogSpoolerContext(mockFile1); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext1, new LogSpoolerFileComparator(), LogicalOperator.EQUAL)) - ).andReturn(true); - - LogSpoolerContext logSpoolerContext2 = new LogSpoolerContext(mockFile2); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext2, new LogSpoolerFileComparator(), LogicalOperator.EQUAL)) - ).andReturn(false); - - rolloverHandler.handleRollover(mockFile1); - - replay(spoolWriter1, spoolWriter2, rolloverCondition, rolloverHandler, mockFile1, mockFile2); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - private boolean wasRolledOver; - - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - if (!wasRolledOver) { - wasRolledOver = true; - return spoolWriter1; - } else { - return spoolWriter2; - } - } - - @Override - protected File initializeSpoolFile() { - if (!wasRolledOver) { - return mockFile1; - } else { - return mockFile2; - } - } - }; - logSpooler.add("log event1"); - logSpooler.add("log event2"); - - verify(spoolWriter1, spoolWriter2, rolloverCondition); - } - - @Test - public void shouldCallRolloverHandlerOnRollover() { - final PrintWriter spoolWriter = mock(PrintWriter.class); - spoolWriter.println("log event"); - spoolWriter.flush(); - spoolWriter.close(); - - final File mockFile = setupInputFileExpectations(); - LogSpoolerContext logSpoolerContext = new LogSpoolerContext(mockFile); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext, new LogSpoolerFileComparator(), LogicalOperator.EQUAL)) - ).andReturn(true); - rolloverHandler.handleRollover(mockFile); - - replay(spoolWriter, rolloverCondition, rolloverHandler, mockFile); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - return spoolWriter; - } - - @Override - protected File initializeSpoolFile() { - return mockFile; - } - }; - logSpooler.add("log event"); - - verify(rolloverHandler); - } - - // Rollover twice - the second rollover should work if the "rolloverInProgress" - // flag is being reset correctly. Third file expectations being setup due - // to auto-initialization. 
- @Test - public void shouldResetRolloverInProgressFlag() { - final PrintWriter spoolWriter1 = mock(PrintWriter.class); - final PrintWriter spoolWriter2 = mock(PrintWriter.class); - final PrintWriter spoolWriter3 = mock(PrintWriter.class); - spoolWriter1.println("log event1"); - spoolWriter2.println("log event2"); - spoolWriter1.flush(); - spoolWriter1.close(); - spoolWriter2.flush(); - spoolWriter2.close(); - - final File mockFile1 = setupInputFileExpectations(); - final File mockFile2 = setupInputFileExpectations(); - final File mockFile3 = setupInputFileExpectations(); - - LogSpoolerContext logSpoolerContext1 = new LogSpoolerContext(mockFile1); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext1, new LogSpoolerFileComparator(), LogicalOperator.EQUAL)) - ).andReturn(true); - - LogSpoolerContext logSpoolerContext2 = new LogSpoolerContext(mockFile2); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext2, new LogSpoolerFileComparator(), LogicalOperator.EQUAL)) - ).andReturn(true); - - rolloverHandler.handleRollover(mockFile1); - rolloverHandler.handleRollover(mockFile2); - - replay(spoolWriter1, spoolWriter2, rolloverCondition, rolloverHandler, mockFile1, mockFile2, mockFile3); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - private int currentFileNum; - - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - PrintWriter spoolWriter = null; - switch (currentFileNum) { - case 0: - spoolWriter = spoolWriter1; - break; - case 1: - spoolWriter = spoolWriter2; - break; - case 2: - spoolWriter = spoolWriter3; - break; - } - currentFileNum++; - return spoolWriter; - } - - @Override - protected File initializeSpoolFile() { - switch (currentFileNum) { - case 0: - return mockFile1; - case 1: - return mockFile2; - case 2: - return mockFile3; - default: - return null; - } - } - }; - logSpooler.add("log event1"); - logSpooler.add("log event2"); - - verify(spoolWriter1, spoolWriter2, rolloverCondition); - } - - @Test - public void shouldNotRolloverZeroLengthFiles() { - final PrintWriter spoolWriter = mock(PrintWriter.class); - spoolWriter.println("log event"); - spoolWriter.flush(); - spoolWriter.close(); - - final File mockFile = mock(File.class); - expect(mockFile.length()).andReturn(0L); - - LogSpoolerContext logSpoolerContext = new LogSpoolerContext(mockFile); - expect(rolloverCondition.shouldRollover( - cmp(logSpoolerContext, new LogSpoolerFileComparator(), LogicalOperator.EQUAL))). - andReturn(true); - - replay(spoolWriter, rolloverCondition, mockFile); - - LogSpooler logSpooler = new LogSpooler(spoolDirectory, SOURCE_FILENAME_PREFIX, - rolloverCondition, rolloverHandler) { - - @Override - protected PrintWriter initializeSpoolWriter(File spoolFile) throws IOException { - return spoolWriter; - } - - @Override - protected File initializeSpoolFile() { - return mockFile; - } - }; - logSpooler.add("log event"); - - verify(mockFile); - } - - class LogSpoolerFileComparator implements Comparator { - @Override - public int compare(LogSpoolerContext o1, LogSpoolerContext o2) { - return o1.getActiveSpoolFile()==o2.getActiveSpoolFile() ? 
0 : -1; - } - } - - class LogSpoolerEventCountComparator implements Comparator { - @Override - public int compare(LogSpoolerContext o1, LogSpoolerContext o2) { - return (int)(o1.getNumEventsSpooled()-o2.getNumEventsSpooled()); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java deleted file mode 100644 index 43e03c78e0f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.logfeeder.util; - -import java.util.HashMap; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -public class PlaceholderUtilTest { - @Test - public void testPlaceholderUtil_replaceVariables() { - String hostName = "host1"; - String ip = "127.0.0.1"; - String clusterName = "test-cluster"; - - HashMap contextParam = new HashMap(); - contextParam.put("host", hostName); - contextParam.put("ip", ip); - contextParam.put("cluster", clusterName); - - String resultStr = PlaceholderUtil.replaceVariables("$CLUSTER/logfeeder/$HOST-$IP/logs", contextParam); - String expectedStr = clusterName + "/logfeeder/" + hostName + "-" + ip + "/logs"; - - assertEquals("Result string :" + resultStr + " is not equal to exptected string :" + expectedStr, resultStr, expectedStr); - } -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java deleted file mode 100644 index 02918be990f..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logfeeder.util; - -import static org.junit.Assert.assertEquals; - -import org.apache.ambari.logfeeder.util.S3Util; - -public class S3UtilTest { - public void testS3Util_pathToBucketName() throws Exception { - String s3Path = "s3://bucket_name/path/file.txt"; - String expectedBucketName = "bucket_name"; - String actualBucketName = S3Util.getBucketName(s3Path); - assertEquals(expectedBucketName, actualBucketName); - } - - public void testS3Util_pathToS3Key() throws Exception { - String s3Path = "s3://bucket_name/path/file.txt"; - String expectedS3key = "path/file.txt"; - String actualS3key = S3Util.getS3Key(s3Path); - assertEquals(expectedS3key, actualS3key); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/log4j.xml deleted file mode 100644 index 1d28fcc688d..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/log4j.xml +++ /dev/null @@ -1,53 +0,0 @@ diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties deleted file mode 100644 index 5476c968d5e..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -logfeeder.log.filter.enable=true -logfeeder.solr.config.interval=5 -logfeeder.solr.zk_connect_string=some_connect_string -logfeeder.metrics.collector.hosts=some_collector_host -logfeeder.include.default.level=FATAL,ERROR,WARN \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/sample_filter.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/sample_filter.json deleted file mode 100644 index 8c64c28af50..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/sample_filter.json +++ /dev/null @@ -1,166 +0,0 @@ -{ - "filter": { - "hdfs_namenode": { - "label": "hdfs_namenode", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "yarn_resourcemanager": { - "label": "yarn_resourcemanager", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "logsearch_perf": { - "label": "logsearch_perf", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "hdfs_secondarynamenode": { - "label": "hdfs_secondarynamenode", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "mapred_historyserver": { - "label": "mapred_historyserver", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "ams_hbase_master": { - "label": "ams_hbase_master", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "ambari_server": { - "label": "ambari_server", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "yarn_timelineserver": { - "label": "yarn_timelineserver", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "hdfs_datanode": { - "label": "hdfs_datanode", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "logsearch_app": { - "label": "logsearch_app", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "kafka_statechange": { - "label": "kafka_statechange", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "hbase_master": { - "label": "hbase_master", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "kafka_server": { - "label": "kafka_server", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "oozie_app": { - "label": "oozie_app", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "hive_hiveserver2": { - "label": "hive_hiveserver2", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "yarn_nodemanager": { - "label": "yarn_nodemanager", - "hosts": [], - 
"defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "hbase_regionserver": { - "label": "hbase_regionserver", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "ambari_agent": { - "label": "ambari_agent", - "hosts": ["host1","host2"], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "2016-04-05T08:30:00.000Z" - }, - "logsearch_feeder": { - "label": "logsearch_feeder", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "kafka_controller": { - "label": "kafka_controller", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "ams_collector": { - "label": "ams_collector", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "yarn_jobsummary": { - "label": "yarn_jobsummary", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - }, - "hive_metastore": { - "label": "hive_metastore", - "hosts": [], - "defaultLevels": ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"], - "overrideLevels": [], - "expiryTime": "" - } - }, - "id": "1459861568220" -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/config_audit.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/config_audit.json deleted file mode 100644 index 5d827125731..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/config_audit.json +++ /dev/null @@ -1,172 +0,0 @@ -{ - "global": { - "add_fields": { - "cluster": "audit" - }, - "source": "file", - "tail": "true", - "gen_event_md5": "true" - }, - "input": [{ - "type": "hdfs_audit", - "rowtype": "hdfs_audit", - "path": "{path}/src/test/resources/samples/jsonlogs/audit_log.json" - }], - "filter": [{ - "filter": "json", - "conditions": { - "fields": { - "type": [ - "hdfs_audit" - ] - - } - } - }, { - "filter": "keyvalue", - "sort_order": 1, - "conditions": { - "fields": { - "type": [ - "hdfs_audit" - ] - - } - - }, - "source_field": "log_message", - "value_split": "=", - "field_split": "\t", - "post_map_values": { - "src": { - "map_field_name": { - "new_field_name": "resource" - } - - }, - "ip": { - "map_field_name": { - "new_field_name": "cliIP" - } - - }, - "allowed": [{ - "map_field_value": { - "pre_value": "true", - "post_value": "1" - } - - }, { - "map_field_value": { - "pre_value": "false", - "post_value": "0" - } - - }, { - "map_field_name": { - "new_field_name": "result" - } - - } - - ], - "cmd": { - "map_field_name": { - "new_field_name": "action" - } - - }, - "proto": { - "map_field_name": { - "new_field_name": "cliType" - } - - }, - "callerContext": { - "map_field_name": { - "new_field_name": "req_caller_id" - } - - } - - } - - }, { - "filter": "grok", - "sort_order": 2, - "source_field": "ugi", - "remove_source_field": "false", - "conditions": { - "fields": { - "type": [ - "hdfs_audit" - ] - - } - - }, - "message_pattern": "%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}", - 
"post_map_values": { - "user": { - "map_field_name": { - "new_field_name": "reqUser" - } - - }, - "x_user": { - "map_field_name": { - "new_field_name": "reqUser" - } - - }, - "p_user": { - "map_field_name": { - "new_field_name": "reqUser" - } - - }, - "k_user": { - "map_field_name": { - "new_field_name": "proxyUsers" - } - - }, - "p_authType": { - "map_field_name": { - "new_field_name": "authType" - } - - }, - "k_authType": { - "map_field_name": { - "new_field_name": "proxyAuthType" - } - - } - - } - - } - - ], - - "output": [{ - "is_enabled": "true", - "comment": "Output to file for audit logs", - "destination": "solr", - "url": "http://localhost:8983/solr/audit_logs", - "collection": "audit_logs", - "number_of_shards": "1", - "splits_interval_mins": "100000", - "conditions": { - "fields": { - "rowtype": [ - "hdfs_audit" - ] - } - } - } - - ] - -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/config_service.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/config_service.json deleted file mode 100644 index 0fff0503012..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/config_service.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "global": { - "add_fields": { - "cluster": "cluster_name" - }, - "source": "file", - "tail": "true", - "gen_event_md5": "true" - }, - "input": [{ - "type": "logsearch", - "rowtype": "service", - "path": "{path}/src/test/resources/samples/jsonlogs/service_log.json" - }], - "filter": [{ - "filter": "json", - "conditions": { - "fields": { - "type": [ - "logsearch" - ] - } - } - }], - "output": [{ - "comment": "Output to solr for service records", - "is_enabled": "true", - "destination": "solr", - "url": "http://localhost:8983/solr/hadoop_logs", - "collection": "hadoop_logs", - "number_of_shards": "1", - "splits_interval_mins": "100000", - "conditions": { - "fields": { - "rowtype": [ - "service" - ] - } - } - }] - -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/output-hdfs-config.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/output-hdfs-config.json deleted file mode 100644 index 336934a917b..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/config/output-hdfs-config.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - -"output": [{ - "comment": "Write log to hdfs", - "destination": "hdfs", - "hdfs_out_dir": "logfeeder/$HOST/service", - "file_name_prefix":"service-logs-", - "hdfs_host": "hdfs_host", - "hdfs_port": "8020", - "rollover_sec":"300", - "conditions": { - "fields": { - "rowtype": [ - "service" - ] - } - } - }] - -} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/jsonlogs/audit_log.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/jsonlogs/audit_log.json deleted file mode 100644 index 3ffa40d41e3..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/jsonlogs/audit_log.json +++ /dev/null @@ -1,9 +0,0 @@ -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} 
-{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917617284"} -{"level":"DEBUG","file":"App.java","thread_name":"main","line_number":15,"log_message":"FSNamesystem.audit: allowed\u003dtrue\tugi\u003doozie (auth:SIMPLE)\tip\u003d/10.10.10.41\tcmd\u003dlistStatus\tsrc\u003d/user/oozie/share/lib\tdst\u003dnull\tperm\u003dnull\tproto\u003drpc","logger_name":"logserach.appender.test.App","logtime":"1456917717290"} diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/jsonlogs/service_log.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/jsonlogs/service_log.json deleted file mode 100644 index e0a3728da5a..00000000000 --- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/samples/jsonlogs/service_log.json +++ /dev/null @@ -1,7 +0,0 @@ -{"level":"INFO","file":"LogSearch.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":48,"log_message":"Starting logsearch server...","logger_name":"org.apache.ambari.logsearch.LogSearch","logtime":"1457000117434"} 
-{"level":"INFO","file":"PropertiesLoaderSupport.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":177,"log_message":"Loading properties file from class path resource [logsearch.properties]","logger_name":"org.apache.ambari.logsearch.util.PropertiesUtil","logtime":"1457000118770"} -{"level":"INFO","file":"PropertiesLoaderSupport.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":177,"log_message":"Loading properties file from class path resource [custom.properties]","logger_name":"org.apache.ambari.logsearch.util.PropertiesUtil","logtime":"1457000118774"} -{"level":"INFO","file":"SolrDaoBase.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":83,"log_message":"connectToSolr() zkHosts\u003dnull, collection\u003daudit_logs, url\u003dhttp://localhost:8983/solr","logger_name":"org.apache.ambari.logsearch.dao.SolrDaoBase","logtime":"1457000118940"} -{"level":"INFO","file":"SolrDaoBase.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":104,"log_message":"Connencting to solr : http://localhost:8983/solr/audit_logs","logger_name":"org.apache.ambari.logsearch.dao.SolrDaoBase","logtime":"1457000118942"} -{"level":"ERROR","file":"AuditSolrDao.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":53,"log_message":"Error while connecting to Solr for audit logs : solrUrl\u003dhttp://localhost:8983/solr, zkHosts\u003dnull, collection\u003daudit_logs\norg.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://localhost:8983/solr/audit_logs: Expected mime type application/octet-stream but got text/html. \u003chtml\u003e\n\u003chead\u003e\n\u003cmeta http-equiv\u003d\"Content-Type\" content\u003d\"text/html; charset\u003dUTF-8\"/\u003e\n\u003ctitle\u003eError 404 Not Found\u003c/title\u003e\n\u003c/head\u003e\n\u003cbody\u003e\u003ch2\u003eHTTP ERROR 404\u003c/h2\u003e\n\u003cp\u003eProblem accessing /solr/audit_logs/admin/collections. 
Reason:\n\u003cpre\u003e Not Found\u003c/pre\u003e\u003c/p\u003e\u003chr\u003e\u003ci\u003e\u003csmall\u003ePowered by Jetty://\u003c/small\u003e\u003c/i\u003e\u003chr/\u003e\n\n\u003c/body\u003e\n\u003c/html\u003e\n\n\tat org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:528)\n\tat org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:234)\n\tat org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:226)\n\tat org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:135)\n\tat org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:152)\n\tat org.apache.ambari.logsearch.dao.SolrDaoBase.setupCollections(SolrDaoBase.java:169)\n\tat org.apache.ambari.logsearch.dao.AuditSolrDao.postConstructor(AuditSolrDao.java:50)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleElement.invoke(InitDestroyAnnotationBeanPostProcessor.java:346)\n\tat org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleMetadata.invokeInitMethods(InitDestroyAnnotationBeanPostProcessor.java:299)\n\tat org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor.postProcessBeforeInitialization(InitDestroyAnnotationBeanPostProcessor.java:132)\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.applyBeanPostProcessorsBeforeInitialization(AbstractAutowireCapableBeanFactory.java:394)\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1448)\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:519)\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:456)\n\tat org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:294)\n\tat org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:225)\n\tat org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:291)\n\tat org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:193)\n\tat org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:605)\n\tat org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:925)\n\tat org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:472)\n\tat org.springframework.web.context.ContextLoader.createWebApplicationContext(ContextLoader.java:282)\n\tat org.springframework.web.context.ContextLoader.initWebApplicationContext(ContextLoader.java:204)\n\tat org.springframework.web.context.ContextLoaderListener.contextInitialized(ContextLoaderListener.java:47)\n\tat org.mortbay.jetty.handler.ContextHandler.startContext(ContextHandler.java:549)\n\tat org.mortbay.jetty.servlet.Context.startContext(Context.java:136)\n\tat 
org.mortbay.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1282)\n\tat org.mortbay.jetty.handler.ContextHandler.doStart(ContextHandler.java:518)\n\tat org.mortbay.jetty.webapp.WebAppContext.doStart(WebAppContext.java:499)\n\tat org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)\n\tat org.mortbay.jetty.handler.HandlerCollection.doStart(HandlerCollection.java:152)\n\tat org.mortbay.jetty.handler.ContextHandlerCollection.doStart(ContextHandlerCollection.java:156)\n\tat org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)\n\tat org.mortbay.jetty.handler.HandlerWrapper.doStart(HandlerWrapper.java:130)\n\tat org.mortbay.jetty.Server.doStart(Server.java:224)\n\tat org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)\n\tat org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:857)\n\tat org.apache.ambari.logsearch.LogSearch.main(LogSearch.java:50)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:297)\n\tat java.lang.Thread.run(Thread.java:745)\n","logger_name":"org.apache.ambari.logsearch.dao.AuditSolrDao","logtime":"1457000119375"} -{"level":"INFO","file":"SolrDaoBase.java","thread_name":"org.apache.ambari.logsearch.LogSearch.main()","line_number":83,"log_message":"connectToSolr() zkHosts\u003dnull, collection\u003dhadoop_logs, url\u003dhttp://localhost:8983/solr","logger_name":"org.apache.ambari.logsearch.dao.SolrDaoBase","logtime":"1457000119392"} diff --git a/ambari-logsearch/ambari-logsearch-server/.gitignore b/ambari-logsearch/ambari-logsearch-server/.gitignore deleted file mode 100644 index 7ea6a1f8de3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -target -.settings -.classpath -.project -/bin/ -node_modules/ -logs/ -node/ -*.pid - diff --git a/ambari-logsearch/ambari-logsearch-server/README.md b/ambari-logsearch/ambari-logsearch-server/README.md deleted file mode 100644 index 7432344dc78..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/README.md +++ /dev/null @@ -1,39 +0,0 @@ - - -# Log Search Server - -## Start locally from Maven / IDE - -Other services (like zookeeper, solr, logfeeder) can be started with `docker-compose`: -```bash -cd ambari/ambari-logsearch/docker -docker-compose up -d zookeeper solr logfeeder -``` - -Then you can start the Log Search server from Maven: - -```bash -cd ambari/ambari-logsearch/ambari-logsearch-server -./run.sh -# or -mvn clean package -DskipTests spring-boot:run -``` - -You can also start the Log Search server from an IDE. One thing is important: the config set location that the server tries to upload to ZooKeeper. By default, config sets are located at `${LOGSEARCH_SERVER_RELATIVE_LOCATION:}src/main/configsets` (as defined in `logsearch.properties`). Depending on where you run `LogSearch.java` from, you need to set the `LOGSEARCH_SERVER_RELATIVE_LOCATION` environment variable accordingly.
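For example (a minimal sketch, assuming the server is launched from the Ambari repository root rather than from the module directory; the exact value depends on your working directory):

```bash
# Hypothetical illustration: launching from the repository root instead of
# ambari-logsearch/ambari-logsearch-server. The variable is prepended to
# "src/main/configsets", so it should point at the module directory
# (trailing slash included) for the config set path to resolve.
export LOGSEARCH_SERVER_RELATIVE_LOCATION=ambari-logsearch/ambari-logsearch-server/
mvn -f ambari-logsearch/ambari-logsearch-server/pom.xml clean package -DskipTests spring-boot:run
```

When starting from an IDE, the same variable can be set in the run configuration's environment instead of the shell.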
diff --git a/ambari-logsearch/ambari-logsearch-server/build.properties b/ambari-logsearch/ambari-logsearch-server/build.properties deleted file mode 100644 index 4e4948973ff..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/build.properties +++ /dev/null @@ -1,23 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# log4j configuration used during build and unit tests - -TOMCAT_HOME=/Library/Tomcat/Home -app.work.dir=${builddir}/build/work -app.war.dir=${app.work.dir}/war -app.pkg.dir=${app.work.dir}/pkg - -app.dev.war.dir=${app.work.dir}/webapps/logsearch -app.war.name=logsearch.war - -app.target.dir=${builddir}/target/classes/static \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/build.xml b/ambari-logsearch/ambari-logsearch-server/build.xml deleted file mode 100644 index 15ceedc4628..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/build.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/pom.xml b/ambari-logsearch/ambari-logsearch-server/pom.xml deleted file mode 100755 index 783fba35306..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/pom.xml +++ /dev/null @@ -1,553 +0,0 @@ - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - ambari-logsearch-server - jar - http://maven.apache.org - Ambari Logsearch Server - - 4.3.17.RELEASE - 4.2.4.RELEASE - 2.3.2.RELEASE - 2.25.1 - 9.4.11.v20180605 - 1.5.16 - 2.0.2.RELEASE - 0.6.0 - 1.5.13.RELEASE - - - - dev - - true - - - - - src/main/resources - true - - - - - - maven-compiler-plugin - 3.0 - - - maven-dependency-plugin - 2.8 - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.0 - - ${jdk.version} - ${jdk.version} - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - - org.apache.maven.plugins - maven-dependency-plugin - 2.8 - - - unpack - prepare-package - - unpack - - - - - org.apache.ambari - ambari-logsearch-web - ${project.version} - ${project.build.directory}/ - dist/** - - - - - - copy-dependencies - package - - copy-dependencies - - - ambari-logsearch-web - true - ${basedir}/target/libs - false - false - true - runtime - - - - - - org.apache.maven.plugins - maven-antrun-plugin - 1.7 - - - package - - - - - - - - - run - - - - - - org.apache.rat - apache-rat-plugin - - - src/main/configsets/hadoop_logs/conf/managed-schema - **/*.log - **/*.json - - - - - test - - check - - - - - - - - - - - junit - junit - test - - - org.easymock - easymock - 3.4 - test - - - - org.springframework - spring-beans - ${spring.version} - - - org.springframework - spring-context - ${spring.version} - - - org.springframework - spring-test - ${spring.version} - - - - org.springframework.security - spring-security-web - ${spring.security.version} - - - org.springframework.security - spring-security-core - ${spring.security.version} - - - org.springframework.security - 
spring-security-config - ${spring.security.version} - - - org.springframework.security - spring-security-ldap - ${spring.security.version} - - - - org.springframework.security.kerberos - spring-security-kerberos-core - 1.0.1.RELEASE - - - org.springframework.security.kerberos - spring-security-kerberos-web - 1.0.1.RELEASE - - - org.springframework.security.kerberos - spring-security-kerberos-client - 1.0.1.RELEASE - - - org.apache.httpcomponents - httpclient - - - - - org.springframework.boot - spring-boot-starter - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-logging - - - - - org.springframework.boot - spring-boot-starter-log4j - 1.3.8.RELEASE - - - org.springframework.boot - spring-boot-starter-web - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-security - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-actuator - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-jetty - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-jersey - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-freemarker - ${spring-boot.version} - - - org.springframework.boot - spring-boot-autoconfigure - ${spring-boot.version} - - - org.springframework.boot - spring-boot-configuration-processor - ${spring-boot.version} - - - org.glassfish.jersey.media - jersey-media-json-jettison - ${jersey.version} - - - guava - com.google.guava - 25.0-jre - - - org.codehaus.jackson - jackson-core-asl - 1.9.13 - - - org.codehaus.jackson - jackson-mapper-asl - 1.9.13 - - - - javax.servlet - javax.servlet-api - 3.1.0 - - - org.apache.solr - solr-solrj - ${solr.version} - - - org.apache.solr - solr-core - ${solr.version} - - - * - * - - - - - org.apache.lucene - lucene-core - ${solr.version} - - - org.apache.lucene - lucene-analyzers-common - ${solr.version} - - - - org.apache.hadoop - hadoop-auth - 3.0.0 - - - commons-io - commons-io - 2.4 - - - org.apache.ambari - ambari-logsearch-appender - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-solr - ${project.version} - - - org.apache.ambari - ambari-logsearch-config-zookeeper - ${project.version} - - - org.apache.ambari - ambari-logsearch-logfeeder - ${project.version} - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.apache.ambari - ambari-metrics-common - - - com.fasterxml.woodstox - woodstox-core - - - javax.servlet.jsp - jsp-api - - - com.sun.jersey - jetty-util - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - org.eclipse.jetty - jetty-server - - - javax.servlet - servlet-api - - - guava - com.google.guava - - - javax.inject - javax.inject - - - - - commons-cli - commons-cli - 1.3.1 - - - commons-codec - commons-codec - 1.8 - - - commons-lang - commons-lang - 2.5 - - - cglib - cglib - 3.2.4 - - - io.swagger - swagger-annotations - ${swagger.version} - - - io.swagger - swagger-core - ${swagger.version} - - - io.swagger - swagger-jersey2-jaxrs - ${swagger.version} - - - javax.ws.rs - jsr311-api - - - - - io.swagger - swagger-models - ${swagger.version} - - - org.webjars - swagger-ui - 2.2.2 - - - org.springframework.data - spring-data-solr - ${spring-data-solr.version} - - - org.springframework.data - spring-data-commons - 1.13.11.RELEASE - - - org.springframework - spring-context-support - ${spring.version} - - - io.jsonwebtoken - jjwt - ${jjwt.version} - - - org.bouncycastle - 
bcprov-jdk15on - 1.55 - - - org.bouncycastle - bcpkix-jdk15on - 1.55 - - - org.apache.ambari - ambari-logsearch-web - ${project.version} - - - commons-fileupload - commons-fileupload - 1.3.3 - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.dataformat - jackson-dataformat-xml - - - - org.springframework.boot - spring-boot-starter-tomcat - ${spring-boot.version} - provided - - - org.apache.tomcat.embed - tomcat-embed-el - 8.5.16 - provided - - - commons-beanutils - commons-beanutils - 1.7.0 - provided - - - commons-logging - commons-logging - 1.1.1 - provided - - - diff --git a/ambari-logsearch/ambari-logsearch-server/run.sh b/ambari-logsearch/ambari-logsearch-server/run.sh deleted file mode 100755 index 2de05364c25..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/run.sh +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -echo " -██╗ ██████╗ ██████╗ ███████╗███████╗ █████╗ ██████╗ ██████╗██╗ ██╗ -██║ ██╔═══██╗██╔════╝ ██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██║ ██║ -██║ ██║ ██║██║ ███╗ ███████╗█████╗ ███████║██████╔╝██║ ███████║ -██║ ██║ ██║██║ ██║ ╚════██║██╔══╝ ██╔══██║██╔══██╗██║ ██╔══██║ -███████╗╚██████╔╝╚██████╔╝ ███████║███████╗██║ ██║██║ ██║╚██████╗██║ ██║ -╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ -" -mvn clean package -DskipTests spring-boot:run diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.html deleted file mode 100755 index fecab20513d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.html +++ /dev/null @@ -1,24 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.menu-bottom.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.menu-bottom.html deleted file mode 100755 index 3359a460a48..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.menu-bottom.html +++ /dev/null @@ -1,25 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.menu-top.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.menu-top.html deleted file mode 100755 index 0886cee37a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/admin-extra.menu-top.html +++ /dev/null @@ -1,25 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/elevate.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/elevate.xml deleted file mode 100644 index 25d5cebe4fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/elevate.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git 
a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/enumsConfig.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/enumsConfig.xml deleted file mode 100644 index 458ee7e256e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/enumsConfig.xml +++ /dev/null @@ -1,28 +0,0 @@ - - - - - UNKNOWN - TRACE - DEBUG - INFO - WARN - ERROR - FATAL - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/managed-schema deleted file mode 100644 index fd90adab269..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/managed-schema +++ /dev/null @@ -1,143 +0,0 @@ - - - - - id - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/solrconfig.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/solrconfig.xml deleted file mode 100644 index 8f541212a28..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/audit_logs/conf/solrconfig.xml +++ /dev/null @@ -1,1886 +0,0 @@ - - - - - - - - - 7.4.0 - - - - - - - - - - - - - - - - - - - - - - - ${solr.data.dir:} - - - - - - - - - ${solr.hdfs.home:} - - ${solr.hdfs.confdir:} - - ${solr.hdfs.blockcache.enabled:true} - - ${solr.hdfs.blockcache.global:true} - - - - - - - - - - true - managed-schema - - - - - - - - - - 10000 - - - - - 50 - - - - - - - - - - - - - - - 5 - 5 - - - - - - - ${solr.lock.type:native} - - - - - - - - - - - - - - - - true - - - - - - - - - - - - - - - - ${solr.ulog.dir:} - - - - - ${solr.autoCommit.maxTime:15000} - false - - - - - - ${solr.autoSoftCommit.maxTime:5000} - - - - - - - - - - - - - - - - 1024 - - - - - - - - - - - - - - - - - - - - - - - - - true - - - - - - 20 - - - 200 - - - - - - - - - - - - static firstSearcher warming in solrconfig.xml - - - - - - false - - - 2 - - - - - - - - - - - - - - - - - - - - - - solr-data-config.xml - - - - - - - - explicit - 10 - text - - - - - - - - - - - - - - explicit - json - true - text - - - - - - - - true - json - true - - - - - - - - explicit - - - velocity - browse - layout - - - edismax - *:* - 10 - *,score - - - on - 1 - - - - - - - text - add-unknown-fields-to-the-schema - - - - - - - - - - - - - true - ignored_ - - - true - links - ignored_ - - - - - - - - - - - - - - - - - - - - - - - - solrpingquery - - - all - - - - - - - - - explicit - true - - - - - - - - - - - - - - - - key_lower_case - - - - - - default - text - solr.DirectSolrSpellChecker - - internal - - 0.5 - - 2 - - 1 - - 5 - - 4 - - 0.01 - - - - - - wordbreak - solr.WordBreakSolrSpellChecker - name - true - true - 10 - - - - - - - - - - - - - - - - text - - default - wordbreak - on - true - 10 - 5 - 5 - true - true - 10 - 5 - - - spellcheck - - - - - - mySuggester - FuzzyLookupFactory - DocumentDictionaryFactory - cat - price - string - - - - - - true - 10 - - - suggest - - - - - - - - - text - true - - - tvComponent - - - - - - - lingo - - - org.carrot2.clustering.lingo.LingoClusteringAlgorithm - - - clustering/carrot2 - - - - - stc - org.carrot2.clustering.stc.STCClusteringAlgorithm - - - - - kmeans - 
org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm - - - - - - - true - true - - name - - id - - features - - true - - - - false - - - edismax - - text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4 - - *:* - 10 - *,score - - - clustering - - - - - - - - - - true - false - - - terms - - - - - - - - string - elevate.xml - - - - - - explicit - text - - - elevator - - - - - - - - - - - 100 - - - - - - - - 70 - - 0.5 - - [-\w ,/\n\"']{20,200} - - - - - - - ]]> - ]]> - - - - - - - - - - - - - - - - - - - - - - - - ,, - ,, - ,, - ,, - ,]]> - ]]> - - - - - - 10 - .,!? - - - - - - - WORD - - - en - US - - - - - - - - - - - - _ttl_ - +7DAYS - - - 86400 - _ttl_ - _expire_at_ - - - _expire_at_ - - - - - - - - - - yyyy-MM-dd'T'HH:mm:ss.SSSZ - yyyy-MM-dd'T'HH:mm:ss,SSSZ - yyyy-MM-dd'T'HH:mm:ss.SSS - yyyy-MM-dd'T'HH:mm:ss,SSS - yyyy-MM-dd'T'HH:mm:ssZ - yyyy-MM-dd'T'HH:mm:ss - yyyy-MM-dd'T'HH:mmZ - yyyy-MM-dd'T'HH:mm - yyyy-MM-dd HH:mm:ss.SSSZ - yyyy-MM-dd HH:mm:ss,SSSZ - yyyy-MM-dd HH:mm:ss.SSS - yyyy-MM-dd HH:mm:ss,SSS - yyyy-MM-dd HH:mm:ssZ - yyyy-MM-dd HH:mm:ss - yyyy-MM-dd HH:mmZ - yyyy-MM-dd HH:mm - yyyy-MM-dd - - - - key_lower_case - - java.lang.Boolean - boolean - - - java.util.Date - tdate - - - java.lang.Long - java.lang.Integer - tlong - - - java.lang.Number - tdouble - - - - - - - - - - - - - - - - - - - - - - - - text/plain; charset=UTF-8 - - - - - ${velocity.template.base.dir:} - - - - - 5 - - - - - - - - - - - - - - - - - - *:* - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.html deleted file mode 100755 index fecab20513d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.html +++ /dev/null @@ -1,24 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.menu-bottom.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.menu-bottom.html deleted file mode 100755 index 3359a460a48..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.menu-bottom.html +++ /dev/null @@ -1,25 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.menu-top.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.menu-top.html deleted file mode 100755 index 0886cee37a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/admin-extra.menu-top.html +++ /dev/null @@ -1,25 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/elevate.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/elevate.xml deleted file mode 100644 index 25d5cebe4fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/elevate.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/enumsConfig.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/enumsConfig.xml deleted file mode 100644 index 458ee7e256e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/enumsConfig.xml +++ /dev/null @@ -1,28 +0,0 @@ - - 
- - - UNKNOWN - TRACE - DEBUG - INFO - WARN - ERROR - FATAL - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/managed-schema deleted file mode 100644 index b1fda991cfc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/managed-schema +++ /dev/null @@ -1,139 +0,0 @@ - - - - - id - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/solrconfig.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/solrconfig.xml deleted file mode 100644 index 9f1c36d44be..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/hadoop_logs/conf/solrconfig.xml +++ /dev/null @@ -1,1886 +0,0 @@ - - - - - - - - - 7.4.0 - - - - - - - - - - - - - - - - - - - - - - - ${solr.data.dir:} - - - - - - - - - ${solr.hdfs.home:} - - ${solr.hdfs.confdir:} - - ${solr.hdfs.blockcache.enabled:true} - - ${solr.hdfs.blockcache.global:true} - - - - - - - - - - true - managed-schema - - - - - - - - - - 10000 - - - - - 50 - - - - - - - - - - - - - - - 5 - 5 - - - - - - - ${solr.lock.type:native} - - - - - - - - - - - - - - - - true - - - - - - - - - - - - - - - - ${solr.ulog.dir:} - - - - - ${solr.autoCommit.maxTime:15000} - false - - - - - - ${solr.autoSoftCommit.maxTime:5000} - - - - - - - - - - - - - - - - 1024 - - - - - - - - - - - - - - - - - - - - - - - - - true - - - - - - 20 - - - 200 - - - - - - - - - - - - static firstSearcher warming in solrconfig.xml - - - - - - true - - - 2 - - - - - - - - - - - - - - - - - - - - - - solr-data-config.xml - - - - - - - - explicit - 10 - text - - - - - - - - - - - - - - explicit - json - true - text - - - - - - - - true - json - true - - - - - - - - explicit - - - velocity - browse - layout - - - edismax - *:* - 10 - *,score - - - on - 1 - - - - - - - text - add-unknown-fields-to-the-schema - - - - - - - - - - - - - true - ignored_ - - - true - links - ignored_ - - - - - - - - - - - - - - - - - - - - - - - - solrpingquery - - - all - - - - - - - - - explicit - true - - - - - - - - - - - - - - - - lowercase - - - - - - default - text - solr.DirectSolrSpellChecker - - internal - - 0.5 - - 2 - - 1 - - 5 - - 4 - - 0.01 - - - - - - wordbreak - solr.WordBreakSolrSpellChecker - name - true - true - 10 - - - - - - - - - - - - - - - - text - - default - wordbreak - on - true - 10 - 5 - 5 - true - true - 10 - 5 - - - spellcheck - - - - - - mySuggester - FuzzyLookupFactory - DocumentDictionaryFactory - cat - price - string - - - - - - true - 10 - - - suggest - - - - - - - - - text - true - - - tvComponent - - - - - - - lingo - - - org.carrot2.clustering.lingo.LingoClusteringAlgorithm - - - clustering/carrot2 - - - - - stc - org.carrot2.clustering.stc.STCClusteringAlgorithm - - - - - kmeans - org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm - - - - - - - true - true - - name - - id - - features - - true - - - - false - - - edismax - - text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4 - - *:* - 10 - *,score - - - clustering - - - - - - - - - - true - false - - - terms - - - - - - - - string - elevate.xml - - - - - - explicit - text - - - elevator - - - - - - - - - - - 
100 - - - - - - - - 70 - - 0.5 - - [-\w ,/\n\"']{20,200} - - - - - - - ]]> - ]]> - - - - - - - - - - - - - - - - - - - - - - - - ,, - ,, - ,, - ,, - ,]]> - ]]> - - - - - - 10 - .,!? - - - - - - - WORD - - - en - US - - - - - - - - - - - - _ttl_ - +7DAYS - - - 86400 - _ttl_ - _expire_at_ - - - _expire_at_ - - - - - - - - - - yyyy-MM-dd'T'HH:mm:ss.SSSZ - yyyy-MM-dd'T'HH:mm:ss,SSSZ - yyyy-MM-dd'T'HH:mm:ss.SSS - yyyy-MM-dd'T'HH:mm:ss,SSS - yyyy-MM-dd'T'HH:mm:ssZ - yyyy-MM-dd'T'HH:mm:ss - yyyy-MM-dd'T'HH:mmZ - yyyy-MM-dd'T'HH:mm - yyyy-MM-dd HH:mm:ss.SSSZ - yyyy-MM-dd HH:mm:ss,SSSZ - yyyy-MM-dd HH:mm:ss.SSS - yyyy-MM-dd HH:mm:ss,SSS - yyyy-MM-dd HH:mm:ssZ - yyyy-MM-dd HH:mm:ss - yyyy-MM-dd HH:mmZ - yyyy-MM-dd HH:mm - yyyy-MM-dd - - - - lowercase - - java.lang.Boolean - booleans - - - java.util.Date - tdates - - - java.lang.Long - java.lang.Integer - tlongs - - - java.lang.Number - tdoubles - - - - - - - - - - - - - - - - - - - - - - - - text/plain; charset=UTF-8 - - - - - ${velocity.template.base.dir:} - - - - - 5 - - - - - - - - - - - - - - - - - - *:* - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.html deleted file mode 100755 index fecab20513d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.html +++ /dev/null @@ -1,24 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.menu-bottom.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.menu-bottom.html deleted file mode 100755 index 3359a460a48..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.menu-bottom.html +++ /dev/null @@ -1,25 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.menu-top.html b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.menu-top.html deleted file mode 100755 index 0886cee37a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/admin-extra.menu-top.html +++ /dev/null @@ -1,25 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/elevate.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/elevate.xml deleted file mode 100644 index 25d5cebe4fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/elevate.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/managed-schema b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/managed-schema deleted file mode 100644 index a4773c4bb26..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/managed-schema +++ /dev/null @@ -1,95 +0,0 @@ - - - - - - - id - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/solrconfig.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/solrconfig.xml deleted file mode 100644 index 866b218905c..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/history/conf/solrconfig.xml +++ /dev/null @@ -1,530 +0,0 @@ - - - - 7.4.0 - - - - - - - - - - - - - - - - ${solr.data.dir:} - - - ${solr.hdfs.home:} - ${solr.hdfs.confdir:} - ${solr.hdfs.blockcache.enabled:true} - ${solr.hdfs.blockcache.global:true} - - - - - - true - managed-schema - - - - 10000 - 50 - - 5 - 5 - - ${solr.lock.type:native} - true - - - - - - ${solr.ulog.dir:} - - - ${solr.autoCommit.maxTime:15000} - false - - - ${solr.autoSoftCommit.maxTime:5000} - - - - 1024 - - - - - true - - 20 - 200 - - - - - - - - static firstSearcher warming in solrconfig.xml - - - - - true - - 2 - - - - - - - - - - - - - solr-data-config.xml - - - - - - explicit - 10 - text - - - - - - explicit - json - true - text - - - - - - true - json - true - - - - - - explicit - - - velocity - browse - layout - - - edismax - *:* - 10 - *,score - - - on - 1 - - - - - - - text - add-unknown-fields-to-the-schema - - - - - - - - - true - ignored_ - - - true - links - ignored_ - - - - - - - - - - solrpingquery - - - all - - - - - - explicit - true - - - - - - - - - key_lower_case - - - default - text - solr.DirectSolrSpellChecker - internal - 0.5 - 2 - 1 - 5 - 4 - 0.01 - - - - wordbreak - solr.WordBreakSolrSpellChecker - name - true - true - 10 - - - - - - text - default - wordbreak - on - true - 10 - 5 - 5 - true - true - 10 - 5 - - - spellcheck - - - - - - mySuggester - FuzzyLookupFactory - DocumentDictionaryFactory - cat - price - string - - - - - - true - 10 - - - suggest - - - - - - - - text - true - - - tvComponent - - - - - - lingo - org.carrot2.clustering.lingo.LingoClusteringAlgorithm - clustering/carrot2 - - - stc - org.carrot2.clustering.stc.STCClusteringAlgorithm - - - kmeans - org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm - - - - - - true - true - name - id - features - true - false - edismax - - text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4 - - *:* - 10 - *,score - - - clustering - - - - - - - - true - false - - - terms - - - - - string - elevate.xml - - - - - explicit - text - - - elevator - - - - - - - - 100 - - - - - - 70 - 0.5 - [-\w ,/\n\"']{20,200} - - - - - - ]]> - ]]> - - - - - - - - - - - - - - - - - - ,, - ,, - ,, - ,, - ,]]> - ]]> - - - - - - 10 - .,!? 
- - - - - - WORD - en - US - - - - - - - - - - - - - - yyyy-MM-dd'T'HH:mm:ss.SSSZ - yyyy-MM-dd'T'HH:mm:ss,SSSZ - yyyy-MM-dd'T'HH:mm:ss.SSS - yyyy-MM-dd'T'HH:mm:ss,SSS - yyyy-MM-dd'T'HH:mm:ssZ - yyyy-MM-dd'T'HH:mm:ss - yyyy-MM-dd'T'HH:mmZ - yyyy-MM-dd'T'HH:mm - yyyy-MM-dd HH:mm:ss.SSSZ - yyyy-MM-dd HH:mm:ss,SSSZ - yyyy-MM-dd HH:mm:ss.SSS - yyyy-MM-dd HH:mm:ss,SSS - yyyy-MM-dd HH:mm:ssZ - yyyy-MM-dd HH:mm:ss - yyyy-MM-dd HH:mmZ - yyyy-MM-dd HH:mm - yyyy-MM-dd - - - - key_lower_case - - java.lang.Boolean - booleans - - - java.util.Date - tdates - - - java.lang.Long - java.lang.Integer - tlongs - - - java.lang.Number - tdoubles - - - - - - - - - text/plain; charset=UTF-8 - - - - ${velocity.template.base.dir:} - - - - 5 - - - - *:* - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/solr.xml b/ambari-logsearch/ambari-logsearch-server/src/main/configsets/solr.xml deleted file mode 100644 index e0d93df75b3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/configsets/solr.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java deleted file mode 100644 index 7d42a928a33..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch; - -import org.springframework.boot.Banner; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.autoconfigure.data.rest.RepositoryRestMvcAutoConfiguration; -import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration; -import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.boot.system.ApplicationPidFileWriter; - -@SpringBootApplication( - scanBasePackages = {"org.apache.ambari.logsearch"}, - exclude = { - RepositoryRestMvcAutoConfiguration.class, - WebMvcAutoConfiguration.class, - SolrAutoConfiguration.class - } -) -public class LogSearch { - - public static void main(String[] args) { - - String pidFile = System.getenv("LOGSEARCH_PID_FILE") == null ? 
"logsearch.pid" : System.getenv("LOGSEARCH_PID_FILE"); - new SpringApplicationBuilder(LogSearch.class) - .bannerMode(Banner.Mode.OFF) - .listeners(new ApplicationPidFileWriter(pidFile)) - .web(true) - .run(args); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/auth/filter/AbstractJWTFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/auth/filter/AbstractJWTFilter.java deleted file mode 100644 index 9b02e2eed55..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/auth/filter/AbstractJWTFilter.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.auth.filter; - -import com.google.gson.Gson; -import io.jsonwebtoken.Claims; -import io.jsonwebtoken.ExpiredJwtException; -import io.jsonwebtoken.Jwts; -import io.jsonwebtoken.MalformedJwtException; -import io.jsonwebtoken.SignatureException; -import org.apache.ambari.logsearch.auth.model.JWTAuthenticationToken; -import org.apache.commons.lang.StringUtils; -import org.apache.http.client.utils.URIBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.authentication.AnonymousAuthenticationToken; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.context.SecurityContextHolder; -import org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter; -import org.springframework.security.web.util.matcher.RequestMatcher; - -import javax.servlet.FilterChain; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URISyntaxException; -import java.net.URLEncoder; -import java.security.cert.CertificateException; -import java.security.cert.CertificateFactory; -import java.security.cert.X509Certificate; -import java.security.interfaces.RSAPublicKey; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public abstract class AbstractJWTFilter extends AbstractAuthenticationProcessingFilter { - - private static final Logger LOG = LoggerFactory.getLogger(AbstractJWTFilter.class); - - private static final 
String PEM_HEADER = "-----BEGIN CERTIFICATE-----\n"; - private static final String PEM_FOOTER = "\n-----END CERTIFICATE-----"; - private static final String PROXY_LOGSEARCH_URL_PATH = "/logsearch"; - - protected AbstractJWTFilter(RequestMatcher requestMatcher) { - super(requestMatcher); - } - - @Override - public Authentication attemptAuthentication(HttpServletRequest request, HttpServletResponse response) throws AuthenticationException, IOException, ServletException { - if (StringUtils.isEmpty(getProvidedUrl())) { - throw new BadCredentialsException("Authentication provider URL must not be null or empty."); - } - if (StringUtils.isEmpty(getPublicKey())) { - throw new BadCredentialsException("Public key for signature validation must be provisioned."); - } - - try { - Claims claims = Jwts - .parser() - .setSigningKey(parseRSAPublicKey(getPublicKey())) - .parseClaimsJws(getJWTFromCookie(request)) - .getBody(); - String userName = claims.getSubject(); - LOG.info("USERNAME: " + userName); - LOG.info("URL = " + request.getRequestURL()); - if (StringUtils.isNotEmpty(claims.getAudience()) && !getAudiences().contains(claims.getAudience())) { - throw new IllegalArgumentException(String.format("Audience validation failed. (Not found: %s)", claims.getAudience())); - } - Authentication authentication = new JWTAuthenticationToken(userName, getPublicKey(), getAuthorities(userName)); - authentication.setAuthenticated(true); - SecurityContextHolder.getContext().setAuthentication(authentication); - return authentication; - } catch (ExpiredJwtException | MalformedJwtException | SignatureException | IllegalArgumentException e) { - LOG.info("URL = " + request.getRequestURL()); - LOG.warn("Error during JWT authentication: {}", e.getMessage()); - throw new BadCredentialsException(e.getMessage(), e); - } - } - - @Override - public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException { - Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); - if (!isAuthJwtEnabled() || isAuthenticated(authentication)) { - chain.doFilter(req, res); - return; - } - super.doFilter(req, res, chain); - } - - @Override - protected void successfulAuthentication(HttpServletRequest request, HttpServletResponse response, FilterChain chain, Authentication authResult) throws IOException, ServletException { - super.successfulAuthentication(request, response, chain, authResult); - String ajaxRequestHeader = request.getHeader("X-Requested-With"); - if (isWebUserAgent(request.getHeader("User-Agent")) && !"XMLHttpRequest".equals(ajaxRequestHeader)) { - chain.doFilter(request, response); - //response.sendRedirect(createForwardableURL(request) + getOriginalQueryString(request)); - } - } - - @Override - protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, AuthenticationException failed) throws IOException, ServletException { - super.unsuccessfulAuthentication(request, response, failed); - String ajaxRequestHeader = request.getHeader("X-Requested-With"); - String loginUrl = constructLoginURL(request); - if (loginUrl.endsWith("?doAs=anonymous")) { // HACK! - use proper solution, investigate which filter changes ? 
to & - loginUrl = StringUtils.removeEnd(loginUrl, "?doAs=anonymous"); - } - if (!isWebUserAgent(request.getHeader("User-Agent")) || "XMLHttpRequest".equals(ajaxRequestHeader)) { - Map mapObj = new HashMap<>(); - mapObj.put("knoxssoredirectURL", URLEncoder.encode(loginUrl, "UTF-8")); - response.setContentType("application/json"); - response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, new Gson().toJson(mapObj)); - } else { - response.sendRedirect(loginUrl); - } - } - - private String getJWTFromCookie(HttpServletRequest req) { - String serializedJWT = null; - Cookie[] cookies = req.getCookies(); - if (cookies != null) { - for (Cookie cookie : cookies) { - if (getCookieName().equals(cookie.getName())) { - LOG.info(getCookieName() + " cookie has been found and is being processed"); - serializedJWT = cookie.getValue(); - break; - } - } - } - return serializedJWT; - } - - private boolean isWebUserAgent(String userAgent) { - boolean isWeb = false; - List userAgentList = getUserAgentList(); - if (userAgentList != null && userAgentList.size() > 0) { - for (String ua : userAgentList) { - if (StringUtils.startsWithIgnoreCase(userAgent, ua)) { - isWeb = true; - break; - } - } - } - return isWeb; - } - - private RSAPublicKey parseRSAPublicKey(String pem) throws ServletException { - String fullPem = PEM_HEADER + pem + PEM_FOOTER; - try { - CertificateFactory fact = CertificateFactory.getInstance("X.509"); - ByteArrayInputStream is = new ByteArrayInputStream(fullPem.getBytes("UTF8")); - - X509Certificate cer = (X509Certificate) fact.generateCertificate(is); - return (RSAPublicKey) cer.getPublicKey(); - } catch (CertificateException ce) { - String message; - if (pem.startsWith(PEM_HEADER)) { - message = "CertificateException - be sure not to include PEM header " - + "and footer in the PEM configuration element."; - } else { - message = "CertificateException - PEM may be corrupt"; - } - throw new ServletException(message, ce); - } catch (UnsupportedEncodingException uee) { - throw new ServletException(uee); - } - } - - private String constructLoginURL(HttpServletRequest request) { - String delimiter = "?"; - if (getProvidedUrl().contains("?")) { - delimiter = "&"; - } - return getProvidedUrl() + delimiter - + getOriginalUrlQueryParam() + "=" - + createForwardableURL(request) + getOriginalQueryString(request); - } - - private String createForwardableURL(HttpServletRequest request) { - String xForwardedProto = request.getHeader("x-forwarded-proto"); - String xForwardedHost = request.getHeader("x-forwarded-host"); - String xForwardedContext = request.getHeader("x-forwarded-context"); - if (StringUtils.isNotBlank(xForwardedProto) - && StringUtils.isNotBlank(xForwardedHost) - && StringUtils.isNotBlank(xForwardedContext)) { - try { - URIBuilder builder = new URIBuilder(); - builder.setScheme(xForwardedProto) - .setHost(xForwardedHost) - .setPath(xForwardedContext + PROXY_LOGSEARCH_URL_PATH + request.getRequestURI()); - - return builder.build().toString(); - } catch (URISyntaxException ue) { - LOG.error("URISyntaxException while build xforward url ", ue); - return request.getRequestURL().toString(); - } - } else { - return request.getRequestURL().toString(); - } - } - - private String getOriginalQueryString(HttpServletRequest request) { - String originalQueryString = request.getQueryString(); - return (originalQueryString == null) ? "" : "?" 
+ originalQueryString; - } - - private boolean isAuthenticated(Authentication authentication) { - return authentication != null && !(authentication instanceof AnonymousAuthenticationToken) && authentication.isAuthenticated(); - } - - protected abstract String getPublicKey(); - - protected abstract String getProvidedUrl(); - - protected abstract boolean isAuthJwtEnabled(); - - protected abstract String getCookieName(); - - protected abstract String getOriginalUrlQueryParam(); - - protected abstract List getAudiences(); - - protected abstract Collection getAuthorities(String username); - - protected abstract List getUserAgentList(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/auth/model/JWTAuthenticationToken.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/auth/model/JWTAuthenticationToken.java deleted file mode 100644 index 5fb9f05e515..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/auth/model/JWTAuthenticationToken.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.auth.model; - -import org.springframework.security.authentication.AbstractAuthenticationToken; -import org.springframework.security.core.GrantedAuthority; - -import java.util.Collection; - -public class JWTAuthenticationToken extends AbstractAuthenticationToken { - - private String credential; - - private String principal; - - public JWTAuthenticationToken(String principal, String credential, Collection authorities) { - super(authorities); - this.principal = principal; - this.credential = credential; - } - - @Override - public Object getCredentials() { - return credential; - } - - @Override - public Object getPrincipal() { - return principal; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ACLPropertiesSplitter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ACLPropertiesSplitter.java deleted file mode 100644 index 10e9d10e392..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ACLPropertiesSplitter.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.common; - -import com.google.common.base.Splitter; -import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigZKHelper; -import org.apache.zookeeper.ZooDefs; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Id; - -import javax.inject.Named; -import java.util.ArrayList; -import java.util.List; - -@Named -public class ACLPropertiesSplitter { - - public List parseAcls(String aclStr) { - List acls = new ArrayList<>(); - List aclStrList = Splitter.on(",").omitEmptyStrings().trimResults().splitToList(aclStr); - for (String unparcedAcl : aclStrList) { - String[] parts = unparcedAcl.split(":"); - if (parts.length == 3) { - acls.add(new ACL(LogSearchConfigZKHelper.parsePermission(parts[2]), new Id(parts[0], parts[1]))); - } - } - return acls; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ApiDocStorage.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ApiDocStorage.java deleted file mode 100644 index d7b866661bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ApiDocStorage.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import io.swagger.jaxrs.config.BeanConfig; -import io.swagger.models.Swagger; -import io.swagger.util.Yaml; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -@Named -public class ApiDocStorage { - - private static final Logger LOG = LoggerFactory.getLogger(ApiDocStorage.class); - - private final Map swaggerMap = new ConcurrentHashMap<>(); - - @Inject - private BeanConfig beanConfig; - - @PostConstruct - private void postConstruct() { - Thread loadApiDocThread = new Thread("load_swagger_api_doc") { - @Override - public void run() { - LOG.info("Start thread to scan REST API doc from endpoints."); - Swagger swagger = beanConfig.getSwagger(); - beanConfig.configure(swagger); - beanConfig.scanAndRead(); - setSwagger(swagger); - try { - if (swagger != null) { - String yaml = Yaml.mapper().writeValueAsString(swagger); - StringBuilder b = new StringBuilder(); - String[] parts = yaml.split("\n"); - for (String part : parts) { - b.append(part); - b.append("\n"); - } - setSwaggerYaml(b.toString()); - } - } catch (Exception e) { - e.printStackTrace(); - } - LOG.info("Scanning REST API endpoints and generating docs has been successful."); - } - }; - loadApiDocThread.setDaemon(true); - loadApiDocThread.start(); - } - - public Swagger getSwagger() { - return (Swagger) swaggerMap.get("swaggerObject"); - } - - public void setSwagger(final Swagger swagger) { - swaggerMap.put("swaggerObject", swagger); - } - - public void setSwaggerYaml(final String swaggerYaml) { - swaggerMap.put("swaggerYaml", swaggerYaml); - } - - public String getSwaggerYaml() { - return (String) swaggerMap.get("swaggerYaml"); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java deleted file mode 100644 index 76d43e5fe92..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.configurer.SslConfigurer; -import org.apache.http.auth.InvalidCredentialsException; -import org.apache.log4j.Logger; -import org.glassfish.jersey.client.JerseyClient; -import org.glassfish.jersey.client.JerseyClientBuilder; -import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; - -/** - * Layer to send REST request to External server using jersey client - */ -@Named -public class ExternalServerClient { - - @Inject - private SslConfigurer sslConfigurer; - - private static Logger LOG = Logger.getLogger(ExternalServerClient.class); - private ThreadLocal localJerseyClient; - - @Inject - private AuthPropsConfig authPropsConfig; - - /** - * Send GET request to an external server - */ - public Object sendGETRequest(String loginUrl, Class klass, String username, String password) throws Exception { - if (localJerseyClient == null) { - localJerseyClient = new ThreadLocal() { - @Override - protected JerseyClient initialValue() { - return sslConfigurer.isKeyStoreSpecified() ? - new JerseyClientBuilder().sslContext(sslConfigurer.getSSLContext()).build() : - JerseyClientBuilder.createClient(); - } - }; - } - String url = authPropsConfig.getExternalAuthHostUrl() + loginUrl; - JerseyClient client = localJerseyClient.get(); - HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder() - .credentials(username, password) - .build(); - client.register(authFeature); - - WebTarget target = client.target(url); - LOG.debug("URL: " + url); - - Invocation.Builder invocationBuilder = target.request(); - try { - Response response = invocationBuilder.get(); - if (response.getStatus() != Response.Status.OK.getStatusCode() - && response.getStatus() != Response.Status.FOUND.getStatusCode()) { - throw new InvalidCredentialsException(String.format("External auth failed with status code: %d, response: %s", - response.getStatus(), response.readEntity(String.class))); - } - return response.readEntity(klass); - } catch (Exception e) { - throw new Exception(e.getCause()); - } finally { - localJerseyClient.remove(); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LabelFallbackHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LabelFallbackHandler.java deleted file mode 100644 index 4fff3723e09..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LabelFallbackHandler.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.common; - -import org.apache.ambari.logsearch.conf.UIMappingConfig; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.text.WordUtils; - -import javax.inject.Inject; -import javax.inject.Named; -import java.util.List; - -@Named -public class LabelFallbackHandler { - - private final UIMappingConfig uiMappingConfig; - - @Inject - public LabelFallbackHandler(UIMappingConfig uiMappingConfig) { - this.uiMappingConfig = uiMappingConfig; - } - - public String fallbackIfRequired(String field, String label, boolean replaceUnderscore, - boolean replaceUppercaseInWord, boolean capitalizeAll) { - if (isEnabled() && StringUtils.isBlank(label)) { - return fallback(field,replaceUnderscore, replaceUppercaseInWord, capitalizeAll); - } - return label; - } - - public String fallbackIfRequired(String field, String label, boolean replaceUnderscore, - boolean replaceUppercaseInWord, boolean capitalizeAll, List prefixesToRemove, List suffixesToRemove) { - if (isEnabled() && StringUtils.isBlank(label)) { - return fallback(field,replaceUnderscore, replaceUppercaseInWord, capitalizeAll, prefixesToRemove, suffixesToRemove); - } - return label; - } - - public String fallback(String field, boolean replaceUnderscore, boolean replaceUppercaseInWord, boolean capitalizeAll) { - String result = null; - if (StringUtils.isNotBlank(field)) { - if (replaceUppercaseInWord) { - result = capitalize(deCamelCase(field), false); - } - if (replaceUnderscore) { - result = capitalize(deUnderScore(result != null ? result : field), capitalizeAll); - } - } - return result; - } - - public String fallback(String field, boolean replaceUnderscore, boolean replaceUppercaseInWord, boolean capitalizeAll, - List prefixesToRemove, List suffixesToRemove) { - String fieldWithoutPrefixAndSuffix = null; - if (!CollectionUtils.isEmpty(prefixesToRemove)) { - for (String prefix : prefixesToRemove) { - if (StringUtils.isNotBlank(field) && field.startsWith(prefix)) { - fieldWithoutPrefixAndSuffix = field.substring(prefix.length()); - } - } - } - if (!CollectionUtils.isEmpty(suffixesToRemove)) { - for (String suffix : suffixesToRemove) { - if (StringUtils.isNotBlank(field) && field.endsWith(suffix)) { - fieldWithoutPrefixAndSuffix = field.substring(0, field.length() - suffix.length()); - } - } - } - return fallback(fieldWithoutPrefixAndSuffix != null ? fieldWithoutPrefixAndSuffix : field, replaceUnderscore, replaceUppercaseInWord, capitalizeAll); - } - - private String deUnderScore(String input) { - return input.replaceAll("_", " "); - } - - private String capitalize(String input, boolean capitalizeAll) { - if (capitalizeAll) { - return WordUtils.capitalizeFully(input); - } else { - Character firstLetter = Character.toUpperCase(input.charAt(0)); - return input.length() > 1 ? 
firstLetter + input.substring(1) : firstLetter.toString(); - } - } - - private String deCamelCase(String input) { - StringBuilder result = new StringBuilder(); - for(int i=0 ; i < input.length() ; i++) { - char c = input.charAt(i); - if(i != 0 && Character.isUpperCase(c)) { - result.append(' '); - } - result.append(c); - } - return result.toString(); - } - - public boolean isEnabled() { - return uiMappingConfig.isLabelFallbackEnabled(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java deleted file mode 100644 index 7db839f475a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.common; - -public class LogSearchConstants { - - public static final String LOGSEARCH_APPLICATION_NAME = "logsearch"; - public static final String LOGSEARCH_PROPERTIES_FILE = "logsearch.properties"; - public static final String LOGSEARCH_SESSION_ID = "LOGSEARCHSESSIONID"; - - // Log Levels - public static final String INFO = "INFO"; - public static final String WARN = "WARN"; - public static final String DEBUG = "DEBUG"; - public static final String ERROR = "ERROR"; - public static final String TRACE = "TRACE"; - public static final String FATAL = "FATAL"; - public static final String UNKNOWN = "UNKNOWN"; - - public static final String[] SUPPORTED_LOG_LEVELS = {FATAL, ERROR, WARN, INFO, DEBUG, TRACE, UNKNOWN}; - - // Application Constants - public static final String HOST = "H"; - public static final String COMPONENT = "C"; - public static final String SCROLL_TYPE_AFTER = "after"; - public static final String SCROLL_TYPE_BEFORE = "before"; - - // Seprator's - public static final String I_E_SEPRATOR = "\\|i\\:\\:e\\|"; - - //SUFFIX - public static final String NGRAM_PREFIX = "ngram_"; - - //Date Format for SOLR - public static final String SOLR_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss,SSS"; - public static final String SOLR_DATE_FORMAT_PREFIX_Z = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - - //Solr Order By - public static final String ASCENDING_ORDER = "asc"; - public static final String DESCENDING_ORDER = "desc"; - - // logfeeder - public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config"; - - public static final String SORT = "sort"; - - // info features constants - public static final String SHIPPER_CONFIG_API_KEY = "metadata_patterns"; - public static final String AUTH_FEATURE_KEY = "auth"; - - // service field / component label defaults - public static final 
String SERVICE_GROUP_LABELS_DEFAULTS = ""; - public static final String SERVICE_COMPONENT_LABELS_DEFAULTS = ""; - public static final String SERVICE_FIELD_LABELS_DEFAULTS = "log_message:Message,type:Component,logtime:Log Time,thread_name:Thread"; - public static final String SERVICE_FIELD_VISIBLE_DEFAULTS = "log_message,level,logtime,type"; - public static final String SERVICE_FIELD_EXCLUDES_DEFAULTS = "id,tags,text,message,seq_num,case_id,bundle_id,rowtype,event_count"; - public static final String SERVICE_FIELD_FILTERABLE_EXLUDE_DEFAULTS = ""; - public static final String SERVICE_FIELD_FALLBACK_PREFIX_DEFAULTS = "ws_,sdi_,std_"; - public static final String SERVICE_FIELD_FALLBACK_SUFFIX_DEFAULTS = "_i,_l,_s,_b"; - - // audit field / component label defaults - public static final String AUDIT_COMPONENT_LABELS_DEFAULTS = "ambari:Ambari,hdfs:Hdfs,RangerAudit:Ranger"; - public static final String AUDIT_FIELD_LABELS_DEFAULTS = ""; - public static final String AUDIT_FIELD_COMMON_LABELS_DEFAULTS = "enforcer:Access Enforcer,access:Access Type,cliIP:Client Ip,cliType:Client Type," + - "dst:DST,evtTime:Event Time,ip:IP,logtime:Log Time,sess:Session,ugi:UGI,reqUser:User"; - public static final String AUDIT_FIELD_VISIBLE_DEFAULTS = ""; - public static final String AUDIT_FIELD_VISIBLE_COMMON_DEFAULTS = "access,cliIP,evtTime,repo,resource,result,reqUser"; - public static final String AUDIT_FIELD_EXCLUDES_DEFAULTS = ""; - public static final String AUDIT_FIELD_EXCLUDES_COMMON_DEFAULTS = "tags,tags_str,seq_num"; - public static final String AUDIT_FIELD_FILTERABLE_EXCLUDES_DEFAULTS = ""; - public static final String AUDIT_FIELD_FILTERABLE_EXCLUDES_COMMON_DEFAULTS = ""; - public static final String AUDIT_FIELD_FALLBACK_PREFIX_DEFAULTS = "ws_,std_"; - public static final String AUDIT_FIELD_FALLBACK_SUFFIX_DEFAULTS = "_i,_l,_s,_b"; - - //Facet Constant - public static final String FACET_FIELD = "facet.field"; - public static final String FACET_PIVOT = "facet.pivot"; - public static final String FACET_PIVOT_MINCOUNT = "facet.pivot.mincount"; - public static final String FACET_INDEX = "index"; - - // Request params - public static final String REQUEST_PARAM_XAXIS = "xAxis"; - public static final String REQUEST_PARAM_YAXIS = "yAxis"; - public static final String REQUEST_PARAM_STACK_BY = "stackBy"; - public static final String REQUEST_PARAM_UNIT = "unit"; - public static final String REQUEST_PARAM_TOP = "top"; - public static final String REQUEST_PARAM_CLUSTER_NAMES = "clusters"; - public static final String REQUEST_PARAM_BUNDLE_ID = "bundle_id"; - public static final String REQUEST_PARAM_START_INDEX = "startIndex"; - public static final String REQUEST_PARAM_PAGE = "page"; - public static final String REQUEST_PARAM_PAGE_SIZE = "pageSize"; - public static final String REQUEST_PARAM_SORT_BY = "sortBy"; - public static final String REQUEST_PARAM_SORT_TYPE = "sortType"; - public static final String REQUEST_PARAM_START_TIME = "start_time"; - public static final String REQUEST_PARAM_END_TIME = "end_time"; - public static final String REQUEST_PARAM_FROM = "from"; - public static final String REQUEST_PARAM_TO = "to"; - public static final String REQUEST_PARAM_FIELD = "field"; - public static final String REQUEST_PARAM_FORMAT = "format"; - public static final String REQUEST_PARAM_LAST_PAGE = "lastPage"; - public static final String REQUEST_PARAM_I_MESSAGE = "includeMessage"; - public static final String REQUEST_PARAM_E_MESSAGE = "excludeMessage"; - public static final String REQUEST_PARAM_MUST_BE = "mustBe"; - public 
static final String REQUEST_PARAM_MUST_NOT = "mustNot"; - public static final String REQUEST_PARAM_INCLUDE_QUERY = "includeQuery"; - public static final String REQUEST_PARAM_EXCLUDE_QUERY = "excludeQuery"; - public static final String REQUEST_PARAM_ID = "id"; - public static final String REQUEST_PARAM_SCROLL_TYPE = "scrollType"; - public static final String REQUEST_PARAM_NUMBER_ROWS = "numberRows"; - public static final String REQUEST_PARAM_LEVEL = "level"; - public static final String REQUEST_PARAM_HOST_NAME = "host_name"; - public static final String REQUEST_PARAM_COMPONENT_NAME = "component_name"; - public static final String REQUEST_PARAM_FILE_NAME = "file_name"; - public static final String REQUEST_PARAM_KEYWORD = "find"; - public static final String REQUEST_PARAM_SOURCE_LOG_ID = "sourceLogId"; - public static final String REQUEST_PARAM_KEYWORD_TYPE = "keywordType"; - public static final String REQUEST_PARAM_TOKEN = "token"; - public static final String REQUEST_PARAM_FILTER_NAME = "filterName"; - public static final String REQUEST_PARAM_ROW_TYPE = "rowType"; - public static final String REQUEST_PARAM_UTC_OFFSET = "utcOffset"; - public static final String REQUEST_PARAM_HOSTS = "hostList"; - public static final String REQUEST_PARAM_USERS = "userList"; - public static final String REQUEST_PARAM_PAGE_DEFAULT_VALUE = "0"; - public static final String REQUEST_PARAM_PAGE_SIZE_DEFAULT_VALUE = "1000"; - public static final String REQUEST_PARAM_SHIPPER_CONFIG = "shipperConfig"; - public static final String REQUEST_PARAM_LOG_ID = "logId"; - public static final String REQUEST_PARAM_TEST_ENTRY = "testEntry"; -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java deleted file mode 100644 index b4b52b303fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import java.io.Serializable; - -import org.apache.ambari.logsearch.web.model.User; - -public class LogSearchContext implements Serializable { - - private static final long serialVersionUID = 1L; - - private User user; - - public User getUser() { - return user; - } - - public void setUser(User user) { - this.user = user; - } - - //------------------------------------------------------------------------------------------------------ - - private static final ThreadLocal contextThreadLocal = new ThreadLocal(); - - public static LogSearchContext getContext() { - return contextThreadLocal.get(); - } - - public static void setContext(LogSearchContext context) { - contextThreadLocal.set(context); - } - - public static void resetContext() { - contextThreadLocal.remove(); - } - - public static String getCurrentUsername() { - LogSearchContext context = LogSearchContext.getContext(); - if (context != null && context.getUser() != null) { - return context.getUser().getUsername(); - } - return null; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchLdapAuthorityMapper.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchLdapAuthorityMapper.java deleted file mode 100644 index e5d6d7f10d2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogSearchLdapAuthorityMapper.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.common; - -import org.apache.commons.lang.StringUtils; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.authority.SimpleGrantedAuthority; -import org.springframework.security.core.authority.mapping.GrantedAuthoritiesMapper; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -/** - * Class to map multiple LDAP groups to Log Search authorities. 
(defined in a map) - * Examples: - * LDAP person -> ROLE_USER - * LDAP user -> ROLE_USER - * LDAP admin -> ROLE_ADMIN - * ROLE_LDAP_ADMIN -> ROLE_ADMIN - */ -public class LogSearchLdapAuthorityMapper implements GrantedAuthoritiesMapper { - - private static final String ROLE_PREFIX = "ROLE_"; - - private final Map<String, String> groupRoleMap; - - public LogSearchLdapAuthorityMapper(Map<String, String> groupRoleMap) { - this.groupRoleMap = groupRoleMap; - } - - @Override - public Collection<? extends GrantedAuthority> mapAuthorities(Collection<? extends GrantedAuthority> authorities) { - if (!groupRoleMap.isEmpty() && !authorities.isEmpty()) { - List<SimpleGrantedAuthority> newAuthorities = new ArrayList<>(); - for (GrantedAuthority authority : authorities) { - String withoutRoleStringLowercase = StringUtils.removeStart(authority.toString(), ROLE_PREFIX).toLowerCase(); - String withoutRoleStringUppercase = StringUtils.removeStart(authority.toString(), ROLE_PREFIX).toUpperCase(); - String simpleRoleLowercaseString = authority.toString().toLowerCase(); - String simpleRoleUppercaseString = authority.toString().toUpperCase(); - if (addAuthoritiy(newAuthorities, withoutRoleStringLowercase)) - continue; - if (addAuthoritiy(newAuthorities, withoutRoleStringUppercase)) - continue; - if (addAuthoritiy(newAuthorities, simpleRoleLowercaseString)) - continue; - addAuthoritiy(newAuthorities, simpleRoleUppercaseString); - } - return newAuthorities; - } - return authorities; - } - - private boolean addAuthoritiy(List<SimpleGrantedAuthority> newAuthorities, String roleKey) { - if (groupRoleMap.containsKey(roleKey)) { - String role = groupRoleMap.get(roleKey); - if (role.contains(ROLE_PREFIX)) { - if (!containsAuthority(role.toUpperCase(), newAuthorities)) { - newAuthorities.add(new SimpleGrantedAuthority(role.toUpperCase())); - } - } else { - String finalRole = ROLE_PREFIX + role.toUpperCase(); - if (!containsAuthority(finalRole, newAuthorities)) { - newAuthorities.add(new SimpleGrantedAuthority(finalRole)); - } - } - return true; - } - return false; - } - - private boolean containsAuthority(String authorityStr, List<SimpleGrantedAuthority> authorities) { - boolean result = false; - for (SimpleGrantedAuthority authority : authorities) { - if (authorityStr.equals(authority.toString())) { - result = true; - break; - } - } - return result; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogType.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogType.java deleted file mode 100644 index 2e6cddb268e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/LogType.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ -package org.apache.ambari.logsearch.common; - -public enum LogType { - SERVICE("Service"), - AUDIT("Audit"); - - private String label; - - private LogType(String label) { - this.label = label; - } - - public String getLabel() { - return label; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java deleted file mode 100644 index b6aa2d058d7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.common; - -import java.util.Date; -import java.util.Timer; -import java.util.TimerTask; - -import org.apache.commons.lang.time.DateUtils; - -public class ManageStartEndTime extends TimerTask { - private static final int UPDATE_TIME_IN_SECONDS = 40; - - private static Date startDate; - private static Date endDate; - - public static void manage() { - Timer timer = new Timer(); - timer.schedule(new ManageStartEndTime(), 0, UPDATE_TIME_IN_SECONDS * 1000); - } - - private ManageStartEndTime() { - endDate = new Date(); - startDate = DateUtils.addHours(endDate, -1); - } - - @Override - public synchronized void run() { - synchronized (ManageStartEndTime.class) { - startDate = DateUtils.addSeconds(startDate, UPDATE_TIME_IN_SECONDS); - endDate = DateUtils.addHours(startDate, 1); - } - } - - public static synchronized Date[] getStartEndTime() { - return new Date[] {startDate, endDate}; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/Marker.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/Marker.java deleted file mode 100644 index 3e088ba9d1a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/Marker.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.common; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Target({ElementType.TYPE}) -@Retention(RetentionPolicy.RUNTIME) -public @interface Marker { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/MessageData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/MessageData.java deleted file mode 100644 index 34c83ee2a9a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/MessageData.java +++ /dev/null @@ -1,165 +0,0 @@ -/* -* Licensed to the Apache Software Foundation (ASF) under one -* or more contributor license agreements. See the NOTICE file -* distributed with this work for additional information -* regarding copyright ownership. The ASF licenses this file -* to you under the Apache License, Version 2.0 (the -* "License"); you may not use this file except in compliance -* with the License. You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, -* software distributed under the License is distributed on an -* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -* KIND, either express or implied. See the License for the -* specific language governing permissions and limitations -* under the License. -*/ - -package org.apache.ambari.logsearch.common; - -import javax.xml.bind.annotation.XmlRootElement; - -@XmlRootElement -public class MessageData implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - /** - * Message key - */ - protected String name; - /** - * Resource bundle key - */ - protected String rbKey; - /** - * Message description. Use rbKey for doing localized lookup - */ - protected String message; - /** - * Id of the object to which this message is related to - */ - protected Long objectId; - /** - * Name of the field or attribute to which this message is related to - */ - protected String fieldName; - - /** - * This method sets the value to the member attribute name. You - * cannot set null to the attribute. - * - * @param name - * Value to set member attribute name - */ - public void setName(String name) { - this.name = name; - } - - /** - * Returns the value for the member attribute name - * - * @return String - value of member attribute name. - */ - public String getName() { - return this.name; - } - - /** - * This method sets the value to the member attribute rbKey. You - * cannot set null to the attribute. - * - * @param rbKey - * Value to set member attribute rbKey - */ - public void setRbKey(String rbKey) { - this.rbKey = rbKey; - } - - /** - * Returns the value for the member attribute rbKey - * - * @return String - value of member attribute rbKey. - */ - public String getRbKey() { - return this.rbKey; - } - - /** - * This method sets the value to the member attribute message. 
You - * cannot set null to the attribute. - * - * @param message - * Value to set member attribute message - */ - public void setMessage(String message) { - this.message = message; - } - - /** - * Returns the value for the member attribute message - * - * @return String - value of member attribute message. - */ - public String getMessage() { - return this.message; - } - - /** - * This method sets the value to the member attribute objectId. You - * cannot set null to the attribute. - * - * @param objectId - * Value to set member attribute objectId - */ - public void setObjectId(Long objectId) { - this.objectId = objectId; - } - - /** - * Returns the value for the member attribute objectId - * - * @return Long - value of member attribute objectId. - */ - public Long getObjectId() { - return this.objectId; - } - - /** - * This method sets the value to the member attribute fieldName. You - * cannot set null to the attribute. - * - * @param fieldName - * Value to set member attribute fieldName - */ - public void setFieldName(String fieldName) { - this.fieldName = fieldName; - } - - /** - * Returns the value for the member attribute fieldName - * - * @return String - value of member attribute fieldName. - */ - public String getFieldName() { - return this.fieldName; - } - - /** - * This return the bean content in string format - * - * @return formatedStr - */ - public String toString() { - String str = "MessageData={"; - str += super.toString(); - str += "name={" + name + "} "; - str += "rbKey={" + rbKey + "} "; - str += "message={" + message + "} "; - str += "objectId={" + objectId + "} "; - str += "fieldName={" + fieldName + "} "; - str += "}"; - return str; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java deleted file mode 100644 index 4683df406f6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -public enum MessageEnums { - - // Common Errors - DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found"), - OPER_NOT_ALLOWED_FOR_STATE("logsearch.error.oper_not_allowed_for_state", "Operation not allowed in current state"), - OPER_NOT_ALLOWED_FOR_ENTITY("logsearch.error.oper_not_allowed_for_state", "Operation not allowed for entity"), - OPER_NO_PERMISSION("logsearch.error.oper_no_permission", "User doesn't have permission to perform this operation"), - DATA_NOT_UPDATABLE("logsearch.error.data_not_updatable", "Data not updatable"), - ERROR_CREATING_OBJECT("logsearch.error.create_object", "Error creating object"), - ERROR_DUPLICATE_OBJECT("logsearch.error.duplicate_object", "Error creating duplicate object"), - ERROR_SYSTEM("logsearch.error.system", "System Error. Please try later."), - SOLR_ERROR("logsearch.solr.error","Something went wrong, For more details check the logs or configuration."), - ZNODE_NOT_READY("logsearch.zk.znode.error", "ZNode is not available."), - ZK_CONFIG_NOT_READY("logsearch.zk.config.error", "Collection configuration has not uploaded yet"), - SOLR_COLLECTION_NOT_READY("logsearch.solr.collection.error", "Solr has not accessible yet for collection."), - CONFIGURATION_NOT_AVAILABLE("logsearch.config.not_available", "Log Search configuration is not available"), - CONFIGURATION_API_DISABLED("logsearch.config.api.disabled", "Log Search configuration is not available"), - SOLR_CONFIGURATION_API_SOLR_NOT_AVAILEBLE("logsearch.config.api.solr.not.available", "Solr as log level filter manager source is not available"), - // Common Validations - INVALID_PASSWORD("logsearch.validation.invalid_password", "Invalid password"), - INVALID_INPUT_DATA("logsearch.validation.invalid_input_data", "Invalid input data"), - NO_INPUT_DATA("logsearch.validation.no_input_data", "Input data is not provided"), - INPUT_DATA_OUT_OF_BOUND("logsearch.validation.data_out_of_bound", "Input data if out of bound"), - NO_NAME("logsearch.validation.no_name", "Name is not provided"), - NO_OR_INVALID_COUNTRY_ID("logsearch.validation.no_country_id", "Valid Country Id was not provided"), - NO_OR_INVALID_CITY_ID("logsearch.validation.no_city_id", "Valid City Id was not provided"), - NO_OR_INVALID_STATE_ID("logsearch.validation.no_state_id", "Valid State Id was not provided"); - - private String rbKey; - private String messageDesc; - - private MessageEnums(String rbKey, String messageDesc) { - this.rbKey = rbKey; - this.messageDesc = messageDesc; - } - - public MessageData getMessage() { - MessageData msg = new MessageData(); - msg.setName(this.toString()); - msg.setRbKey(rbKey); - msg.setMessage(messageDesc); - return msg; - } - - public MessageData getMessage(Long objectId, String fieldName) { - MessageData msg = new MessageData(); - msg.setName(this.toString()); - msg.setRbKey(rbKey); - msg.setMessage(messageDesc); - msg.setObjectId(objectId); - msg.setFieldName(fieldName); - return msg; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesSplitter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesSplitter.java deleted file mode 100644 index 57e33e9d335..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesSplitter.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.common; - -import com.google.common.base.Splitter; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; - -import javax.inject.Named; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -@Named -public class PropertiesSplitter { - - public List<String> parseList(String listStr) { - return parseList(listStr, ","); - } - - public Map<String, String> parseMap(String mapStr) { - return parseMap(mapStr, ",", ":"); - } - - private List<String> parseList(String listStr, String separator) { - return StringUtils.isNotBlank(listStr) ? Splitter.on(separator).splitToList(listStr) : new ArrayList<>(); - } - - public Map<String, String> parseMap(String mapStr, String separator, String keyValueSeparator) { - Map<String, String> resultMap = new HashMap<>(); - if (StringUtils.isNotBlank(mapStr)) { - List<String> keyValueList = parseList(mapStr, separator); - if (!keyValueList.isEmpty()) { - for (String keyValueElement : keyValueList) { - if (StringUtils.isNotEmpty(keyValueElement)) { - List<String> keyValueElementList = parseList(keyValueElement, keyValueSeparator); - if (!CollectionUtils.isEmpty(keyValueElementList) && keyValueElementList.size() >= 2 - && StringUtils.isNotBlank(keyValueElementList.get(0))) { - resultMap.put(keyValueElementList.get(0), keyValueElementList.get(1)); - } - } - } - } - } - return resultMap; - } - - public Map<String, Map<String, String>> parseMapInMap(String mapInMapStr) { - Map<String, Map<String, String>> mapInMap = new HashMap<>(); - Map<String, String> outerMap = parseMap(mapInMapStr, ";", "#"); - if (!outerMap.isEmpty()) { - for (Map.Entry<String, String> entry : outerMap.entrySet()) { - Map<String, String> keyValueMap = parseMap(entry.getValue()); - if (!keyValueMap.isEmpty()) { - mapInMap.put(entry.getKey(), keyValueMap); - } - } - } - return mapInMap; - } - - public Map<String, List<String>> parseListInMap(String listInMapStr) { - return parseListInMap(listInMapStr, ";", ":", ","); - } - - public Map<String, List<String>> parseListInMap(String listInMapStr, String mapSeparator, String keyValueSeparator, String listSeparator) { - Map<String, List<String>> listInMap = new HashMap<>(); - Map<String, String> typeKeyValueMap = parseMap(listInMapStr, mapSeparator, keyValueSeparator); - for (Map.Entry<String, String> entry : typeKeyValueMap.entrySet()) { - List<String> valuesList = parseList(entry.getValue(), listSeparator); - listInMap.put(entry.getKey(), valuesList); - } - return listInMap; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertyDescriptionStorage.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertyDescriptionStorage.java deleted file mode 100644 index bb40a914fea..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertyDescriptionStorage.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF)
under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.common; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.ambari.logsearch.model.response.PropertyDescriptionData; -import org.reflections.Reflections; -import org.reflections.scanners.FieldAnnotationsScanner; -import org.reflections.scanners.MethodAnnotationsScanner; -import org.springframework.beans.factory.annotation.Value; - -import javax.annotation.PostConstruct; -import javax.inject.Named; -import java.lang.reflect.Field; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Named -public class PropertyDescriptionStorage { - - private final Map<String, List<PropertyDescriptionData>> propertyDescriptions = new ConcurrentHashMap<>(); - - @Value("#{'${logsearch.doc.scan.prop.packages:org.apache.ambari.logsearch,org.apache.ambari.logfeeder}'.split(',')}") - @LogSearchPropertyDescription( - name = "logsearch.doc.scan.prop.packages", - description = "Comma separated list of packages for scanning @LogSearchPropertyDescription annotations.", - examples = {"org.apache.ambari.logsearch.mypackage"}, - defaultValue = "org.apache.ambari.logsearch,org.apache.ambari.logfeeder", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List<String> packagesToScan; - - @PostConstruct - public void postConstruct() { - Thread loadPropertyDescriptionsThread = new Thread("load_property_descriptions") { - @Override - public void run() { - fillPropertyDescriptions(); - } - }; - loadPropertyDescriptionsThread.setDaemon(true); - loadPropertyDescriptionsThread.start(); - } - - public Map<String, List<PropertyDescriptionData>> getPropertyDescriptions() { - return propertyDescriptions; - } - - private void fillPropertyDescriptions() { - List<PropertyDescriptionData> propertyDescriptionsList = getPropertyDescriptions(packagesToScan); - Map<String, List<PropertyDescriptionData>> mapToAdd = propertyDescriptionsList.stream() - .sorted((o1, o2) -> o1.getName().compareTo(o2.getName())) - .collect(Collectors.groupingBy(PropertyDescriptionData::getSource)); - propertyDescriptions.putAll(mapToAdd); - } - - private List<PropertyDescriptionData> getPropertyDescriptions(List<String> packagesToScan) { - List<PropertyDescriptionData> result = new ArrayList<>(); - for (String packageToScan : packagesToScan) { - Reflections reflections = new Reflections(packageToScan, new FieldAnnotationsScanner(), new MethodAnnotationsScanner()); - Set<Field> fields = reflections.getFieldsAnnotatedWith(LogSearchPropertyDescription.class); - for (Field field : fields) { - LogSearchPropertyDescription propDescription = field.getAnnotation(LogSearchPropertyDescription.class); - for (String source : propDescription.sources()) { - result.add(new
PropertyDescriptionData(propDescription.name(), propDescription.description(), propDescription.examples(), propDescription.defaultValue(), source)); - } - } - Set methods = reflections.getMethodsAnnotatedWith(LogSearchPropertyDescription.class); - for (Method method : methods) { - LogSearchPropertyDescription propDescription = method.getAnnotation(LogSearchPropertyDescription.class); - for (String source : propDescription.sources()) { - result.add(new PropertyDescriptionData(propDescription.name(), propDescription.description(), propDescription.examples(), propDescription.defaultValue(), source)); - } - } - } - return result; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ShipperConfigDescriptionStorage.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ShipperConfigDescriptionStorage.java deleted file mode 100644 index accf11a0ff3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/ShipperConfigDescriptionStorage.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import org.apache.ambari.logsearch.config.api.ShipperConfigElementDescription; -import org.apache.ambari.logsearch.model.response.ShipperConfigDescriptionData; -import org.reflections.Reflections; -import org.reflections.scanners.FieldAnnotationsScanner; - -import javax.annotation.PostConstruct; -import javax.inject.Named; -import java.lang.reflect.Field; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -@Named -public class ShipperConfigDescriptionStorage { - - private static final String SHIPPER_CONFIG_PACKAGE = "org.apache.ambari.logsearch.config.json.model.inputconfig.impl"; - - private final List shipperConfigDescription = new ArrayList<>(); - - @PostConstruct - public void postConstruct() { - Thread loadShipperConfigDescriptionThread = new Thread("load_shipper_config_description") { - @Override - public void run() { - fillShipperConfigDescriptions(); - } - }; - loadShipperConfigDescriptionThread.setDaemon(true); - loadShipperConfigDescriptionThread.start(); - } - - public List getShipperConfigDescription() { - return shipperConfigDescription; - } - - private void fillShipperConfigDescriptions() { - Reflections reflections = new Reflections(SHIPPER_CONFIG_PACKAGE, new FieldAnnotationsScanner()); - Set fields = reflections.getFieldsAnnotatedWith(ShipperConfigElementDescription.class); - for (Field field : fields) { - ShipperConfigElementDescription description = field.getAnnotation(ShipperConfigElementDescription.class); - shipperConfigDescription.add(new ShipperConfigDescriptionData(description.path(), description.description(), - description.examples(), description.defaultValue())); - } - - shipperConfigDescription.sort((o1, o2) -> o1.getPath().compareTo(o2.getPath())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/StatusMessage.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/StatusMessage.java deleted file mode 100644 index 94c591fb6bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/StatusMessage.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import javax.ws.rs.core.Response; - -public class StatusMessage { - public static StatusMessage with(int status) { - return new StatusMessage(status, null); - } - - public static StatusMessage with(Response.Status status, String message) { - return new StatusMessage(status.getStatusCode(), message); - } - - private int status; - private String message; - - private StatusMessage(int status, String message) { - this.status = status; - this.message = message; - } - - public int getStatus() { - return status; - } - - public void setStatus(int status) { - this.status = status; - } - - public int getStatusCode() { - return status; - } - - public String getMessage() { - return message; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java deleted file mode 100644 index 2658bcba994..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import io.swagger.jaxrs.config.BeanConfig; -import io.swagger.jaxrs.listing.SwaggerSerializers; - -import io.swagger.models.Info; -import io.swagger.models.License; -import org.apache.ambari.logsearch.rest.ServiceLogsResource; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class ApiDocConfig { - - private static final String DESCRIPTION = "Log aggregation, analysis, and visualization."; - private static final String VERSION = "1.0.0"; - private static final String TITLE = "Log Search REST API"; - private static final String LICENSE = "Apache 2.0"; - private static final String LICENSE_URL = "http://www.apache.org/licenses/LICENSE-2.0.html"; - private static final String BASE_PATH = "/api/v1"; - - @Bean - public SwaggerSerializers swaggerSerializers() { - return new SwaggerSerializers(); - } - - @Bean - public BeanConfig swaggerConfig() { - BeanConfig beanConfig = new BeanConfig(); - beanConfig.setSchemes(new String[]{"http", "https"}); - beanConfig.setBasePath(BASE_PATH); - beanConfig.setTitle(TITLE); - beanConfig.setDescription(DESCRIPTION); - beanConfig.setLicense(LICENSE); - beanConfig.setLicenseUrl(LICENSE_URL); - beanConfig.setScan(true); - beanConfig.setVersion(VERSION); - beanConfig.setResourcePackage(ServiceLogsResource.class.getPackage().getName()); - - License license = new License(); - license.setName(LICENSE); - license.setUrl(LICENSE_URL); - - Info info = new Info(); - info.setDescription(DESCRIPTION); - info.setTitle(TITLE); - info.setVersion(VERSION); - info.setLicense(license); - beanConfig.setInfo(info); - return beanConfig; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java deleted file mode 100644 index b7259d68a13..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import freemarker.template.TemplateException; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.PropertySource; -import org.springframework.context.support.ConversionServiceFactoryBean; -import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; -import org.springframework.ui.freemarker.FreeMarkerConfigurationFactoryBean; - -import java.io.IOException; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -@PropertySource(value = { - "classpath:default.properties", - "classpath:info.properties", - "classpath:"+ LOGSEARCH_PROPERTIES_FILE - } -) -public class ApplicationConfig { - - @Bean - public static PropertySourcesPlaceholderConfigurer propertyConfigurer() { - return new PropertySourcesPlaceholderConfigurer(); - } - - @Bean(name="conversionService") - public ConversionServiceFactoryBean conversionServiceFactoryBean() { - ConversionServiceFactoryBean conversionServiceFactoryBean = new ConversionServiceFactoryBean(); - conversionServiceFactoryBean.afterPropertiesSet(); - return conversionServiceFactoryBean; - } - - @Bean - public freemarker.template.Configuration freemarkerConfiguration() throws IOException, TemplateException { - FreeMarkerConfigurationFactoryBean factoryBean = new FreeMarkerConfigurationFactoryBean(); - factoryBean.setPreferFileSystemAccess(false); - factoryBean.setTemplateLoaderPath("classpath:/templates"); - factoryBean.afterPropertiesSet(); - return factoryBean.getObject(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/AuthPropsConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/AuthPropsConfig.java deleted file mode 100644 index 2b1ce35e76c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/AuthPropsConfig.java +++ /dev/null @@ -1,454 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import javax.inject.Inject; -import java.util.List; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class AuthPropsConfig { - - @Value("${logsearch.auth.file.enabled:true}") - @LogSearchPropertyDescription( - name = "logsearch.auth.file.enabled", - description = "Enable file based authentication (in json file at logsearch configuration folder).", - examples = {"true", "false"}, - defaultValue = "true", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - boolean authFileEnabled; - - @Value("${logsearch.auth.ldap.enabled:false}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.enabled", - description = "Enable LDAP based authentication (currently not supported).", - examples = {"true", "false"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - boolean authLdapEnabled; - - @Value("${logsearch.auth.simple.enabled:false}") - @LogSearchPropertyDescription( - name = "logsearch.auth.simple.enabled", - description = "Enable simple authentication. This means no password is required to log in.", - examples = {"true", "false"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - boolean authSimpleEnabled; - - @Value("${logsearch.auth.external_auth.enabled:false}") - @LogSearchPropertyDescription( - name = "logsearch.auth.external_auth.enabled", - description = "Enable external authentication (currently Ambari acts as an external authentication server).", - examples = {"true", "false"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - boolean authExternalEnabled; - - @Value("${logsearch.auth.external_auth.host_url:'http://ip:port'}") - @LogSearchPropertyDescription( - name = "logsearch.auth.external_auth.host_url", - description = "External authentication server URL (host and port).", - examples = {"https://c6401.ambari.apache.org:8080"}, - defaultValue = "http://ip:port", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String externalAuthHostUrl; - - @Value("${logsearch.auth.external_auth.login_url:/api/v1/users/$USERNAME/privileges?fields=*}") - @LogSearchPropertyDescription( - name = "logsearch.auth.external_auth.login_url", - description = "Login URL for external authentication server ($USERNAME parameter is replaced with the login username).", - examples = {"/api/v1/users/$USERNAME/privileges?fields=*"}, - defaultValue = "/api/v1/users/$USERNAME/privileges?fields=*", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String externalAuthLoginUrl; - - @Value("${logsearch.login.credentials.file:user_pass.json}") - @LogSearchPropertyDescription( - name = "logsearch.login.credentials.file", - description = "Name of the credential file which contains the users for file authentication (see: logsearch.auth.file.enabled).", - examples = {"logsearch-admin.json"}, - defaultValue = "user_pass.json", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String credentialsFile; - - @Value("${logsearch.auth.jwt.enabled:false}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.enabled", - description = "Enable JWT based authentication (e.g.: for KNOX).", - examples = {"true", "false"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private boolean authJwtEnabled; -
@Value("${logsearch.auth.jwt.provider_url:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.provider_url", - description = "URL to the JWT authentication server.", - examples = {"https://c6401.ambari.apache.org:8443/mypath"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String providedUrl; - - @Value("${logsearch.auth.jwt.public_key:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.public_key", - description = "PEM formatted public key for JWT token without the header and the footer.", - examples = {"MIGfMA0GCSqGSIb3DQEBA..."}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String publicKey; - - @Value("${logsearch.auth.jwt.cookie.name:hadoop-jwt}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.cookie.name", - description = "The name of the cookie that contains the JWT token.", - examples = {"hadoop-jwt"}, - defaultValue = "hadoop-jwt", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String cookieName; - @Value("${logsearch.auth.jwt.query.param.original_url:originalUrl}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.query.param.original_url", - description = "Name of the original request URL which is used to redirect to Log Search Portal.", - examples = {"myUrl"}, - defaultValue = "originalUrl", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String originalUrlQueryParam; - - @Value("#{'${logsearch.auth.jwt.audiances:}'.split(',')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.audiances", - description = "Comma separated list of acceptable audiences for the JWT token.", - examples = {"audiance1,audiance2"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List audiences; - - @Value("#{'${logsearch.auth.jwt.user.agents:Mozilla,Opera,Chrome}'.split(',')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.jwt.user.agents", - description = "Comma separated web user agent list. (Used as prefixes)", - examples = {"Mozilla,Chrome"}, - defaultValue = "Mozilla,Opera,Chrome", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List userAgentList; - - @Value("#{'${logsearch.roles.allowed:AMBARI.ADMINISTRATOR,CLUSTER.ADMINISTRATOR}'.split(',')}") - @LogSearchPropertyDescription( - name = "logsearch.roles.allowed", - description = "Comma separated roles for external authentication.", - examples = {"AMBARI.ADMINISTRATOR"}, - defaultValue = "AMBARI.ADMINISTRATOR,CLUSTER.ADMINISTRATOR", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List allowedRoles; - - @Value("${logsearch.auth.redirect.forward:false}") - @LogSearchPropertyDescription( - name = "logsearch.auth.redirect.forward", - description = "Forward redirects for HTTP calls. 
(useful in case of proxies)", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private boolean redirectForward; - - @Value("${logsearch.auth.trusted.proxy:false}") - @LogSearchPropertyDescription( - name = "logsearch.auth.trusted.proxy", - description = "A boolean property to enable/disable trusted-proxy 'knox' authentication", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private boolean trustedProxy; - - @Value("#{propertiesSplitter.parseList('${logsearch.auth.proxyuser.users:knox}')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.proxyuser.users", - description = "List of users which the trusted-proxy user ‘knox’ can proxy for", - examples = {"knox,hdfs"}, - defaultValue = "knox", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List proxyUsers; - - @Value("#{propertiesSplitter.parseList('${logsearch.auth.proxyuser.groups:*}')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.proxyuser.groups", - description = "List of user-groups which trusted-proxy user ‘knox’ can proxy for", - examples = {"admin,user"}, - defaultValue = "*", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List proxyUserGroups; - - @Value("#{propertiesSplitter.parseList('${logsearch.auth.proxyuser.hosts:*}')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.proxyuser.hosts", - description = "List of hosts from which trusted-proxy user ‘knox’ can connect", - examples = {"host1,host2"}, - defaultValue = "*", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List proxyUserHosts; - - @Value("#{propertiesSplitter.parseList('${logsearch.auth.proxyserver.ip:}')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.proxyserver.ip", - description = "IP of trusted Knox Proxy server(s) that Log Search will trust", - examples = {"192.168.0.1,192.168.0.2"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List proxyIp; - - @Value("${logsearch.authr.file.enabled:false}") - @LogSearchPropertyDescription( - name = "logsearch.authr.file.enabled", - description = "A boolean property to enable/disable file based authorization", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private boolean fileAuthorization; - - @Value("${logsearch.authr.role.file:roles.json}") - @LogSearchPropertyDescription( - name = "logsearch.authr.role.file", - description = "Simple file that contains user/role mappings.", - examples = {"logsearch-roles.json"}, - defaultValue = "roles.json", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String roleFile; - - @Inject - private LogSearchLdapAuthConfig ldapAuthConfig; - - public boolean isAuthFileEnabled() { - return authFileEnabled; - } - - public void setAuthFileEnabled(boolean authFileEnabled) { - this.authFileEnabled = authFileEnabled; - } - - public boolean isAuthLdapEnabled() { - return authLdapEnabled; - } - - public void setAuthLdapEnabled(boolean authLdapEnabled) { - this.authLdapEnabled = authLdapEnabled; - } - - public boolean isAuthSimpleEnabled() { - return authSimpleEnabled; - } - - public void setAuthSimpleEnabled(boolean authSimpleEnabled) { - this.authSimpleEnabled = authSimpleEnabled; - } - - public String getCredentialsFile() { - return credentialsFile; - } - - public void setCredentialsFile(String credentialsFile) { - this.credentialsFile = credentialsFile; - } - - public String getExternalAuthHostUrl() { - return externalAuthHostUrl; - } - - public void
setExternalAuthHostUrl(String externalAuthHostUrl) { - this.externalAuthHostUrl = externalAuthHostUrl; - } - - public String getExternalAuthLoginUrl() { - return externalAuthLoginUrl; - } - - public void setExternalAuthLoginUrl(String externalAuthLoginUrl) { - this.externalAuthLoginUrl = externalAuthLoginUrl; - } - - public boolean isAuthExternalEnabled() { - return authExternalEnabled; - } - - public void setAuthExternalEnabled(boolean authExternalEnabled) { - this.authExternalEnabled = authExternalEnabled; - } - - public boolean isAuthJwtEnabled() { - return authJwtEnabled; - } - - public void setAuthJwtEnabled(boolean authJwtEnabled) { - this.authJwtEnabled = authJwtEnabled; - } - - public String getProvidedUrl() { - return providedUrl; - } - - public void setProvidedUrl(String providedUrl) { - this.providedUrl = providedUrl; - } - - public String getPublicKey() { - return publicKey; - } - - public void setPublicKey(String publicKey) { - this.publicKey = publicKey; - } - - public String getCookieName() { - return cookieName; - } - - public void setCookieName(String cookieName) { - this.cookieName = cookieName; - } - - public String getOriginalUrlQueryParam() { - return originalUrlQueryParam; - } - - public void setOriginalUrlQueryParam(String originalUrlQueryParam) { - this.originalUrlQueryParam = originalUrlQueryParam; - } - - public List getAudiences() { - return audiences; - } - - public void setAudiences(List audiences) { - this.audiences = audiences; - } - - public List getAllowedRoles() { - return allowedRoles; - } - - public void setAllowedRoles(List allowedRoles) { - this.allowedRoles = allowedRoles; - } - - public boolean isRedirectForward() { - return redirectForward; - } - - public void setRedirectForward(boolean redirectForward) { - this.redirectForward = redirectForward; - } - - public List getUserAgentList() { - return this.userAgentList; - } - - public void setUserAgentList(List userAgentList) { - this.userAgentList = userAgentList; - } - - public boolean isTrustedProxy() { - return trustedProxy; - } - - public void setTrustedProxy(boolean trustedProxy) { - this.trustedProxy = trustedProxy; - } - - public List getProxyUsers() { - return proxyUsers; - } - - public void setProxyUsers(List proxyUsers) { - this.proxyUsers = proxyUsers; - } - - public List getProxyUserGroups() { - return proxyUserGroups; - } - - public void setProxyUserGroups(List proxyUserGroups) { - this.proxyUserGroups = proxyUserGroups; - } - - public List getProxyUserHosts() { - return proxyUserHosts; - } - - public void setProxyUserHosts(List proxyUserHosts) { - this.proxyUserHosts = proxyUserHosts; - } - - public List getProxyIp() { - return proxyIp; - } - - public void setProxyIp(List proxyIp) { - this.proxyIp = proxyIp; - } - - public boolean isFileAuthorization() { - return fileAuthorization; - } - - public void setFileAuthorization(boolean fileAuthorization) { - this.fileAuthorization = fileAuthorization; - } - - public String getRoleFile() { - return roleFile; - } - - public void setRoleFile(String roleFile) { - this.roleFile = roleFile; - } - - public LogSearchLdapAuthConfig getLdapAuthConfig() { - return ldapAuthConfig; - } - - public void setLdapAuthConfig(LogSearchLdapAuthConfig ldapAuthConfig) { - this.ldapAuthConfig = ldapAuthConfig; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java 
deleted file mode 100644 index 2765ebde9ab..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.conf.global.LogLevelFilterManagerState; -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchConfigApiConfig { - - @LogSearchPropertyDescription( - name = "logsearch.config.api.enabled", - description = "Enable config API feature and shipperconfig API endpoints.", - examples = {"false"}, - defaultValue = "true", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.config.api.enabled:true}") - private boolean configApiEnabled; - - @LogSearchPropertyDescription( - name = "logsearch.config.api.filter.solr.enabled", - description = "Use solr as a log level filter storage", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.config.api.filter.solr.enabled:false}") - public boolean solrFilterStorage; - - @LogSearchPropertyDescription( - name = "logsearch.config.api.filter.zk-only.enabled", - description = "Use zookeeper as a log level filter storage", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.config.api.filter.zk.enabled:false}") - public boolean zkFilterStorage; - - @Bean(name = "logLevelFilterManagerState") - public LogLevelFilterManagerState logLevelFilterManagerState() { - return new LogLevelFilterManagerState(); - } - - public boolean isConfigApiEnabled() { - return configApiEnabled; - } - - public void setConfigApiEnabled(boolean configApiEnabled) { - this.configApiEnabled = configApiEnabled; - } - - public boolean isSolrFilterStorage() { - return this.solrFilterStorage; - } - - public void setSolrFilterStorage(boolean solrFilterStorage) { - this.solrFilterStorage = solrFilterStorage; - } - - public boolean isZkFilterStorage() { - return zkFilterStorage; - } - - public void setZkFilterStorage(boolean zkFilterStorage) { - this.zkFilterStorage = zkFilterStorage; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigMapHolder.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigMapHolder.java deleted 
file mode 100644 index 29d60b2518a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigMapHolder.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.springframework.context.annotation.Configuration; -import org.springframework.core.env.Environment; -import org.springframework.core.env.MapPropertySource; -import org.springframework.core.env.PropertySource; -import org.springframework.web.context.support.StandardServletEnvironment; - -import javax.inject.Inject; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchConfigMapHolder { - - @Inject - private Environment environment; - - private Map logsearchProperties = new HashMap<>(); - - public Map getLogsearchProperties() { - if (logsearchProperties.isEmpty()) { - PropertySource propertySource = ((StandardServletEnvironment) environment) - .getPropertySources().get("class path resource [" + LOGSEARCH_PROPERTIES_FILE + "]"); - setLogsearchProperties(stringifyValues(((MapPropertySource) propertySource).getSource())); - } - return logsearchProperties; - } - - public void setLogsearchProperties(Map logsearchProperties) { - this.logsearchProperties = logsearchProperties; - } - - private Map stringifyValues(Map vars) { - return vars.entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> (String) e.getValue())); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpConfig.java deleted file mode 100644 index 4a7280da861..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpConfig.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchHttpConfig { - - @LogSearchPropertyDescription( - name = "logsearch.http.port", - description = "Log Search http port", - examples = {"61888", "8888"}, - defaultValue = "61888", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.http.port:61888}") - private int httpPort; - - @LogSearchPropertyDescription( - name = "logsearch.https.port", - description = "Log Search https port", - examples = {"61889", "8889"}, - defaultValue = "61889", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.https.port:61889}") - private int httpsPort; - - @LogSearchPropertyDescription( - name = "logsearch.protocol", - description = "Log Search Protocol (http or https)", - examples = {"http", "https"}, - defaultValue = "http", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.protocol:http}") - private String protocol; - - public String getProtocol() { - return protocol; - } - - public void setProtocol(String protocol) { - this.protocol = protocol; - } - - public int getHttpPort() { - return httpPort; - } - - public void setHttpPort(int httpPort) { - this.httpPort = httpPort; - } - - public int getHttpsPort() { - return httpsPort; - } - - public void setHttpsPort(int httpsPort) { - this.httpsPort = httpsPort; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java deleted file mode 100644 index 8d4f760798b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchHttpHeaderConfig { - - @Value("${logsearch.http.header.access-control-allow-origin:*}") - @LogSearchPropertyDescription( - name = "logsearch.http.header.access-control-allow-origin", - description = "Access-Control-Allow-Origin header for Log Search Server.", - examples = {"*", "http://c6401.ambari.apache.org"}, - defaultValue = "*", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String accessControlAllowOrigin; - - @Value("${logsearch.http.header.access-control-allow-headers:origin, content-type, accept, authorization}") - @LogSearchPropertyDescription( - name = "logsearch.http.header.access-control-allow-headers", - description = "Access-Control-Allow-Headers header for Log Search Server.", - examples = {"content-type, authorization"}, - defaultValue = "origin, content-type, accept, authorization", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String accessControlAllowHeaders; - - @Value("${logsearch.http.header.access-control-allow-credentials:true}") - @LogSearchPropertyDescription( - name = "logsearch.http.header.access-control-allow-credentials", - description = "Access-Control-Allow-Credentials header for Log Search Server.", - examples = {"true", "false"}, - defaultValue = "true", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String accessControlAllowCredentials; - - @Value("${logsearch.http.header.access-control-allow-methods:GET, POST, PUT, DELETE, OPTIONS, HEAD}") - @LogSearchPropertyDescription( - name = "logsearch.http.header.access-control-allow-methods", - description = "Access-Control-Allow-Methods header for Log Search Server.", - examples = {"GET, POST"}, - defaultValue = "GET, POST, PUT, DELETE, OPTIONS, HEAD", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String accessControlAllowMethods; - - public String getAccessControlAllowOrigin() { - return accessControlAllowOrigin; - } - - public void setAccessControlAllowOrigin(String accessControlAllowOrigin) { - this.accessControlAllowOrigin = accessControlAllowOrigin; - } - - public String getAccessControlAllowHeaders() { - return accessControlAllowHeaders; - } - - public void setAccessControlAllowHeaders(String accessControlAllowHeaders) { - this.accessControlAllowHeaders = accessControlAllowHeaders; - } - - public String getAccessControlAllowCredentials() { - return accessControlAllowCredentials; - } - - public void setAccessControlAllowCredentials(String accessControlAllowCredentials) { - this.accessControlAllowCredentials = accessControlAllowCredentials; - } - - public String getAccessControlAllowMethods() { - return accessControlAllowMethods; - } - - public void setAccessControlAllowMethods(String accessControlAllowMethods) { - this.accessControlAllowMethods = accessControlAllowMethods; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchJerseyResourceConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchJerseyResourceConfig.java deleted file mode 100644 index a3ce260a40f..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchJerseyResourceConfig.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import javax.ws.rs.ApplicationPath; - -import org.apache.ambari.logsearch.rest.ServiceLogsResource; -import org.glassfish.jersey.server.ResourceConfig; -import org.glassfish.jersey.servlet.ServletProperties; - -@ApplicationPath("/api/v1") -public class LogSearchJerseyResourceConfig extends ResourceConfig { - - public LogSearchJerseyResourceConfig() { - packages(ServiceLogsResource.class.getPackage().getName()); - register(com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider.class); - property(ServletProperties.FILTER_FORWARD_ON_404, true); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchLdapAuthConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchLdapAuthConfig.java deleted file mode 100644 index 5218062266a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchLdapAuthConfig.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import java.util.Map; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchLdapAuthConfig { - @Value("${logsearch.auth.ldap.url:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.url", - description = "URL of LDAP database.", - examples = {"ldap://localhost:389"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapUrl; - - @Value("${logsearch.auth.ldap.manager.dn:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.manager.dn", - description = "DN of the LDAP manager user (required if LDAP groups are used).", - examples = {"cn=admin,dc=apache,dc=org"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapManagerDn; - - @Value("${logsearch.auth.ldap.manager.password:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.manager.password", - description = "Password of the LDAP manager user.", - examples = {"mypassword"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapManagerPassword; - - @Value("${logsearch.auth.ldap.base.dn:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.base.dn", - description = "Base DN of LDAP database.", - examples = {"dc=apache,dc=org"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapBaseDn; - - @Value("${logsearch.auth.ldap.user.dn.pattern:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.user.dn.pattern", - description = "DN pattern that is used during login (the dn should contain the username); can be used instead of the user search filter.", - examples = {"uid={0},ou=people"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapUserDnPattern; - - @Value("${logsearch.auth.ldap.user.search.base:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.user.search.base", - description = "User search base for user search filter", - examples = {"ou=people"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapUserSearchBase; - - @Value("${logsearch.auth.ldap.user.search.filter:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.user.search.filter", - description = "Used to get a user based on an LDAP search (the username is the input); if it is empty, the user dn pattern is used.", - examples = {"uid={0}"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapUserSearchFilter; - - @Value("${logsearch.auth.ldap.group.search.base:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.group.search.base", - description = "Group search base - defines where to find LDAP groups. 
Won't do any authority/role mapping if this field is empty.", - examples = {"ou=people"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapGroupSearchBase; - - @Value("${logsearch.auth.ldap.group.search.filter:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.group.search.filter", - description = "Group search filter which is used to get membership data for a specific user", - examples = {"(memberUid={0})"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapGroupSearchFilter; - - @Value("${logsearch.auth.ldap.group.role.attribute:}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.group.role.attribute", - description = "Attribute for identifying LDAP groups (group name)", - examples = {"cn"}, - defaultValue = "cn", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapGroupRoleAttribute; - - @Value("${logsearch.auth.ldap.role.prefix:ROLE_}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.role.prefix", - description = "Role prefix that is added for LDAP groups (as authorities)", - examples = {"ROLE_"}, - defaultValue = "ROLE_", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapRolePrefix; - - @Value("${logsearch.auth.ldap.password.attribute:userPassword}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.password.attribute", - description = "Password attribute for LDAP authentication", - examples = {"password"}, - defaultValue = "userPassword", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String ldapPasswordAttribute; - - @Value("#{propertiesSplitter.parseMap('${logsearch.auth.ldap.group.role.map:ship_crew:ROLE_USER}')}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.group.role.map", - description = "Map of LDAP groups to Log Search roles", - examples = {"ROLE_CUSTOM1:ROLE_USER,ROLE_CUSTOM2:ROLE_ADMIN"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map ldapGroupRoleMap; - - @Value("${logsearch.auth.ldap.referral.method:ignore}") - @LogSearchPropertyDescription( - name = "logsearch.auth.ldap.referral.method", - description = "Set the method to handle referrals for LDAP", - examples = {"follow"}, - defaultValue = "ignore", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String referralMethod; - - public String getLdapUrl() { - return ldapUrl; - } - - public void setLdapUrl(String ldapUrl) { - this.ldapUrl = ldapUrl; - } - - public String getLdapBaseDn() { - return ldapBaseDn; - } - - public void setLdapBaseDn(String ldapBaseDn) { - this.ldapBaseDn = ldapBaseDn; - } - - public String getLdapUserDnPattern() { - return ldapUserDnPattern; - } - - public void setLdapUserDnPattern(String ldapUserDnPattern) { - this.ldapUserDnPattern = ldapUserDnPattern; - } - - public String getLdapUserSearchBase() { - return ldapUserSearchBase; - } - - public void setLdapUserSearchBase(String ldapUserSearchBase) { - this.ldapUserSearchBase = ldapUserSearchBase; - } - - public String getLdapUserSearchFilter() { - return ldapUserSearchFilter; - } - - public void setLdapUserSearchFilter(String ldapUserSearchFilter) { - this.ldapUserSearchFilter = ldapUserSearchFilter; - } - - public String getLdapGroupSearchBase() { - return ldapGroupSearchBase; - } - - public void setLdapGroupSearchBase(String ldapGroupSearchBase) { - this.ldapGroupSearchBase = ldapGroupSearchBase; - } - - public String getLdapGroupSearchFilter() { - return ldapGroupSearchFilter; - } - - public void setLdapGroupSearchFilter(String 
ldapGroupSearchFilter) { - this.ldapGroupSearchFilter = ldapGroupSearchFilter; - } - - public String getLdapGroupRoleAttribute() { - return ldapGroupRoleAttribute; - } - - public void setLdapGroupRoleAttribute(String ldapGroupRoleAttribute) { - this.ldapGroupRoleAttribute = ldapGroupRoleAttribute; - } - - public String getLdapRolePrefix() { - return ldapRolePrefix; - } - - public void setLdapRolePrefix(String ldapRolePrefix) { - this.ldapRolePrefix = ldapRolePrefix; - } - - public String getLdapPasswordAttribute() { - return ldapPasswordAttribute; - } - - public void setLdapPasswordAttribute(String ldapPasswordAttribute) { - this.ldapPasswordAttribute = ldapPasswordAttribute; - } - - public String getLdapManagerDn() { - return ldapManagerDn; - } - - public void setLdapManagerDn(String ldapManagerDn) { - this.ldapManagerDn = ldapManagerDn; - } - - public String getLdapManagerPassword() { - return ldapManagerPassword; - } - - public void setLdapManagerPassword(String ldapManagerPassword) { - this.ldapManagerPassword = ldapManagerPassword; - } - - public Map getLdapGroupRoleMap() { - return ldapGroupRoleMap; - } - - public void setLdapGroupRoleMap(Map ldapGroupRoleMap) { - this.ldapGroupRoleMap = ldapGroupRoleMap; - } - - public String getReferralMethod() { - return referralMethod; - } - - public void setReferralMethod(String referralMethod) { - this.referralMethod = referralMethod; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchServletConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchServletConfig.java deleted file mode 100644 index a7a27da9021..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchServletConfig.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.configurer.SslConfigurer; -import org.apache.ambari.logsearch.web.listener.LogSearchSessionListener; -import org.eclipse.jetty.server.Connector; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.server.ServerConnector; -import org.eclipse.jetty.util.ssl.SslContextFactory; -import org.glassfish.jersey.servlet.ServletContainer; -import org.glassfish.jersey.servlet.ServletProperties; -import org.springframework.boot.autoconfigure.web.ServerProperties; -import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory; -import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainer; -import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory; -import org.springframework.boot.context.embedded.jetty.JettyServerCustomizer; -import org.springframework.boot.web.servlet.ServletRegistrationBean; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import javax.inject.Inject; -import javax.servlet.http.HttpSessionListener; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_APPLICATION_NAME; -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_SESSION_ID; - -@Configuration -public class LogSearchServletConfig { - - private static final Integer SESSION_TIMEOUT = 60 * 30; - - @Inject - private ServerProperties serverProperties; - - @Inject - private LogSearchHttpConfig logSearchHttpConfig; - - @Inject - private SslConfigurer sslConfigurer; - - @Bean - public HttpSessionListener httpSessionListener() { - return new LogSearchSessionListener(); - } - - @Bean - public ServletRegistrationBean jerseyServlet() { - ServletRegistrationBean registration = new ServletRegistrationBean(new ServletContainer(), "/api/v1/*"); - registration.addInitParameter(ServletProperties.JAXRS_APPLICATION_CLASS, LogSearchJerseyResourceConfig.class.getName()); - return registration; - } - - @Bean - public EmbeddedServletContainerFactory containerFactory() { - final JettyEmbeddedServletContainerFactory jettyEmbeddedServletContainerFactory = new JettyEmbeddedServletContainerFactory() { - @Override - protected JettyEmbeddedServletContainer getJettyEmbeddedServletContainer(Server server) { - return new JettyEmbeddedServletContainer(server); - } - }; - jettyEmbeddedServletContainerFactory.setSessionTimeout(SESSION_TIMEOUT); - serverProperties.getSession().getCookie().setName(LOGSEARCH_SESSION_ID); - serverProperties.setDisplayName(LOGSEARCH_APPLICATION_NAME); - if ("https".equals(logSearchHttpConfig.getProtocol())) { - sslConfigurer.ensureStorePasswords(); - sslConfigurer.loadKeystore(); - jettyEmbeddedServletContainerFactory.addServerCustomizers((JettyServerCustomizer) server -> { - SslContextFactory sslContextFactory = sslConfigurer.getSslContextFactory(); - ServerConnector sslConnector = new ServerConnector(server, sslContextFactory); - sslConnector.setPort(logSearchHttpConfig.getHttpsPort()); - server.setConnectors(new Connector[]{sslConnector}); - }); - } else { - jettyEmbeddedServletContainerFactory.setPort(logSearchHttpConfig.getHttpPort()); - } - return jettyEmbeddedServletContainerFactory; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchSpnegoConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchSpnegoConfig.java 
deleted file mode 100644 index 16326a6a79e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchSpnegoConfig.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchSpnegoConfig { - - @LogSearchPropertyDescription( - name = "logsearch.hadoop.security.auth_to_local", - description = "Rules that will be applied on authentication names and map them into local usernames.", - examples = {"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//", "DEFAULT"}, - defaultValue = "DEFAULT", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.hadoop.security.auth_to_local:DEFAULT}") - private String nameRules; - - @LogSearchPropertyDescription( - name = "logsearch.admin.kerberos.token.valid.seconds", - description = "Kerberos token validity in seconds.", - examples = {"30"}, - defaultValue = "30", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.admin.kerberos.token.valid.seconds:30}") - private String tokenValid; - - @LogSearchPropertyDescription( - name = "logsearch.admin.kerberos.cookie.domain", - description = "Domain for Kerberos cookie.", - examples = {"c6401.ambari.apache.org", "localhost"}, - defaultValue = "localhost", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.admin.kerberos.cookie.domain:localhost}") - private String cookieDomain; - - @LogSearchPropertyDescription( - name = "logsearch.admin.kerberos.cookie.path", - description = "Cookie path of the kerberos cookie", - examples = {"/"}, - defaultValue = "/", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.admin.kerberos.cookie.path:/}") - private String cookiePath; - - @LogSearchPropertyDescription( - name = "logsearch.spnego.kerberos.principal", - description = "Principal for SPNEGO authentication for Http requests", - examples = {"myuser@EXAMPLE.COM"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.spnego.kerberos.principal:}") - private String principal; - - @LogSearchPropertyDescription( - name = "logsearch.spnego.kerberos.keytab", - description = "Keytab for SPNEGO authentication for Http requests.", - examples = {"/etc/security/keytabs/mykeytab.keytab"}, - defaultValue = "", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.spnego.kerberos.keytab:}") - private String keyTab; - - 
@LogSearchPropertyDescription( - name = "logsearch.spnego.kerberos.host", - description = "", - examples = {"c6401.ambari.apache.org", "localhost"}, - defaultValue = "localhost", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.spnego.kerberos.host:localhost}") - private String hostName; - - @LogSearchPropertyDescription( - name = "logsearch.spnego.kerberos.enabled", - description = "Enable SPNEGO based authentication for Log Search Server.", - examples = {"true", "false"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.spnego.kerberos.enabled:false}") - private boolean kerberosEnabled; - - public String getNameRules() { - return nameRules; - } - - public void setNameRules(String nameRules) { - this.nameRules = nameRules; - } - - public String getTokenValid() { - return tokenValid; - } - - public void setTokenValid(String tokenValid) { - this.tokenValid = tokenValid; - } - - public String getCookieDomain() { - return cookieDomain; - } - - public void setCookieDomain(String cookieDomain) { - this.cookieDomain = cookieDomain; - } - - public String getCookiePath() { - return cookiePath; - } - - public void setCookiePath(String cookiePath) { - this.cookiePath = cookiePath; - } - - public String getPrincipal() { - return principal; - } - - public void setPrincipal(String principal) { - this.principal = principal; - } - - public String getKeyTab() { - return keyTab; - } - - public void setKeyTab(String keyTab) { - this.keyTab = keyTab; - } - - public String getHostName() { - return hostName; - } - - public void setHostName(String hostName) { - this.hostName = hostName; - } - - public boolean isKerberosEnabled() { - return kerberosEnabled; - } - - public void setKerberosEnabled(boolean kerberosEnabled) { - this.kerberosEnabled = kerberosEnabled; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchSslConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchSslConfig.java deleted file mode 100644 index 15579b62397..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchSslConfig.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class LogSearchSslConfig { - - public static final String LOGSEARCH_CERT_DEFAULT_FOLDER = "/usr/lib/ambari-logsearch-portal/conf/keys"; - public static final String LOGSEARCH_CERT_DEFAULT_ALGORITHM = "sha256WithRSA"; - public static final String CREDENTIAL_STORE_PROVIDER_PATH = "hadoop.security.credential.provider.path"; - - @LogSearchPropertyDescription( - name = "logsearch.cert.algorithm", - description = "Algorithm to generate certificates for SSL (if needed).", - examples = {"sha256WithRSA"}, - defaultValue = LOGSEARCH_CERT_DEFAULT_ALGORITHM, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.cert.algorithm:" + LOGSEARCH_CERT_DEFAULT_ALGORITHM + "}") - private String certAlgorithm; - - @LogSearchPropertyDescription( - name = "logsearch.cert.folder.location", - description = "Folder where the generated certificates (SSL) will be located. Make sure the user of Log Search Server can access it.", - examples = {"/etc/mypath/keys"}, - defaultValue = LOGSEARCH_CERT_DEFAULT_FOLDER, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.cert.folder.location:" + LOGSEARCH_CERT_DEFAULT_FOLDER + "}") - private String certFolder; - - @LogSearchPropertyDescription( - name = CREDENTIAL_STORE_PROVIDER_PATH, - description = "Path to interrogate for protected credentials. (see: https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/CredentialProviderAPI.html)", - examples = {"localjceks://file/home/mypath/my.jceks"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${hadoop.security.credential.provider.path:}") - private String credentialStoreProviderPath; - - public String getCertAlgorithm() { - return certAlgorithm; - } - - public void setCertAlgorithm(String certAlgorithm) { - this.certAlgorithm = certAlgorithm; - } - - public String getCertFolder() { - return certFolder; - } - - public void setCertFolder(String certFolder) { - this.certFolder = certFolder; - } - - public String getCredentialStoreProviderPath() { - return credentialStoreProviderPath; - } - - public void setCredentialStoreProviderPath(String credentialStoreProviderPath) { - this.credentialStoreProviderPath = credentialStoreProviderPath; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java deleted file mode 100644 index 87dc22f5386..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java +++ /dev/null @@ -1,348 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import static javax.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE; -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_SESSION_ID; - -import java.util.ArrayList; -import java.util.List; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.common.LogSearchLdapAuthorityMapper; -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.conf.global.LogLevelFilterManagerState; -import org.apache.ambari.logsearch.conf.global.LogSearchConfigState; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.dao.RoleDao; -import org.apache.ambari.logsearch.web.authenticate.LogsearchAuthFailureHandler; -import org.apache.ambari.logsearch.web.authenticate.LogsearchAuthSuccessHandler; -import org.apache.ambari.logsearch.web.authenticate.LogsearchLogoutSuccessHandler; -import org.apache.ambari.logsearch.web.filters.ConfigStateProvider; -import org.apache.ambari.logsearch.web.filters.GlobalStateProvider; -import org.apache.ambari.logsearch.web.filters.LogsearchAuthenticationEntryPoint; -import org.apache.ambari.logsearch.web.filters.LogsearchCorsFilter; -import org.apache.ambari.logsearch.web.filters.LogsearchFilter; -import org.apache.ambari.logsearch.web.filters.LogsearchJWTFilter; -import org.apache.ambari.logsearch.web.filters.LogsearchKRBAuthenticationFilter; -import org.apache.ambari.logsearch.web.filters.LogsearchSecurityContextFormationFilter; -import org.apache.ambari.logsearch.web.filters.LogsearchTrustedProxyFilter; -import org.apache.ambari.logsearch.web.filters.LogsearchUsernamePasswordAuthenticationFilter; -import org.apache.ambari.logsearch.web.security.LogsearchAuthenticationProvider; -import org.apache.ambari.logsearch.web.security.LogsearchLdapAuthenticationProvider; -import org.apache.commons.lang.StringUtils; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.ldap.core.support.LdapContextSource; -import org.springframework.security.config.annotation.web.builders.HttpSecurity; -import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; -import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; -import org.springframework.security.ldap.authentication.BindAuthenticator; -import org.springframework.security.ldap.authentication.NullLdapAuthoritiesPopulator; -import org.springframework.security.ldap.search.FilterBasedLdapUserSearch; -import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator; -import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator; -import org.springframework.security.web.access.intercept.FilterSecurityInterceptor; -import org.springframework.security.web.authentication.www.BasicAuthenticationFilter; -import org.springframework.security.web.util.matcher.AntPathRequestMatcher; -import 
org.springframework.security.web.util.matcher.OrRequestMatcher; -import org.springframework.security.web.util.matcher.RequestMatcher; - -import com.google.common.collect.Lists; - -@Configuration -@EnableWebSecurity -public class SecurityConfig extends WebSecurityConfigurerAdapter { - - @Inject - private AuthPropsConfig authPropsConfig; - - @Inject - private LogSearchHttpHeaderConfig logSearchHttpHeaderConfig; - - @Inject - private SolrServiceLogPropsConfig solrServiceLogPropsConfig; - - @Inject - private SolrAuditLogPropsConfig solrAuditLogPropsConfig; - - @Inject - private SolrEventHistoryPropsConfig solrEventHistoryPropsConfig; - - @Inject - @Named("solrServiceLogsState") - private SolrCollectionState solrServiceLogsState; - - @Inject - @Named("solrAuditLogsState") - private SolrCollectionState solrAuditLogsState; - - @Inject - @Named("solrEventHistoryState") - private SolrCollectionState solrEventHistoryState; - - @Inject - @Named("logLevelFilterManagerState") - private LogLevelFilterManagerState logLevelFilterManagerState; - - @Inject - private LogSearchConfigState logSearchConfigState; - - @Inject - private LogSearchConfigApiConfig logSearchConfigApiConfig; - - @Inject - private RoleDao roleDao; - - @Override - protected void configure(HttpSecurity http) throws Exception { - http - .csrf().disable() - .authorizeRequests() - .requestMatchers(requestMatcher()) - .permitAll() - .antMatchers("/**") - .hasRole("USER") - .and() - .authenticationProvider(logsearchAuthenticationProvider()) - .httpBasic() - .authenticationEntryPoint(logsearchAuthenticationEntryPoint()) - .and() - .addFilterBefore(logsearchTrustedProxyFilter(), BasicAuthenticationFilter.class) - .addFilterAfter(logsearchKRBAuthenticationFilter(), LogsearchTrustedProxyFilter.class) - .addFilterBefore(logsearchUsernamePasswordAuthenticationFilter(), LogsearchKRBAuthenticationFilter.class) - .addFilterAfter(securityContextFormationFilter(), FilterSecurityInterceptor.class) - .addFilterAfter(logsearchEventHistoryFilter(), LogsearchSecurityContextFormationFilter.class) - .addFilterAfter(logsearchAuditLogFilter(), LogsearchSecurityContextFormationFilter.class) - .addFilterAfter(logsearchServiceLogFilter(), LogsearchSecurityContextFormationFilter.class) - .addFilterAfter(logSearchConfigStateFilter(), LogsearchSecurityContextFormationFilter.class) - .addFilterBefore(logsearchCorsFilter(), LogsearchSecurityContextFormationFilter.class) - .addFilterBefore(logsearchJwtFilter(), LogsearchSecurityContextFormationFilter.class) - .logout() - .logoutUrl("/logout") - .deleteCookies(getCookies()) - .logoutSuccessHandler(new LogsearchLogoutSuccessHandler()); - - if ((logSearchConfigApiConfig.isSolrFilterStorage() || logSearchConfigApiConfig.isZkFilterStorage()) - && !logSearchConfigApiConfig.isConfigApiEnabled()) - http.addFilterAfter(logSearchLogLevelFilterManagerFilter(), LogsearchSecurityContextFormationFilter.class); - } - - @Bean - public LdapContextSource ldapContextSource() { - if (authPropsConfig.isAuthLdapEnabled()) { - final LdapContextSource ldapContextSource = new LdapContextSource(); - ldapContextSource.setUrl(authPropsConfig.getLdapAuthConfig().getLdapUrl()); - ldapContextSource.setBase(authPropsConfig.getLdapAuthConfig().getLdapBaseDn()); - if (StringUtils.isNotBlank(authPropsConfig.getLdapAuthConfig().getLdapManagerDn())) { - ldapContextSource.setUserDn(authPropsConfig.getLdapAuthConfig().getLdapManagerDn()); - } - if (StringUtils.isNotBlank(authPropsConfig.getLdapAuthConfig().getLdapManagerPassword())) { - 
ldapContextSource.setPassword(authPropsConfig.getLdapAuthConfig().getLdapManagerPassword()); - } - ldapContextSource.setReferral(authPropsConfig.getLdapAuthConfig().getReferralMethod()); - ldapContextSource.setAnonymousReadOnly(true); - ldapContextSource.afterPropertiesSet(); - return ldapContextSource; - } - return null; - } - - @Bean - public BindAuthenticator bindAuthenticator() { - if (authPropsConfig.isAuthLdapEnabled()) { - final BindAuthenticator bindAuthenticator = new BindAuthenticator(ldapContextSource()); - if (StringUtils.isNotBlank(authPropsConfig.getLdapAuthConfig().getLdapUserDnPattern())) { - bindAuthenticator.setUserDnPatterns(new String[]{authPropsConfig.getLdapAuthConfig().getLdapUserDnPattern()}); - } - if (StringUtils.isNotBlank(authPropsConfig.getLdapAuthConfig().getLdapUserSearchFilter())) { - bindAuthenticator.setUserSearch(new FilterBasedLdapUserSearch( - authPropsConfig.getLdapAuthConfig().getLdapUserSearchBase(), - authPropsConfig.getLdapAuthConfig().getLdapUserSearchFilter(), - ldapContextSource())); - } - - return bindAuthenticator; - } - return null; - } - - @Bean - public LdapAuthoritiesPopulator ldapAuthoritiesPopulator() { - if (StringUtils.isNotBlank(authPropsConfig.getLdapAuthConfig().getLdapGroupSearchBase())) { - final DefaultLdapAuthoritiesPopulator ldapAuthoritiesPopulator = - new DefaultLdapAuthoritiesPopulator(ldapContextSource(), authPropsConfig.getLdapAuthConfig().getLdapGroupSearchBase()); - ldapAuthoritiesPopulator.setGroupSearchFilter(authPropsConfig.getLdapAuthConfig().getLdapGroupSearchFilter()); - ldapAuthoritiesPopulator.setGroupRoleAttribute(authPropsConfig.getLdapAuthConfig().getLdapGroupRoleAttribute()); - ldapAuthoritiesPopulator.setSearchSubtree(true); - ldapAuthoritiesPopulator.setConvertToUpperCase(true); - return ldapAuthoritiesPopulator; - } - return new NullLdapAuthoritiesPopulator(); - } - - @Bean - public LogsearchLdapAuthenticationProvider ldapAuthenticationProvider() { - if (authPropsConfig.isAuthLdapEnabled()) { - LogsearchLdapAuthenticationProvider provider = new LogsearchLdapAuthenticationProvider(bindAuthenticator(), ldapAuthoritiesPopulator()); - provider.setAuthoritiesMapper(new LogSearchLdapAuthorityMapper(authPropsConfig.getLdapAuthConfig().getLdapGroupRoleMap())); - return provider; - } - return null; - } - - @Bean - public LogsearchCorsFilter logsearchCorsFilter() { - return new LogsearchCorsFilter(logSearchHttpHeaderConfig); - } - - @Bean - public LogsearchSecurityContextFormationFilter securityContextFormationFilter() { - return new LogsearchSecurityContextFormationFilter(); - } - - @Bean - public LogsearchKRBAuthenticationFilter logsearchKRBAuthenticationFilter() { - return new LogsearchKRBAuthenticationFilter(requestMatcher()); - } - - @Bean - public LogsearchAuthenticationProvider logsearchAuthenticationProvider() { - return new LogsearchAuthenticationProvider(); - } - - @Bean - public LogsearchTrustedProxyFilter logsearchTrustedProxyFilter() throws Exception { - LogsearchTrustedProxyFilter filter = new LogsearchTrustedProxyFilter(requestMatcher(), authPropsConfig); - filter.setAuthenticationManager(authenticationManagerBean()); - return filter; - } - - @Bean - public LogsearchJWTFilter logsearchJwtFilter() throws Exception { - LogsearchJWTFilter filter = new LogsearchJWTFilter(requestMatcher(), authPropsConfig, roleDao); - filter.setAuthenticationManager(authenticationManagerBean()); - filter.setAuthenticationSuccessHandler(new LogsearchAuthSuccessHandler()); - filter.setAuthenticationFailureHandler(new 
LogsearchAuthFailureHandler()); - return filter; - } - - @Bean - public LogsearchAuthenticationEntryPoint logsearchAuthenticationEntryPoint() { - LogsearchAuthenticationEntryPoint entryPoint = new LogsearchAuthenticationEntryPoint("/login", authPropsConfig); - entryPoint.setForceHttps(false); - entryPoint.setUseForward(authPropsConfig.isRedirectForward()); - return entryPoint; - } - - @Bean - public LogsearchUsernamePasswordAuthenticationFilter logsearchUsernamePasswordAuthenticationFilter() throws Exception { - LogsearchUsernamePasswordAuthenticationFilter filter = new LogsearchUsernamePasswordAuthenticationFilter(); - filter.setAuthenticationSuccessHandler(new LogsearchAuthSuccessHandler()); - filter.setAuthenticationFailureHandler(new LogsearchAuthFailureHandler()); - filter.setAuthenticationManager(authenticationManagerBean()); - return filter; - } - - private LogsearchFilter logsearchServiceLogFilter() { - return new LogsearchFilter(serviceLogsRequestMatcher(), new GlobalStateProvider(solrServiceLogsState, solrServiceLogPropsConfig)); - } - - private LogsearchFilter logsearchAuditLogFilter() { - return new LogsearchFilter(auditLogsRequestMatcher(), new GlobalStateProvider(solrAuditLogsState, solrAuditLogPropsConfig)); - } - - private LogsearchFilter logsearchEventHistoryFilter() { - return new LogsearchFilter(eventHistoryRequestMatcher(), new GlobalStateProvider(solrEventHistoryState, solrEventHistoryPropsConfig)); - } - - private LogsearchFilter logSearchConfigStateFilter() { - RequestMatcher requestMatcher; - if (logSearchConfigApiConfig.isSolrFilterStorage() || logSearchConfigApiConfig.isZkFilterStorage()) { - requestMatcher = shipperConfigInputRequestMatcher(); - } else { - requestMatcher = logsearchConfigRequestMatcher(); - } - - return new LogsearchFilter(requestMatcher, new ConfigStateProvider(logSearchConfigState, logSearchConfigApiConfig.isConfigApiEnabled())); - } - - private LogsearchFilter logSearchLogLevelFilterManagerFilter() { - return new LogsearchFilter(logLevelFilterRequestMatcher(), requestUri -> - logLevelFilterManagerState.isLogLevelFilterManagerIsReady() ? 
null : StatusMessage.with(SERVICE_UNAVAILABLE, "Solr log level filter manager is not available")); - } - - @Bean - public RequestMatcher requestMatcher() { - List matchers = Lists.newArrayList(); - matchers.add(new AntPathRequestMatcher("/docs/**")); - matchers.add(new AntPathRequestMatcher("/swagger-ui/**")); - matchers.add(new AntPathRequestMatcher("/swagger.html")); - if (!authPropsConfig.isAuthJwtEnabled()) { - matchers.add(new AntPathRequestMatcher("/")); - } - matchers.add(new AntPathRequestMatcher("/login")); - matchers.add(new AntPathRequestMatcher("/logout")); - matchers.add(new AntPathRequestMatcher("/resources/**")); - matchers.add(new AntPathRequestMatcher("/index.html")); - matchers.add(new AntPathRequestMatcher("/favicon.ico")); - matchers.add(new AntPathRequestMatcher("/assets/**")); - matchers.add(new AntPathRequestMatcher("/templates/**")); - matchers.add(new AntPathRequestMatcher("/api/v1/info/**")); - matchers.add(new AntPathRequestMatcher("/api/v1/swagger.json")); - matchers.add(new AntPathRequestMatcher("/api/v1/swagger.yaml")); - return new OrRequestMatcher(matchers); - } - - public RequestMatcher serviceLogsRequestMatcher() { - return new AntPathRequestMatcher("/api/v1/service/logs/**"); - } - - public RequestMatcher auditLogsRequestMatcher() { - return new AntPathRequestMatcher("/api/v1/audit/logs/**"); - } - - public RequestMatcher eventHistoryRequestMatcher() { - return new AntPathRequestMatcher("/api/v1/history/**"); - } - - public RequestMatcher logsearchConfigRequestMatcher() { - return new AntPathRequestMatcher("/api/v1/shipper/**"); - } - - public RequestMatcher logLevelFilterRequestMatcher() { - return new AntPathRequestMatcher("/api/v1/shipper/filters/**"); - } - - public RequestMatcher shipperConfigInputRequestMatcher() { - return new AntPathRequestMatcher("/api/v1/shipper/input/**"); - } - - private String[] getCookies() { - List cookies = new ArrayList<>(); - cookies.add(LOGSEARCH_SESSION_ID); - if (authPropsConfig.isAuthJwtEnabled()) { - cookies.add(authPropsConfig.getCookieName()); - } - return cookies.toArray(new String[0]); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java deleted file mode 100644 index 43349a658c0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
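For context on the SecurityConfig removed above: its permit-all whitelist is an OrRequestMatcher built from AntPathRequestMatcher entries. The sketch below is illustrative only (it is not part of the removed sources; the class name and sample paths are invented) and shows how such a composed matcher decides which requests bypass the hasRole("USER") rule.

import java.util.Arrays;
import java.util.List;

import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
import org.springframework.security.web.util.matcher.OrRequestMatcher;
import org.springframework.security.web.util.matcher.RequestMatcher;

/** Illustrative only: how an OrRequestMatcher whitelist decides which paths skip the authentication rules. */
public class RequestMatcherSketch {

  public static void main(String[] args) {
    // A reduced version of the permit-all list registered by the configuration above.
    List<RequestMatcher> whitelist = Arrays.asList(
        new AntPathRequestMatcher("/login"),
        new AntPathRequestMatcher("/docs/**"),
        new AntPathRequestMatcher("/api/v1/info/**"));
    RequestMatcher permitAll = new OrRequestMatcher(whitelist);

    MockHttpServletRequest infoRequest = new MockHttpServletRequest("GET", "/api/v1/info/solr");
    infoRequest.setServletPath("/api/v1/info/solr");
    MockHttpServletRequest logsRequest = new MockHttpServletRequest("GET", "/api/v1/service/logs");
    logsRequest.setServletPath("/api/v1/service/logs");

    System.out.println(permitAll.matches(infoRequest));  // true: permitted without ROLE_USER
    System.out.println(permitAll.matches(logsRequest));  // false: falls through to hasRole("USER")
  }
}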
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.zookeeper.data.ACL; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import java.util.List; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class SolrAuditLogPropsConfig implements SolrPropsConfig { - - @Value("${logsearch.solr.audit.logs.url:}") - @LogSearchPropertyDescription( - name = "logsearch.solr.audit.logs.url", - description = "URL of Solr (non cloud mode) - currently unsupported.", - examples = {"localhost1:8868"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String solrUrl; - - @Value("${logsearch.solr.audit.logs.zk_connect_string:}") - @LogSearchPropertyDescription( - name = "logsearch.solr.audit.logs.zk_connect_string", - description = "Zookeeper connection string for Solr (used for audit log collection).", - examples = {"localhost1:2181,localhost2:2181/mysolr_znode"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String zkConnectString; - - @Value("${logsearch.solr.collection.audit.logs:audit_logs}") - @LogSearchPropertyDescription( - name = "logsearch.solr.collection.audit.logs", - description = "Name of Log Search audit collection.", - examples = {"audit_logs"}, - defaultValue = "audit_logs", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String collection; - - @Value("${logsearch.ranger.audit.logs.collection.name:}") - @LogSearchPropertyDescription( - name = "logsearch.ranger.audit.logs.collection.name", - description = "Name of Ranger audit collections (can be used if ranger audits managed by the same Solr which is used for Log Search).", - examples = {"ranger_audits"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String rangerCollection; - - @Value("${logsearch.solr.audit.logs.config.name:audit_logs}") - @LogSearchPropertyDescription( - name = "logsearch.solr.audit.logs.config.name", - description = "Solr configuration name of the audit collection.", - examples = {"audit_logs"}, - defaultValue = "audit_logs", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String configName; - - @Value("${logsearch.solr.audit.logs.alias.name:audit_logs_alias}") - @LogSearchPropertyDescription( - name = "logsearch.solr.audit.logs.alias.name", - description = "Alias name for audit log collection (can be used for Log Search audit collection and ranger collection as well).", - examples = {"audit_logs_alias"}, - defaultValue = "audit_logs_alias", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String aliasNameIn; - - @Value("${logsearch.audit.logs.split.interval.mins:none}") - @LogSearchPropertyDescription( - name = "logsearch.audit.logs.split.interval.mins", - description = "Will create multiple collections and use alias. 
(not supported right now, use implicit routingif the value is not none)", - examples = {"none", "15"}, - defaultValue = "none", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String splitInterval; - - @Value("${logsearch.collection.audit.logs.numshards:1}") - @LogSearchPropertyDescription( - name = "logsearch.collection.audit.logs.numshards", - description = "Number of Solr shards for audit collection (bootstrapping).", - examples = {"2"}, - defaultValue = "1", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer numberOfShards; - - @Value("${logsearch.collection.audit.logs.replication.factor:1}") - @LogSearchPropertyDescription( - name = "logsearch.collection.audit.logs.replication.factor", - description = "Solr replication factor for audit collection (bootstrapping).", - examples = {"2"}, - defaultValue = "1", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer replicationFactor; - - @Value("#{ACLPropertiesSplitter.parseAcls('${logsearch.solr.audit.logs.zk.acls:}')}") - @LogSearchPropertyDescription( - name = "logsearch.solr.audit.logs.zk.acls", - description = "List of Zookeeper ACLs for Log Search audit collection (Log Search and Solr must be able to read/write collection details)", - examples = {"world:anyone:r,sasl:solr:cdrwa,sasl:logsearch:cdrwa"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List zkAcls; - - @Value("${logsearch.solr.audit.logs.config_set.folder:/usr/lib/ambari-logsearch-portal/conf/solr_configsets}") - @LogSearchPropertyDescription( - name = "logsearch.solr.audit.logs.config_set.folder", - description = "Location of Log Search audit collection configs for Solr.", - examples = {"/usr/lib/ambari-logsearch-portal/conf/solr_configsets"}, - defaultValue = "/usr/lib/ambari-logsearch-portal/conf/solr_configsets", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String configSetFolder; - - @LogSearchPropertyDescription( - name = "logsearch.solr.implicit.routing", - description = "Use implicit routing for Solr Collections.", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.solr.implicit.routing:false}") - private boolean solrImplicitRouting; - - @Override - public String getSolrUrl() { - return solrUrl; - } - - @Override - public void setSolrUrl(String solrUrl) { - this.solrUrl = solrUrl; - } - - @Override - public String getCollection() { - return collection; - } - - @Override - public void setCollection(String collection) { - this.collection = collection; - } - - @Override - public String getZkConnectString() { - return zkConnectString; - } - - @Override - public void setZkConnectString(String zkConnectString) { - this.zkConnectString = zkConnectString; - } - - @Override - public String getConfigName() { - return configName; - } - - @Override - public void setConfigName(String configName) { - this.configName = configName; - } - - @Override - public Integer getNumberOfShards() { - return numberOfShards; - } - - @Override - public void setNumberOfShards(Integer numberOfShards) { - this.numberOfShards = numberOfShards; - } - - @Override - public Integer getReplicationFactor() { - return replicationFactor; - } - - @Override - public void setReplicationFactor(Integer replicationFactor) { - this.replicationFactor = replicationFactor; - } - - @Override - public String getSplitInterval() { - return splitInterval; - } - - @Override - public void setSplitInterval(String splitInterval) { - this.splitInterval = splitInterval; - } - - @Override - public List getZkAcls() { - return 
zkAcls; - } - - @Override - public void setZkAcls(List zkAcls) { - this.zkAcls = zkAcls; - } - - @Override - public String getConfigSetFolder() { - return configSetFolder; - } - - @Override - public void setConfigSetFolder(String configSetFolder) { - this.configSetFolder = configSetFolder; - } - - public String getRangerCollection() { - return rangerCollection; - } - - public void setRangerCollection(String rangerCollection) { - this.rangerCollection = rangerCollection; - } - - public String getAliasNameIn() { - return aliasNameIn; - } - - public void setAliasNameIn(String aliasNameIn) { - this.aliasNameIn = aliasNameIn; - } - - @Override - public boolean isSolrImplicitRouting() { - return solrImplicitRouting; - } - - @Override - public void setSolrImplicitRouting(boolean solrImplicitRouting) { - this.solrImplicitRouting = solrImplicitRouting; - } - - @Override - public String getLogType() { - return "audit"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrClientsHolder.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrClientsHolder.java deleted file mode 100644 index 913472f5b1a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrClientsHolder.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.solr.client.solrj.SolrClient; - -import java.util.EnumMap; -import java.util.Map; - -public class SolrClientsHolder { - - private Map clientsMap = new EnumMap<>(CollectionType.class); - - public enum CollectionType { - SERVICE, - AUDIT, - HISTORY - } - - public SolrClientsHolder() { - clientsMap.put(CollectionType.SERVICE, null); - clientsMap.put(CollectionType.AUDIT, null); - clientsMap.put(CollectionType.HISTORY, null); - } - - public SolrClient getSolrClient(CollectionType type) { - return clientsMap.get(type); - } - - public synchronized void setSolrClient(SolrClient solrClient, CollectionType type) { - clientsMap.put(type, solrClient); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java deleted file mode 100644 index 33f4f6fc435..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.conf.global.SolrAuditLogsState; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.conf.global.LogLevelFilterManagerState; -import org.apache.ambari.logsearch.conf.global.SolrServiceLogsState; -import org.apache.ambari.logsearch.conf.global.SolrEventHistoryState; -import org.apache.ambari.logsearch.dao.SolrSchemaFieldDao; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.solr.repository.config.EnableSolrRepositories; -import org.springframework.scheduling.annotation.EnableScheduling; - -@Configuration -@EnableSolrRepositories -@EnableScheduling -public class SolrConfig { - - @Bean - public SolrSchemaFieldDao solrSchemaFieldDao() { - return new SolrSchemaFieldDao(); - } - - @Bean(name = "solrServiceLogsState") - public SolrCollectionState solrServiceLogsState() { - return new SolrServiceLogsState(); - } - - @Bean(name = "solrAuditLogsState") - public SolrCollectionState solrAuditLogsState() { - return new SolrAuditLogsState(); - } - - @Bean(name = "solrEventHistoryState") - public SolrCollectionState solrEventHistoryState() { - return new SolrEventHistoryState(); - } - - @Bean - public SolrClientsHolder solrClientsHolder() { - return new SolrClientsHolder(); - } -} - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionPropsConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionPropsConfig.java deleted file mode 100644 index 87b77bf3f52..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionPropsConfig.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
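The SolrClientsHolder and SolrConfig classes removed above register one Solr client slot per collection type. Because this rendering of the diff dropped the angle-bracket generics, the following sketch (an assumed reconstruction, not the original file) shows the same EnumMap-backed holder pattern with its type parameters restored.

import java.util.EnumMap;
import java.util.Map;

import org.apache.solr.client.solrj.SolrClient;

/**
 * Assumed reconstruction of the holder pattern in SolrClientsHolder above,
 * with the generic type parameters (lost in this rendering of the diff) filled back in.
 */
public class SolrClientsHolderSketch {

  public enum CollectionType { SERVICE, AUDIT, HISTORY }

  // One client slot per collection type; entries stay null until the collection is reachable.
  private final Map<CollectionType, SolrClient> clients = new EnumMap<>(CollectionType.class);

  public SolrClient getSolrClient(CollectionType type) {
    return clients.get(type);
  }

  // Writers are synchronized, as in the original, so a client can be swapped in once a collection becomes available.
  public synchronized void setSolrClient(SolrClient solrClient, CollectionType type) {
    clients.put(type, solrClient);
  }
}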
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.apache.zookeeper.data.ACL; -import org.springframework.beans.factory.annotation.Value; - -import java.util.List; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -public abstract class SolrConnectionPropsConfig implements SolrPropsConfig { - @Value("${logsearch.solr.url:}") - @LogSearchPropertyDescription( - name = "logsearch.solr.url", - description = "URL of Solr (non cloud mode) - currently unsupported.", - examples = {"localhost1:8868"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String solrUrl; - - @Value("${logsearch.solr.zk_connect_string:}") - @LogSearchPropertyDescription( - name = "logsearch.solr.zk_connect_string", - description = "Zookeeper connection string for Solr.", - examples = {"localhost1:2181,localhost2:2181/mysolr_znode"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String zkConnectString; - - @Value("#{ACLPropertiesSplitter.parseAcls('${logsearch.solr.zk.acls:}')}") - @LogSearchPropertyDescription( - name = "logsearch.solr.zk.acls", - description = "List of Zookeeper ACLs for Log Search Collections (Log Search and Solr must be able to read/write collection details)", - examples = {"world:anyone:r,sasl:solr:cdrwa,sasl:logsearch:cdrwa"}, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List zkAcls; - - @Value("${logsearch.solr.config_set.folder:/usr/lib/ambari-logsearch-portal/conf/solr_configsets}") - @LogSearchPropertyDescription( - name = "logsearch.solr.config_set.folder", - description = "Location of Solr collection configs.", - examples = {"/usr/lib/ambari-logsearch-portal/conf/solr_configsets"}, - defaultValue = "/usr/lib/ambari-logsearch-portal/conf/solr_configsets", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String configSetFolder; - - @LogSearchPropertyDescription( - name = "logsearch.solr.implicit.routing", - description = "Use implicit routing for Solr Collections.", - examples = {"true"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - @Value("${logsearch.solr.implicit.routing:false}") - private boolean solrImplicitRouting; - - @Override - public String getSolrUrl() { - return solrUrl; - } - - @Override - public void setSolrUrl(String solrUrl) { - this.solrUrl = solrUrl; - } - - @Override - public String getZkConnectString() { - return zkConnectString; - } - - @Override - public void setZkConnectString(String zkConnectString) { - this.zkConnectString = zkConnectString; - } - - @Override - public List getZkAcls() { - return zkAcls; - } - - @Override - public void setZkAcls(List zkAcls) { - this.zkAcls = zkAcls; - } - - @Override - public String getConfigSetFolder() { - return configSetFolder; - } - - @Override - public void setConfigSetFolder(String configSetFolder) { - this.configSetFolder = configSetFolder; - } - - @Override - public boolean isSolrImplicitRouting() { - return solrImplicitRouting; - } - - @Override - public void setSolrImplicitRouting(boolean solrImplicitRouting) { - this.solrImplicitRouting = solrImplicitRouting; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrEventHistoryPropsConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrEventHistoryPropsConfig.java deleted file mode 100644 index 822cea469f4..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrEventHistoryPropsConfig.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class SolrEventHistoryPropsConfig extends SolrConnectionPropsConfig { - - @Value("${logsearch.solr.collection.history:history}") - @LogSearchPropertyDescription( - name = "logsearch.solr.collection.history", - description = "Name of Log Search event history collection.", - examples = {"history"}, - defaultValue = "history", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String collection; - - @Value("${logsearch.history.split.interval.mins:none}") - @LogSearchPropertyDescription( - name = "logsearch.history.split.interval.mins", - description = "Will create multiple collections and use alias. 
(not supported right now, use implicit routingif the value is not none)", - examples = {"none", "15"}, - defaultValue = "none", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String splitInterval; - - @Value("${logsearch.solr.history.config.name:history}") - @LogSearchPropertyDescription( - name = "logsearch.solr.history.config.name", - description = "Solr configuration name of the event history collection.", - examples = {"history"}, - defaultValue = "history", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String configName; - - @Value("${logsearch.collection.history.numshards:1}") - @LogSearchPropertyDescription( - name = "logsearch.collection.history.numshards", - description = "Number of Solr shards for event history collection (bootstrapping).", - examples = {"2"}, - defaultValue = "1", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer numberOfShards; - - @Value("${logsearch.collection.history.replication.factor:2}") - @LogSearchPropertyDescription( - name = "logsearch.collection.history.replication.factor", - description = "Solr replication factor for event history collection (bootstrapping).", - examples = {"3"}, - defaultValue = "2", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer replicationFactor; - - @Value("${logsearch.schema.fields.populate.interval.mins:1}") - @LogSearchPropertyDescription( - name = "logsearch.schema.fields.populate.interval.mins", - description = "Interval in minutes for populating schema fiels for event history collections.", - examples = {"10"}, - defaultValue = "1", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer populateIntervalMins; - - @Override - public String getCollection() { - return collection; - } - - @Override - public void setCollection(String collection) { - this.collection = collection; - } - - @Override - public String getSplitInterval() { - return splitInterval; - } - - @Override - public void setSplitInterval(String splitInterval) { - this.splitInterval = splitInterval; - } - - @Override - public String getConfigName() { - return configName; - } - - @Override - public void setConfigName(String configName) { - this.configName = configName; - } - - @Override - public Integer getNumberOfShards() { - return numberOfShards; - } - - @Override - public void setNumberOfShards(Integer numberOfShards) { - this.numberOfShards = numberOfShards; - } - - @Override - public Integer getReplicationFactor() { - return replicationFactor; - } - - @Override - public void setReplicationFactor(Integer replicationFactor) { - this.replicationFactor = replicationFactor; - } - - - public Integer getPopulateIntervalMins() { - return populateIntervalMins; - } - - void setPopulateIntervalMins(Integer populateIntervalMins) { - this.populateIntervalMins = populateIntervalMins; - } - - @Override - public String getLogType() { - return null; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java deleted file mode 100644 index 5140d287bf7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class SolrKerberosConfig { - - @Value("${logsearch.solr.jaas.file:/usr/lib/ambari-logsearch-portal/logsearch_solr_jaas.conf}") - @LogSearchPropertyDescription( - name = "logsearch.solr.jaas.file", - description = "Path of the JAAS file for Kerberos based Solr Cloud authentication.", - examples = {"/my/path/jaas_file.conf"}, - defaultValue = "/usr/lib/ambari-logsearch-portal/logsearch_solr_jaas.conf", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String jaasFile; - - @Value("${logsearch.solr.kerberos.enable:false}") - @LogSearchPropertyDescription( - name = "logsearch.solr.kerberos.enable", - description = "Enable Kerberos Authentication for Solr Cloud.", - examples = {"true", "false"}, - defaultValue = "false", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private boolean enabled; - - public String getJaasFile() { - return jaasFile; - } - - public void setJaasFile(String jaasFile) { - this.jaasFile = jaasFile; - } - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrPropsConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrPropsConfig.java deleted file mode 100644 index ebb1acbadac..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrPropsConfig.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
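SolrKerberosConfig above only carries a JAAS file path and an enable flag; the code that consumes them is not part of this diff. As an assumption about typical usage, a Kerberized SolrJ client reads its JAAS configuration from the standard java.security.auth.login.config JVM property, roughly as in this illustrative helper (the class and method names are invented).

/**
 * Illustrative helper only: how a JAAS setting like the one in SolrKerberosConfig is
 * commonly applied before SolrJ clients are built. The consuming code is not part of
 * this diff, so this reflects an assumption, not the project's actual wiring.
 */
public class SolrKerberosBootstrapSketch {

  private static final String JAAS_SYSTEM_PROPERTY = "java.security.auth.login.config";

  public static void applyIfEnabled(boolean kerberosEnabled, String jaasFile) {
    if (kerberosEnabled) {
      // SolrJ's Kerberos (SPNEGO) support reads the JAAS login configuration from this standard JVM property.
      System.setProperty(JAAS_SYSTEM_PROPERTY, jaasFile);
    }
  }

  public static void main(String[] args) {
    applyIfEnabled(true, "/usr/lib/ambari-logsearch-portal/logsearch_solr_jaas.conf");
    System.out.println(System.getProperty(JAAS_SYSTEM_PROPERTY));
  }
}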
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.zookeeper.data.ACL; - -import java.util.List; - -public interface SolrPropsConfig { - String getSolrUrl(); - - void setSolrUrl(String solrUrl); - - String getZkConnectString(); - - void setZkConnectString(String zkConnectString); - - String getCollection(); - - void setCollection(String collection); - - String getConfigName(); - - void setConfigName(String configName); - - Integer getNumberOfShards(); - - void setNumberOfShards(Integer numberOfShards); - - Integer getReplicationFactor(); - - void setReplicationFactor(Integer replicationFactor); - - String getSplitInterval(); - - void setSplitInterval(String splitInterval); - - List getZkAcls(); - - void setZkAcls(List zkAcls); - - String getConfigSetFolder(); - - void setConfigSetFolder(String configSetFolder); - - String getLogType(); - - boolean isSolrImplicitRouting(); - - void setSolrImplicitRouting(boolean solrImplicitRouting); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java deleted file mode 100644 index 6a0e6b11ef6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; - -@Configuration -public class SolrServiceLogPropsConfig extends SolrConnectionPropsConfig { - - @Value("${logsearch.solr.collection.service.logs:hadoop_logs}") - @LogSearchPropertyDescription( - name = "logsearch.solr.collection.service.logs", - description = "Name of Log Search service log collection.", - examples = {"hadoop_logs"}, - defaultValue = "hadoop_logs", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String collection; - - @Value("${logsearch.service.logs.split.interval.mins:none}") - @LogSearchPropertyDescription( - name = "logsearch.service.logs.split.interval.mins", - description = "Will create multiple collections and use alias. 
(not supported right now, use implicit routingif the value is not none)", - examples = {"none", "15"}, - defaultValue = "none", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String splitInterval; - - @Value("${logsearch.solr.service.logs.config.name:hadoop_logs}") - @LogSearchPropertyDescription( - name = "logsearch.solr.service.logs.config.name", - description = "Solr configuration name of the service log collection.", - examples = {"hadoop_logs"}, - defaultValue = "hadoop_logs", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private String configName; - - @Value("${logsearch.collection.service.logs.numshards:1}") - @LogSearchPropertyDescription( - name = "logsearch.collection.service.logs.numshards", - description = "Number of Solr shards for service log collection (bootstrapping).", - examples = {"2"}, - defaultValue = "1", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer numberOfShards; - - @Value("${logsearch.collection.service.logs.replication.factor:1}") - @LogSearchPropertyDescription( - name = "logsearch.collection.service.logs.replication.factor", - description = "Solr replication factor for service log collection (bootstrapping).", - examples = {"2"}, - defaultValue = "1", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Integer replicationFactor; - - @Override - public String getCollection() { - return collection; - } - - @Override - public void setCollection(String collection) { - this.collection = collection; - } - - @Override - public String getSplitInterval() { - return splitInterval; - } - - @Override - public void setSplitInterval(String splitInterval) { - this.splitInterval = splitInterval; - } - - @Override - public String getConfigName() { - return configName; - } - - @Override - public void setConfigName(String configName) { - this.configName = configName; - } - - @Override - public Integer getNumberOfShards() { - return numberOfShards; - } - - @Override - public void setNumberOfShards(Integer numberOfShards) { - this.numberOfShards = numberOfShards; - } - - @Override - public Integer getReplicationFactor() { - return replicationFactor; - } - - @Override - public void setReplicationFactor(Integer replicationFactor) { - this.replicationFactor = replicationFactor; - } - - @Override - public String getLogType() { - return "service"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/StaticResourceConfiguration.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/StaticResourceConfiguration.java deleted file mode 100644 index bf03aa72562..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/StaticResourceConfiguration.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf; - -import org.springframework.context.annotation.Configuration; -import org.springframework.web.servlet.config.annotation.EnableWebMvc; -import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; -import org.springframework.web.servlet.config.annotation.ViewControllerRegistry; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; - -@EnableWebMvc -@Configuration -public class StaticResourceConfiguration extends WebMvcConfigurerAdapter { - - private static final String[] CLASSPATH_RESOURCE_LOCATIONS = { - "classpath:/static/", "classpath:/swagger/","classpath:META-INF/resources/webjars/" - }; - - @Override - public void addResourceHandlers(ResourceHandlerRegistry registry) { - registry.addResourceHandler("/**") - .addResourceLocations(CLASSPATH_RESOURCE_LOCATIONS); - } - - @Override - public void addViewControllers(ViewControllerRegistry registry) { - registry.addViewController("/").setViewName( - "forward:/index.html"); - registry.addViewController("/docs").setViewName( - "forward:/swagger.html"); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/UIMappingConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/UIMappingConfig.java deleted file mode 100644 index 1fb9a51d7c5..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/UIMappingConfig.java +++ /dev/null @@ -1,468 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf; - -import org.apache.ambari.logsearch.config.api.LogSearchPropertyDescription; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -import javax.annotation.PostConstruct; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_COMPONENT_LABELS_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_COMMON_LABELS_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_EXCLUDES_COMMON_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_EXCLUDES_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_FALLBACK_PREFIX_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_FALLBACK_SUFFIX_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_FILTERABLE_EXCLUDES_COMMON_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_FILTERABLE_EXCLUDES_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_LABELS_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_VISIBLE_COMMON_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.AUDIT_FIELD_VISIBLE_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_PROPERTIES_FILE; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_FIELD_FALLBACK_PREFIX_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_FIELD_FALLBACK_SUFFIX_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_FIELD_FILTERABLE_EXLUDE_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_GROUP_LABELS_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_COMPONENT_LABELS_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_FIELD_LABELS_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_FIELD_EXCLUDES_DEFAULTS; -import static org.apache.ambari.logsearch.common.LogSearchConstants.SERVICE_FIELD_VISIBLE_DEFAULTS; - -@Configuration -public class UIMappingConfig { - - @Value("#{propertiesSplitter.parseMap('${logsearch.web.service_logs.group.labels:" + SERVICE_GROUP_LABELS_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.service_logs.group.labels", - description = "Map of service group labels.", - examples = {"ambari:Ambari,yarn:YARN"}, - defaultValue = SERVICE_GROUP_LABELS_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map serviceGroupLabels; - - @Value("#{propertiesSplitter.parseMap('${logsearch.web.service_logs.component.labels:" + SERVICE_COMPONENT_LABELS_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.service_logs.component.labels", - description = "Map of service component labels.", - examples = {"ambari_agent:Ambari Agent,ambari_server:Ambari Server"}, - defaultValue = SERVICE_COMPONENT_LABELS_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map serviceComponentLabels; - - 
@Value("#{propertiesSplitter.parseMap('${logsearch.web.service_logs.field.labels:" + SERVICE_FIELD_LABELS_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.service_logs.field.labels", - description = "Map of serivce field labels.", - examples = {"log_message:Message,ip:IP Address"}, - defaultValue = SERVICE_FIELD_LABELS_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map serviceFieldLabels; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.service_logs.field.excludes:" + SERVICE_FIELD_EXCLUDES_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.service_logs.field.excludes", - description = "List of fields that will be excluded from metadata schema responses.", - examples = {"seq_num,tag"}, - defaultValue = SERVICE_FIELD_EXCLUDES_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List serviceFieldExcludeList; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.service_logs.field.visible:" + SERVICE_FIELD_VISIBLE_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.service_logs.field.visible", - description = "List of fields that will be displayed by default on the UI.", - examples = {"log_message,path,logtime"}, - defaultValue = SERVICE_FIELD_VISIBLE_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List serviceFieldVisibleList; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.service_logs.field.filterable.excludes:" + SERVICE_FIELD_FILTERABLE_EXLUDE_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.service_logs.field.filterable.excludes", - description = "List of fields that will be excluded from filter selection on the UI.", - examples = {"path,method,logger_name"}, - defaultValue = SERVICE_FIELD_FILTERABLE_EXLUDE_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List serviceFieldFilterableExcludesList; - - @Value("#{propertiesSplitter.parseMap('${logsearch.web.audit_logs.component.labels:" + AUDIT_COMPONENT_LABELS_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.component.labels", - description = "Map of component component labels.", - examples = {"ambari:Ambari,RangerAudit:ranger"}, - defaultValue = AUDIT_COMPONENT_LABELS_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map auditComponentLabels; - - @Value("#{propertiesSplitter.parseMapInMap('${logsearch.web.audit_logs.field.labels:" + AUDIT_FIELD_LABELS_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.labels", - description = "Map of fields (key-value pairs) labels for different component types.", - examples = {"ambari#reqUser:Ambari User,ws_response:Response;RangerAudit#reqUser:Req User"}, - defaultValue = AUDIT_FIELD_LABELS_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map> auditFieldLabels; - - @Value("#{propertiesSplitter.parseMap('${logsearch.web.audit_logs.field.common.labels:" + AUDIT_FIELD_COMMON_LABELS_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.common.labels", - description = "Map of fields labels for audits (common).", - examples = {"reqUser:Req User,resp:Response"}, - defaultValue = AUDIT_FIELD_COMMON_LABELS_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map auditFieldCommonLabels; - - @Value("#{propertiesSplitter.parseListInMap('${logsearch.web.audit_logs.field.visible:" + AUDIT_FIELD_VISIBLE_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = 
"logsearch.web.audit_logs.field.visible", - description = "List of fields that will be displayed by default on the UI for different audit components.", - examples = {"ambari:reqUser,resp;RangerAudit:reqUser,repo"}, - defaultValue = AUDIT_FIELD_VISIBLE_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map> auditFieldVisibleleMap; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.audit_logs.field.common.visible:" + AUDIT_FIELD_VISIBLE_COMMON_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.common.visible", - description = "List of fields that will be displayed by default on the UI for every audit components.", - examples = {"reqUser,resp"}, - defaultValue = AUDIT_FIELD_VISIBLE_COMMON_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List auditFieldCommonVisibleList; - - @Value("#{propertiesSplitter.parseListInMap('${logsearch.web.audit_logs.field.excludes:" + AUDIT_FIELD_EXCLUDES_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.excludes", - description = "List of fields that will be excluded from metadata schema responses for different audit components.", - examples = {"ambari:reqUser,resp,hdfs:ws_user,ws_role"}, - defaultValue = AUDIT_FIELD_EXCLUDES_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map> auditFieldExcludeMap; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.audit_logs.field.common.excludes:" + AUDIT_FIELD_EXCLUDES_COMMON_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.common.excludes", - description = "List of fields that will be excluded from metadata schema responses for every audit components.", - examples = {"reqUser,resp,tag_str"}, - defaultValue = AUDIT_FIELD_EXCLUDES_COMMON_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List auditFieldCommonExcludeList; - - @Value("#{propertiesSplitter.parseListInMap('${logsearch.web.audit_logs.field.filterable.excludes:" + AUDIT_FIELD_FILTERABLE_EXCLUDES_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.filterable.excludes", - description = "List of fields that will be excluded from filter selection on the UI for different audit components.", - examples = {"ambari:tag_str,resp,tag_str;RangerAudit:path,ip"}, - defaultValue = AUDIT_FIELD_FILTERABLE_EXCLUDES_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private Map> auditFieldFilterableExcludeMap; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.audit_logs.field.common.filterable.common.excludes:" + AUDIT_FIELD_FILTERABLE_EXCLUDES_COMMON_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.common.filterable.common.excludes", - description = "List of fields that will be excluded from filter selection on the UI for every audit components.", - examples = {"tag_str,resp,tag_str"}, - defaultValue = AUDIT_FIELD_FILTERABLE_EXCLUDES_COMMON_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List auditFieldCommonFilterableExcludeList; - - @Value("${logsearch.web.labels.fallback.enabled:true}") - @LogSearchPropertyDescription( - name = "logsearch.web.audit_logs.field.filterable.excludes", - description = "Enable label fallback. 
(replace _ with spaces and capitalize properly)", - examples = {"false"}, - defaultValue = "true", - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private boolean labelFallbackEnabled; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.labels.service_logs.field.fallback.prefixes:" + SERVICE_FIELD_FALLBACK_PREFIX_DEFAULTS +"}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.labels.service_logs.field.fallback.prefixes", - description = "List of prefixes that should be removed during fallback of service field labels.", - examples = {"ws_,std_,sdi_"}, - defaultValue = SERVICE_FIELD_FALLBACK_PREFIX_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List serviceFieldFallbackPrefixes; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.labels.audit_logs.field.fallback.prefixes:" + AUDIT_FIELD_FALLBACK_PREFIX_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.labels.service_logs.field.fallback.prefixes", - description = "List of prefixes that should be removed during fallback of audit field labels.", - examples = {"ws_,std_,sdi_"}, - defaultValue = AUDIT_FIELD_FALLBACK_PREFIX_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List auditFieldFallbackPrefixes; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.labels.service_logs.field.fallback.suffixes:" + SERVICE_FIELD_FALLBACK_PREFIX_DEFAULTS +"}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.labels.service_logs.field.fallback.suffixes", - description = "List of suffixes that should be removed during fallback of service field labels.", - examples = {"_i,_l,_s,_b"}, - defaultValue = SERVICE_FIELD_FALLBACK_SUFFIX_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List serviceFieldFallbackSuffixes; - - @Value("#{propertiesSplitter.parseList('${logsearch.web.labels.audit_logs.field.fallback.suffixes:" + AUDIT_FIELD_FALLBACK_PREFIX_DEFAULTS + "}')}") - @LogSearchPropertyDescription( - name = "logsearch.web.labels.service_logs.field.fallback.suffixes", - description = "List of suffixes that should be removed during fallback of audit field labels.", - examples = {"_i,_l,_s,_b"}, - defaultValue = AUDIT_FIELD_FALLBACK_SUFFIX_DEFAULTS, - sources = {LOGSEARCH_PROPERTIES_FILE} - ) - private List auditFieldFallbackSuffixes; - - private final Map> mergedAuditFieldLabelMap = new HashMap<>(); - - private final Map> mergedAuditFieldVisibleMap = new HashMap<>(); - - private final Map> mergedAuditFieldExcludeMap = new HashMap<>(); - - private final Map> mergedAuditFieldFilterableExcludesMap = new HashMap<>(); - - public Map getServiceGroupLabels() { - return serviceGroupLabels; - } - - public void setServiceGroupLabels(Map serviceGroupLabels) { - this.serviceGroupLabels = serviceGroupLabels; - } - - public Map getServiceComponentLabels() { - return serviceComponentLabels; - } - - public void setServiceComponentLabels(Map serviceComponentLabels) { - this.serviceComponentLabels = serviceComponentLabels; - } - - public Map getAuditComponentLabels() { - return auditComponentLabels; - } - - public void setAuditComponentLabels(Map auditComponentLabels) { - this.auditComponentLabels = auditComponentLabels; - } - - public Map getServiceFieldLabels() { - return serviceFieldLabels; - } - - public void setServiceFieldLabels(Map serviceFieldLabels) { - this.serviceFieldLabels = serviceFieldLabels; - } - - public Map> getAuditFieldLabels() { - return auditFieldLabels; - } - - public void setAuditFieldLabels(Map> auditFieldLabels) { - this.auditFieldLabels = 
auditFieldLabels; - } - - public List getServiceFieldExcludeList() { - return serviceFieldExcludeList; - } - - public void setServiceFieldExcludeList(List serviceFieldExcludeList) { - this.serviceFieldExcludeList = serviceFieldExcludeList; - } - - public List getServiceFieldVisibleList() { - return serviceFieldVisibleList; - } - - public void setServiceFieldVisibleList(List serviceFieldVisibleList) { - this.serviceFieldVisibleList = serviceFieldVisibleList; - } - - public Map> getAuditFieldVisibleleMap() { - return auditFieldVisibleleMap; - } - - public void setAuditFieldVisibleleMap(Map> auditFieldVisibleleMap) { - this.auditFieldVisibleleMap = auditFieldVisibleleMap; - } - - public List getAuditFieldCommonVisibleList() { - return auditFieldCommonVisibleList; - } - - public void setAuditFieldCommonVisibleList(List auditFieldCommonVisibleList) { - this.auditFieldCommonVisibleList = auditFieldCommonVisibleList; - } - - public Map> getAuditFieldExcludeMap() { - return auditFieldExcludeMap; - } - - public void setAuditFieldExcludeMap(Map> auditFieldExcludeMap) { - this.auditFieldExcludeMap = auditFieldExcludeMap; - } - - public List getAuditFieldCommonExcludeList() { - return auditFieldCommonExcludeList; - } - - public void setAuditFieldCommonExcludeList(List auditFieldCommonExcludeList) { - this.auditFieldCommonExcludeList = auditFieldCommonExcludeList; - } - - public Map getAuditFieldCommonLabels() { - return auditFieldCommonLabels; - } - - public void setAuditFieldCommonLabels(Map auditFieldCommonLabels) { - this.auditFieldCommonLabels = auditFieldCommonLabels; - } - - public boolean isLabelFallbackEnabled() { - return labelFallbackEnabled; - } - - public void setLabelFallbackEnabled(boolean labelFallbackEnabled) { - this.labelFallbackEnabled = labelFallbackEnabled; - } - - public List getServiceFieldFallbackPrefixes() { - return serviceFieldFallbackPrefixes; - } - - public void setServiceFieldFallbackPrefixes(List serviceFieldFallbackPrefixes) { - this.serviceFieldFallbackPrefixes = serviceFieldFallbackPrefixes; - } - - public List getAuditFieldFallbackPrefixes() { - return auditFieldFallbackPrefixes; - } - - public void setAuditFieldFallbackPrefixes(List auditFieldFallbackPrefixes) { - this.auditFieldFallbackPrefixes = auditFieldFallbackPrefixes; - } - - public List getServiceFieldFilterableExcludesList() { - return serviceFieldFilterableExcludesList; - } - - public void setServiceFieldFilterableExcludesList(List serviceFieldFilterableExcludesList) { - this.serviceFieldFilterableExcludesList = serviceFieldFilterableExcludesList; - } - - public List getServiceFieldFallbackSuffixes() { - return serviceFieldFallbackSuffixes; - } - - public void setServiceFieldFallbackSuffixes(List serviceFieldFallbackSuffixes) { - this.serviceFieldFallbackSuffixes = serviceFieldFallbackSuffixes; - } - - public List getAuditFieldFallbackSuffixes() { - return auditFieldFallbackSuffixes; - } - - public void setAuditFieldFallbackSuffixes(List auditFieldFallbackSuffixes) { - this.auditFieldFallbackSuffixes = auditFieldFallbackSuffixes; - } - - public Map> getMergedAuditFieldVisibleMap() { - return mergedAuditFieldVisibleMap; - } - - public Map> getMergedAuditFieldExcludeMap() { - return mergedAuditFieldExcludeMap; - } - - public Map> getMergedAuditFieldLabelMap() { - return mergedAuditFieldLabelMap; - } - - public Map> getMergedAuditFieldFilterableExcludesMap() { - return mergedAuditFieldFilterableExcludesMap; - } - - @PostConstruct - public void init() { - mergeCommonAndSpecMapValues(auditFieldLabels, 
auditFieldCommonLabels, mergedAuditFieldLabelMap); - mergeCommonAndSpecListValues(auditFieldVisibleleMap, auditFieldCommonVisibleList, mergedAuditFieldVisibleMap); - mergeCommonAndSpecListValues(auditFieldExcludeMap, auditFieldCommonExcludeList, mergedAuditFieldExcludeMap); - mergeCommonAndSpecListValues(auditFieldFilterableExcludeMap, auditFieldCommonFilterableExcludeList, mergedAuditFieldFilterableExcludesMap); - } - - private void mergeCommonAndSpecListValues(Map> specMap, List commonList, - Map> mergedMap) { - Set componentFilterableKeys = specMap.keySet(); - for (String component : componentFilterableKeys) { - List specAuditDataList = specMap.get(component); - List mergedDataList = new ArrayList<>(); - if (specAuditDataList != null) { - mergedDataList.addAll(specAuditDataList); - for (String commonData : commonList) { - if (!specAuditDataList.contains(commonData)) { - mergedDataList.add(commonData); - } - } - mergedMap.put(component, mergedDataList); - } - } - } - - private void mergeCommonAndSpecMapValues(Map> specMap, Map commonMap, - Map> mergedMap) { - Set componentFilterableKeys = specMap.keySet(); - for (String component : componentFilterableKeys) { - Map specAuditDataMap = specMap.get(component); - Map mergedAuditDataMap = new HashMap<>(); - if (specAuditDataMap != null) { - mergedAuditDataMap.putAll(specAuditDataMap); - for (Map.Entry entry : commonMap.entrySet()) { - if (!specAuditDataMap.containsKey(entry.getKey())) { - mergedAuditDataMap.put(entry.getKey(), entry.getValue()); - } - } - mergedMap.put(component, mergedAuditDataMap); - } - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogLevelFilterManagerState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogLevelFilterManagerState.java deleted file mode 100644 index afd5313d84a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogLevelFilterManagerState.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf.global; - -import javax.inject.Named; - -@Named -public class LogLevelFilterManagerState { - - private volatile boolean logLevelFilterManagerIsReady; - - public boolean isLogLevelFilterManagerIsReady() { - return logLevelFilterManagerIsReady; - } - - public void setLogLevelFilterManagerIsReady(boolean logLevelFilterManagerIsReady) { - this.logLevelFilterManagerIsReady = logLevelFilterManagerIsReady; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java deleted file mode 100644 index 7ca701d119f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.conf.global; - -import javax.inject.Named; - -@Named -public class LogSearchConfigState { - private volatile boolean logSearchConfigAvailable; - - public boolean isLogSearchConfigAvailable() { - return logSearchConfigAvailable; - } - - public void setLogSearchConfigAvailable(boolean logSearchConfigAvailable) { - this.logSearchConfigAvailable = logSearchConfigAvailable; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrAuditLogsState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrAuditLogsState.java deleted file mode 100644 index 546a5dc3a2e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrAuditLogsState.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf.global; - -import javax.inject.Named; - -@Named -public class SolrAuditLogsState implements SolrCollectionState { - - private volatile boolean znodeReady; - private volatile boolean solrCollectionReady; - private volatile boolean solrAliasReady; - private volatile boolean configurationUploaded; - - @Override - public boolean isZnodeReady() { - return znodeReady; - } - - @Override - public void setZnodeReady(boolean znodeAvailable) { - this.znodeReady = znodeAvailable; - } - - @Override - public boolean isSolrCollectionReady() { - return solrCollectionReady; - } - - @Override - public void setSolrCollectionReady(boolean solrCollectionReady) { - this.solrCollectionReady = solrCollectionReady; - } - - @Override - public boolean isConfigurationUploaded() { - return configurationUploaded; - } - - @Override - public void setConfigurationUploaded(boolean configurationUploaded) { - this.configurationUploaded = configurationUploaded; - } - - public boolean isSolrAliasReady() { - return solrAliasReady; - } - - public void setSolrAliasReady(boolean solrAliasReady) { - this.solrAliasReady = solrAliasReady; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrCollectionState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrCollectionState.java deleted file mode 100644 index 5885611aa2a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrCollectionState.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf.global; - -public interface SolrCollectionState { - boolean isZnodeReady(); - - void setZnodeReady(boolean znodeAvailable); - - boolean isSolrCollectionReady(); - - void setSolrCollectionReady(boolean solrCollectionCreated); - - boolean isConfigurationUploaded(); - - void setConfigurationUploaded(boolean configurationUploaded); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrEventHistoryState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrEventHistoryState.java deleted file mode 100644 index c31e69e34bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrEventHistoryState.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.conf.global; - -import javax.inject.Named; - -@Named -public class SolrEventHistoryState implements SolrCollectionState { - - private volatile boolean znodeReady; - private volatile boolean solrCollectionReady; - private volatile boolean configurationUploaded; - - @Override - public boolean isZnodeReady() { - return znodeReady; - } - - @Override - public void setZnodeReady(boolean znodeAvailable) { - this.znodeReady = znodeAvailable; - } - - @Override - public boolean isSolrCollectionReady() { - return solrCollectionReady; - } - - @Override - public void setSolrCollectionReady(boolean solrCollectionReady) { - this.solrCollectionReady = solrCollectionReady; - } - - @Override - public boolean isConfigurationUploaded() { - return configurationUploaded; - } - - @Override - public void setConfigurationUploaded(boolean configurationUploaded) { - this.configurationUploaded = configurationUploaded; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrServiceLogsState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrServiceLogsState.java deleted file mode 100644 index 60eafc5ce9d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/SolrServiceLogsState.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.conf.global; - -import javax.inject.Named; - -@Named -public class SolrServiceLogsState implements SolrCollectionState { - - private volatile boolean znodeReady; - private volatile boolean solrCollectionReady; - private volatile boolean configurationUploaded; - - @Override - public boolean isZnodeReady() { - return znodeReady; - } - - @Override - public void setZnodeReady(boolean znodeAvailable) { - this.znodeReady = znodeAvailable; - } - - @Override - public boolean isSolrCollectionReady() { - return solrCollectionReady; - } - - @Override - public void setSolrCollectionReady(boolean solrCollectionReady) { - this.solrCollectionReady = solrCollectionReady; - } - - @Override - public boolean isConfigurationUploaded() { - return configurationUploaded; - } - - @Override - public void setConfigurationUploaded(boolean configurationUploaded) { - this.configurationUploaded = configurationUploaded; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java deleted file mode 100644 index 141299cc240..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.configurer; - -interface Configurer { - void start(); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogLevelManagerFilterConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogLevelManagerFilterConfigurer.java deleted file mode 100644 index 5efca853e82..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogLevelManagerFilterConfigurer.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.configurer; - -import org.apache.ambari.logsearch.conf.LogSearchConfigApiConfig; -import org.apache.ambari.logsearch.conf.LogSearchConfigMapHolder; -import org.apache.ambari.logsearch.conf.global.LogLevelFilterManagerState; -import org.apache.ambari.logsearch.config.solr.LogLevelFilterManagerSolr; -import org.apache.ambari.logsearch.config.zookeeper.LogLevelFilterManagerZK; -import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigZKHelper; -import org.apache.ambari.logsearch.dao.EventHistorySolrDao; -import org.apache.curator.framework.CuratorFramework; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class LogLevelManagerFilterConfigurer implements Configurer { - private static final Logger logger = LoggerFactory.getLogger(LogLevelManagerFilterConfigurer.class); - - private static final int RETRY_INTERVAL_SECONDS = 10; - - private final EventHistorySolrDao eventHistorySolrDao; - private final LogLevelFilterManagerState logLevelFilterManagerState; - private final LogSearchConfigApiConfig logSearchConfigApiConfig; - private final LogSearchConfigMapHolder logSearchConfigMapHolder; - - private LogLevelFilterManagerSolr logLevelFilterManagerSolr; - private LogLevelFilterManagerZK logLevelFilterManagerZK; - - @Inject - public LogLevelManagerFilterConfigurer(final LogSearchConfigApiConfig logSearchConfigApiConfig, - final LogLevelFilterManagerState logLevelFilterManagerState, - final EventHistorySolrDao eventHistorySolrDao, - final LogSearchConfigMapHolder logSearchConfigMapHolder) { - this.logSearchConfigApiConfig = logSearchConfigApiConfig; - this.logLevelFilterManagerState = logLevelFilterManagerState; - this.eventHistorySolrDao = eventHistorySolrDao; - this.logSearchConfigMapHolder = logSearchConfigMapHolder; - } - - @PostConstruct - @Override - public void start() { - Thread setupThread = new Thread("setup_solr_loglevel_filter_manager") { - @Override - public void run() { - logger.info("Start initializing log level filter manager ..."); - if (logSearchConfigApiConfig.isSolrFilterStorage() || logSearchConfigApiConfig.isZkFilterStorage()) { - while (true) { - try { - if (logSearchConfigApiConfig.isSolrFilterStorage()) { - if (eventHistorySolrDao.getSolrCollectionState().isSolrCollectionReady()) { - setLogLevelFilterManagerSolr(new LogLevelFilterManagerSolr(eventHistorySolrDao.getSolrClient())); - logLevelFilterManagerState.setLogLevelFilterManagerIsReady(true); - logger.info("Log level filter manager (solr) successfully initialized."); - break; - } - } - if (logSearchConfigApiConfig.isZkFilterStorage()) { - CuratorFramework client = LogSearchConfigZKHelper.createZKClient(logSearchConfigMapHolder.getLogsearchProperties()); - client.start(); - if (client.checkExists().forPath("/") == null) { - client.create().creatingParentContainersIfNeeded().forPath("/"); - } - LogLevelFilterManagerZK logLevelFilterManagerZK = new LogLevelFilterManagerZK( - logSearchConfigMapHolder.getLogsearchProperties(), client); - setLogLevelFilterManagerZK(logLevelFilterManagerZK); - logLevelFilterManagerState.setLogLevelFilterManagerIsReady(true); - logger.info("Log level filter manager (zookeeper) successfully initialized."); - break; - } - } catch (Exception ex) { - logger.warn("Could not initialize log level Solr filter manager, going 
to sleep for " + RETRY_INTERVAL_SECONDS + " seconds ", ex); - } - try { - Thread.sleep(RETRY_INTERVAL_SECONDS * 1000); - } catch (Exception e) {/* ignore */} - } - } else { - logger.info("Solr is not used as a log level filter storage."); - } - } - }; - setupThread.setDaemon(true); - setupThread.start(); - } - - public LogLevelFilterManagerSolr getLogLevelFilterManagerSolr() { - return logLevelFilterManagerSolr; - } - - public void setLogLevelFilterManagerSolr(final LogLevelFilterManagerSolr logLevelFilterManagerSolr) { - this.logLevelFilterManagerSolr = logLevelFilterManagerSolr; - } - - public LogLevelFilterManagerZK getLogLevelFilterManagerZK() { - return logLevelFilterManagerZK; - } - - public void setLogLevelFilterManagerZK(LogLevelFilterManagerZK logLevelFilterManagerZK) { - this.logLevelFilterManagerZK = logLevelFilterManagerZK; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java deleted file mode 100644 index fc71409d872..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.configurer; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.conf.LogSearchConfigApiConfig; -import org.apache.ambari.logsearch.conf.LogSearchConfigMapHolder; -import org.apache.ambari.logsearch.conf.global.LogSearchConfigState; -import org.apache.ambari.logsearch.config.api.LogSearchConfigFactory; -import org.apache.ambari.logsearch.config.api.LogSearchConfigServer; -import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigServerZK; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Named -public class LogSearchConfigConfigurer implements Configurer { - private static final Logger logger = LoggerFactory.getLogger(LogSearchConfigConfigurer.class); - - private static final int RETRY_INTERVAL_SECONDS = 10; - - private LogSearchConfigServer logSearchConfig; - public LogSearchConfigServer getConfig() { - return logSearchConfig; - } - - @Inject - private LogSearchConfigState logSearchConfigState; - - @Inject - private LogSearchConfigMapHolder logSearchConfigMapHolder; - - @Inject - private LogSearchConfigApiConfig logSearchConfigApiConfig; - - @PostConstruct - @Override - public void start() { - Thread setupThread = new Thread("setup_logsearch_config") { - @Override - public void run() { - logger.info("Started thread to set up log search config"); - while (true) { - try { - if (logSearchConfigApiConfig.isConfigApiEnabled()) { - logSearchConfig = LogSearchConfigFactory.createLogSearchConfigServer(logSearchConfigMapHolder.getLogsearchProperties(), - LogSearchConfigServerZK.class); - logSearchConfigState.setLogSearchConfigAvailable(true); - } else { - logger.info("Config API is disabled. Shipper configs won't be accessible from the Rest API."); - } - break; - } catch (Exception e) { - logger.warn("Could not initialize Log Search config, going to sleep for " + RETRY_INTERVAL_SECONDS + " seconds ", e); - try { Thread.sleep(RETRY_INTERVAL_SECONDS * 1000); } catch (Exception e2) {/* ignore */} - } - } - } - }; - setupThread.setDaemon(true); - setupThread.start(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java deleted file mode 100644 index 679c1f53dbb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.configurer; - -import org.apache.ambari.logsearch.conf.SolrAuditLogPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrAuditLogsState; -import org.apache.ambari.logsearch.dao.AuditSolrDao; -import org.apache.ambari.logsearch.handler.ListCollectionHandler; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -public class SolrAuditAliasConfigurer implements Configurer { - - private static final Logger LOG = LoggerFactory.getLogger(SolrAuditAliasConfigurer.class); - - private static final int ALIAS_SETUP_RETRY_SECOND = 30 * 60; - - private final AuditSolrDao auditSolrDao; - - public SolrAuditAliasConfigurer(final AuditSolrDao auditSolrDao) { - this.auditSolrDao = auditSolrDao; - } - - @Override - public void start() { - final SolrAuditLogPropsConfig solrPropsConfig = (SolrAuditLogPropsConfig) auditSolrDao.getSolrPropsConfig(); - final SolrAuditLogsState state = (SolrAuditLogsState) auditSolrDao.getSolrCollectionState(); - final Collection collectionListIn = - Arrays.asList(solrPropsConfig.getCollection(), solrPropsConfig.getRangerCollection().trim()); - - if (solrPropsConfig.getAliasNameIn() == null || collectionListIn.size() == 0) { - LOG.info("Will not create alias {} for {}", solrPropsConfig.getAliasNameIn(), collectionListIn.toString()); - return; - } - - LOG.info("setupAlias " + solrPropsConfig.getAliasNameIn() + " for " + collectionListIn.toString()); - // Start a background thread to do setup - Thread setupThread = new Thread("setup_alias_" + solrPropsConfig.getAliasNameIn()) { - @Override - public void run() { - LOG.info("Started monitoring thread to check availability of Solr server. alias=" + solrPropsConfig.getAliasNameIn() + - ", collections=" + collectionListIn.toString()); - int retryCount = 0; - while (true) { - if (state.isSolrCollectionReady()) { - try { - CloudSolrClient solrClient = auditSolrDao.getSolrClient(); - int count = createAlias(solrClient, solrPropsConfig.getAliasNameIn(), collectionListIn); - if (count > 0) { - solrClient.setDefaultCollection(solrPropsConfig.getAliasNameIn()); - if (count == collectionListIn.size()) { - LOG.info("Setup for alias " + solrPropsConfig.getAliasNameIn() + " is successful. Exiting setup retry thread. 
" + - "Collections=" + collectionListIn); - state.setSolrAliasReady(true); - break; - } - } else { - LOG.warn("Not able to create alias=" + solrPropsConfig.getAliasNameIn() + ", retryCount=" + retryCount); - } - } catch (Exception e) { - LOG.error("Error setting up alias=" + solrPropsConfig.getAliasNameIn(), e); - } - } - try { - Thread.sleep(ALIAS_SETUP_RETRY_SECOND * 1000); - } catch (InterruptedException sleepInterrupted) { - LOG.info("Sleep interrupted while setting up alias " + solrPropsConfig.getAliasNameIn()); - break; - } - retryCount++; - } - } - }; - setupThread.setDaemon(true); - setupThread.start(); - } - - private int createAlias(final CloudSolrClient solrClient, String aliasNameIn, Collection collectionListIn) - throws SolrServerException, IOException { - List collectionToAdd = new ArrayList<>(); - try { - collectionToAdd = new ListCollectionHandler().handle(solrClient, null); - } catch (Exception e) { - LOG.error("Invalid state during getting collections for creating alias"); - } - collectionToAdd.retainAll(collectionListIn); - - String collectionsCSV = null; - if (!collectionToAdd.isEmpty()) { - collectionsCSV = StringUtils.join(collectionToAdd, ','); - CollectionAdminRequest.CreateAlias aliasCreateRequest = CollectionAdminRequest.createAlias(aliasNameIn, collectionsCSV); - CollectionAdminResponse createResponse = aliasCreateRequest.process(solrClient); - if (createResponse.getStatus() != 0) { - LOG.error("Error creating alias. alias=" + aliasNameIn + ", collectionList=" + collectionsCSV - + ", response=" + createResponse); - return 0; - } - } - if (collectionToAdd.size() == collectionListIn.size()) { - LOG.info("Created alias for all collections. alias=" + aliasNameIn + ", collectionsCSV=" + collectionsCSV); - } else { - LOG.info("Created alias for " + collectionToAdd.size() + " out of " + collectionListIn.size() + " collections. " + - "alias=" + aliasNameIn + ", collectionsCSV=" + collectionsCSV); - } - return collectionToAdd.size(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java deleted file mode 100644 index 96257366453..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.configurer; - -import org.apache.ambari.logsearch.conf.SolrClientsHolder; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.dao.SolrDaoBase; -import org.apache.ambari.logsearch.handler.ACLHandler; -import org.apache.ambari.logsearch.handler.CreateCollectionHandler; -import org.apache.ambari.logsearch.handler.ListCollectionHandler; -import org.apache.ambari.logsearch.handler.ReloadCollectionHandler; -import org.apache.ambari.logsearch.handler.UploadConfigurationHandler; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.zookeeper.WatchedEvent; -import org.apache.zookeeper.Watcher; -import org.apache.zookeeper.ZooKeeper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.data.solr.core.SolrTemplate; - -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -public class SolrCollectionConfigurer implements Configurer { - - private Logger LOG = LoggerFactory.getLogger(SolrCollectionConfigurer.class); - - private static final int SETUP_RETRY_SECOND = 10; - private static final int SESSION_TIMEOUT = 15000; - private static final int CONNECTION_TIMEOUT = 30000; - private static final String JAVA_SECURITY_AUTH_LOGIN_CONFIG = "java.security.auth.login.config"; - private static final String SOLR_HTTPCLIENT_BUILDER_FACTORY = "solr.httpclient.builder.factory"; - - private final SolrDaoBase solrDaoBase; - private final boolean hasEnumConfig; // enumConfig.xml for solr collection - private final SolrClientsHolder solrClientsHolder; - private final SolrClientsHolder.CollectionType collectionType; - - public SolrCollectionConfigurer(SolrDaoBase solrDaoBase, boolean hasEnumConfig, - SolrClientsHolder solrClientsHolder, SolrClientsHolder.CollectionType collectionType) { - this.solrDaoBase = solrDaoBase; - this.hasEnumConfig = hasEnumConfig; - this.solrClientsHolder = solrClientsHolder; - this.collectionType = collectionType; - } - - @Override - public void start() { - setupSecurity(); - final SolrPropsConfig solrPropsConfig = solrDaoBase.getSolrPropsConfig(); - final SolrCollectionState state = solrDaoBase.getSolrCollectionState(); - final String separator = FileSystems.getDefault().getSeparator(); - final String localConfigSetLocation = String.format("%s%s%s%sconf", solrPropsConfig.getConfigSetFolder(), separator, - solrPropsConfig.getConfigName(), separator); - final File configSetFolder = new File(localConfigSetLocation); - if (!configSetFolder.exists()) { // show exception only once during startup - throw new RuntimeException(String.format("Cannot load config set location: %s", localConfigSetLocation)); - } - Thread setupThread = new Thread("setup_collection_" + solrPropsConfig.getCollection()) { - @Override - public void run() { - LOG.info("Started monitoring thread to check availability of Solr server. 
collection=" + solrPropsConfig.getCollection()); - while (!stopSetupCondition(state)) { - int retryCount = 0; - try { - retryCount++; - Thread.sleep(SETUP_RETRY_SECOND * 1000); - openZkConnectionAndUpdateStatus(state, solrPropsConfig); - if (solrDaoBase.getSolrTemplate() == null) { - solrDaoBase.setSolrTemplate(createSolrTemplate(solrPropsConfig)); - } - CloudSolrClient cloudSolrClient = (CloudSolrClient) solrClientsHolder.getSolrClient(collectionType); - boolean reloadCollectionNeeded = uploadConfigurationsIfNeeded(cloudSolrClient, configSetFolder, state, solrPropsConfig); - checkSolrStatus(cloudSolrClient); - createCollectionsIfNeeded(cloudSolrClient, state, solrPropsConfig, reloadCollectionNeeded); - } catch (Exception e) { - retryCount++; - LOG.error("Error setting collection. collection=" + solrPropsConfig.getCollection() + ", retryCount=" + retryCount, e); - } - } - } - }; - setupThread.setDaemon(true); - setupThread.start(); - } - - private boolean uploadConfigurationsIfNeeded(CloudSolrClient cloudSolrClient, File configSetFolder, SolrCollectionState state, SolrPropsConfig solrPropsConfig) throws Exception { - boolean reloadCollectionNeeded = new UploadConfigurationHandler(configSetFolder, hasEnumConfig).handle(cloudSolrClient, solrPropsConfig); - if (!state.isConfigurationUploaded()) { - state.setConfigurationUploaded(true); - } - return reloadCollectionNeeded; - } - - public boolean stopSetupCondition(SolrCollectionState state) { - return state.isSolrCollectionReady(); - } - - public SolrTemplate createSolrTemplate(SolrPropsConfig solrPropsConfig) { - SolrClient solrClient = createClient( - solrPropsConfig.getSolrUrl(), - solrPropsConfig.getZkConnectString(), - solrPropsConfig.getCollection()); - solrClientsHolder.setSolrClient(solrClient, collectionType); - return new SolrTemplate(solrClient); - } - - private CloudSolrClient createClient(String solrUrl, String zookeeperConnectString, String defaultCollection) { - if (StringUtils.isNotEmpty(zookeeperConnectString)) { - CloudSolrClient cloudSolrClient = new CloudSolrClient.Builder().withZkHost(zookeeperConnectString).build(); - cloudSolrClient.setDefaultCollection(defaultCollection); - return cloudSolrClient; - } else if (StringUtils.isNotEmpty(solrUrl)) { - throw new UnsupportedOperationException("Currently only cloud mode is supported. Set zookeeper connect string."); - } - throw new IllegalStateException( - "Solr url or zookeeper connection string is missing. collection: " + defaultCollection); - } - - private void setupSecurity() { - boolean securityEnabled = solrDaoBase.getSolrKerberosConfig().isEnabled(); - if (securityEnabled) { - String javaSecurityConfig = System.getProperty(JAVA_SECURITY_AUTH_LOGIN_CONFIG); - String solrHttpBuilderFactory = System.getProperty(SOLR_HTTPCLIENT_BUILDER_FACTORY); - LOG.info("setupSecurity() called for kerberos configuration, jaas file: {}, solr http client factory: {}", - javaSecurityConfig, solrHttpBuilderFactory); - } - } - - private void openZkConnectionAndUpdateStatus(final SolrCollectionState state, final SolrPropsConfig solrPropsConfig) throws Exception { - ZooKeeper zkClient = null; - try { - LOG.info("Checking that Znode ('{}') is ready or not... 
", solrPropsConfig.getZkConnectString()); - zkClient = openZookeeperConnection(solrPropsConfig); - if (!state.isZnodeReady()) { - LOG.info("State change: Zookeeper ZNode is available for {}", solrPropsConfig.getZkConnectString()); - state.setZnodeReady(true); - } - } catch (Exception e) { - LOG.error("Error occurred during the creation of zk client (connection string: {})", solrPropsConfig.getZkConnectString()); - throw e; - } finally { - try { - if (zkClient != null) { - zkClient.close(); - } - } catch (Exception e) { - LOG.error("Could not close zk connection properly.", e); - } - } - } - - private ZooKeeper openZookeeperConnection(final SolrPropsConfig solrPropsConfig) throws InterruptedException, IOException { - final CountDownLatch connSignal = new CountDownLatch(1); - ZooKeeper zooKeeper = new ZooKeeper(solrPropsConfig.getZkConnectString(), SESSION_TIMEOUT, new Watcher() { - public void process(WatchedEvent event) { - if (event.getState() == Event.KeeperState.SyncConnected) { - connSignal.countDown(); - } - } - }); - connSignal.await(CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS); - return zooKeeper; - } - - private boolean checkSolrStatus(CloudSolrClient cloudSolrClient) { - int waitDurationMS = 3 * 60 * 1000; - boolean status = false; - try { - long beginTimeMS = System.currentTimeMillis(); - long waitIntervalMS = 2000; - int pingCount = 0; - while (true) { - pingCount++; - try { - List collectionList = new ListCollectionHandler().handle(cloudSolrClient, null); - if (collectionList != null) { - LOG.info("checkSolrStatus(): Solr getCollections() is success. collectionList=" + collectionList); - status = true; - break; - } - } catch (Exception ex) { - LOG.error("Error while doing Solr check", ex); - } - if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) { - LOG.error("Solr is not reachable even after " + (System.currentTimeMillis() - beginTimeMS) + " ms. " + - "If you are using alias, then you might have to restart LogSearch after Solr is up and running."); - break; - } else { - LOG.warn("Solr is not not reachable yet. getCollections() attempt count=" + pingCount + ". " + - "Will sleep for " + waitIntervalMS + " ms and try again."); - } - Thread.sleep(waitIntervalMS); - - } - } catch (Throwable t) { - LOG.error("Seems Solr is not up."); - } - return status; - } - - private void createCollectionsIfNeeded(CloudSolrClient solrClient, SolrCollectionState state, SolrPropsConfig solrPropsConfig, - boolean reloadCollectionNeeded) { - try { - List allCollectionList = new ListCollectionHandler().handle(solrClient, null); - solrDaoBase.waitForLogSearchConfig(); - CreateCollectionHandler handler = new CreateCollectionHandler(allCollectionList); - boolean collectionCreated = handler.handle(solrClient, solrPropsConfig); - boolean collectionReloaded = true; - if (reloadCollectionNeeded) { - collectionReloaded = new ReloadCollectionHandler().handle(solrClient, solrPropsConfig); - } - boolean aclsUpdated = new ACLHandler().handle(solrClient, solrPropsConfig); - if (!state.isSolrCollectionReady() && collectionCreated && collectionReloaded && aclsUpdated) { - state.setSolrCollectionReady(true); - } - } catch (Exception ex) { - LOG.error("Error during creating/updating collection. 
collectionName=" + solrPropsConfig.getCollection(), ex); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SslConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SslConfigurer.java deleted file mode 100644 index f4e294716db..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SslConfigurer.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.configurer; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.net.ssl.SSLContext; - -import org.apache.ambari.logsearch.conf.LogSearchSslConfig; -import org.apache.ambari.logsearch.util.FileUtil; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang3.ArrayUtils; -import org.apache.hadoop.conf.Configuration; -import org.bouncycastle.asn1.ASN1InputStream; -import org.bouncycastle.asn1.x500.X500Name; -import org.bouncycastle.asn1.x509.AlgorithmIdentifier; -import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; -import org.bouncycastle.jce.provider.BouncyCastleProvider; -import org.bouncycastle.operator.ContentSigner; -import org.bouncycastle.operator.DefaultDigestAlgorithmIdentifierFinder; -import org.bouncycastle.operator.DefaultSignatureAlgorithmIdentifierFinder; -import org.bouncycastle.operator.OperatorCreationException; -import org.bouncycastle.operator.bc.BcContentSignerBuilder; -import org.bouncycastle.operator.bc.BcRSAContentSignerBuilder; -import org.bouncycastle.cert.X509CertificateHolder; -import org.bouncycastle.cert.X509v3CertificateBuilder; -import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; -import org.bouncycastle.crypto.params.RSAKeyParameters; -import org.eclipse.jetty.util.ssl.SslContextFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.math.BigInteger; -import java.net.InetAddress; -import java.security.InvalidKeyException; -import java.security.KeyPair; -import java.security.KeyPairGenerator; -import java.security.KeyStore; -import java.security.NoSuchAlgorithmException; -import java.security.NoSuchProviderException; -import java.security.SecureRandom; -import java.security.Security; -import java.security.SignatureException; -import java.security.cert.Certificate; -import java.security.cert.CertificateException; -import java.security.cert.CertificateFactory; -import java.security.cert.X509Certificate; -import java.security.interfaces.RSAPrivateKey; -import 
java.security.interfaces.RSAPublicKey; -import java.util.Date; - -import static org.apache.ambari.logsearch.conf.LogSearchSslConfig.CREDENTIAL_STORE_PROVIDER_PATH; -import static org.apache.ambari.logsearch.conf.LogSearchSslConfig.LOGSEARCH_CERT_DEFAULT_FOLDER; - -@Named -public class SslConfigurer { - private static final Logger LOG = LoggerFactory.getLogger(SslConfigurer.class); - - private static final String KEYSTORE_LOCATION_ARG = "javax.net.ssl.keyStore"; - private static final String KEYSTORE_PASSWORD_ARG = "javax.net.ssl.keyStorePassword"; - private static final String KEYSTORE_TYPE_ARG = "javax.net.ssl.keyStoreType"; - private static final String DEFAULT_KEYSTORE_TYPE = "JKS"; - private static final String TRUSTSTORE_LOCATION_ARG = "javax.net.ssl.trustStore"; - private static final String TRUSTSTORE_PASSWORD_ARG = "javax.net.ssl.trustStorePassword"; - private static final String TRUSTSTORE_TYPE_ARG = "javax.net.ssl.trustStoreType"; - private static final String DEFAULT_TRUSTSTORE_TYPE = "JKS"; - private static final String KEYSTORE_PASSWORD_PROPERTY_NAME = "logsearch_keystore_password"; - private static final String TRUSTSTORE_PASSWORD_PROPERTY_NAME = "logsearch_truststore_password"; - private static final String KEYSTORE_PASSWORD_FILE = "ks_pass.txt"; - private static final String TRUSTSTORE_PASSWORD_FILE = "ts_pass.txt"; - - private static final String LOGSEARCH_CERT_FILENAME = "logsearch.crt"; - private static final String LOGSEARCH_KEYSTORE_FILENAME = "logsearch.jks"; - private static final String LOGSEARCH_KEYSTORE_PRIVATE_KEY = "logsearch.private.key"; - private static final String LOGSEARCH_KEYSTORE_PUBLIC_KEY = "logsearch.public.key"; - - private static final String LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD = "bigdata"; - - @Inject - private LogSearchSslConfig logSearchSslConfig; - - private String getKeyStoreLocation() { - return System.getProperty(KEYSTORE_LOCATION_ARG); - } - - private String getKeyStorePassword() { - return System.getProperty(KEYSTORE_PASSWORD_ARG); - } - - private String getKeyStoreType() { - return System.getProperty(KEYSTORE_TYPE_ARG, DEFAULT_KEYSTORE_TYPE); - } - - private String getTrustStoreLocation() { - return System.getProperty(TRUSTSTORE_LOCATION_ARG); - } - - private String getTrustStorePassword() { - return System.getProperty(TRUSTSTORE_PASSWORD_ARG); - } - - private String getTrustStoreType() { - return System.getProperty(TRUSTSTORE_TYPE_ARG, DEFAULT_TRUSTSTORE_TYPE); - } - - public boolean isKeyStoreSpecified() { - return StringUtils.isNotEmpty(getKeyStoreLocation()); - } - - private boolean isTrustStoreSpecified() { - return StringUtils.isNotEmpty(getTrustStoreLocation()); - } - - public SslContextFactory getSslContextFactory() { - SslContextFactory sslContextFactory = new SslContextFactory(); - sslContextFactory.setKeyStorePath(getKeyStoreLocation()); - sslContextFactory.setKeyStorePassword(getKeyStorePassword()); - sslContextFactory.setKeyStoreType(getKeyStoreType()); - if (isTrustStoreSpecified()) { - sslContextFactory.setTrustStorePath(getTrustStoreLocation()); - sslContextFactory.setTrustStorePassword(getTrustStorePassword()); - sslContextFactory.setTrustStoreType(getTrustStoreType()); - } - - return sslContextFactory; - } - - public SSLContext getSSLContext() { - SslContextFactory sslContextFactory = getSslContextFactory(); - - try { - sslContextFactory.start(); - return sslContextFactory.getSslContext(); - } catch (Exception e) { - LOG.error("Could not create SSL Context", e); - return null; - } finally { - try { - 
sslContextFactory.stop(); - } catch (Exception e) { - LOG.error("Could not stop sslContextFactory", e); - } - } - } - - private String getPasswordFromFile(String fileName) { - try { - File pwdFile = new File(LOGSEARCH_CERT_DEFAULT_FOLDER, fileName); - if (!pwdFile.exists()) { - FileUtils.writeStringToFile(pwdFile, LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD); - return LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD; - } else { - return FileUtils.readFileToString(pwdFile); - } - } catch (Exception e) { - LOG.warn("Exception occurred during read/write password file for keystore/truststore.", e); - return null; - } - } - - private String getPasswordFromCredentialStore(String propertyName) { - try { - String providerPath = logSearchSslConfig.getCredentialStoreProviderPath(); - if (StringUtils.isEmpty(providerPath)) { - return null; - } - - Configuration config = new Configuration(); - config.set(CREDENTIAL_STORE_PROVIDER_PATH, providerPath); - char[] passwordChars = config.getPassword(propertyName); - return (ArrayUtils.isNotEmpty(passwordChars)) ? new String(passwordChars) : null; - } catch (Exception e) { - LOG.warn(String.format("Could not load password %s from credential store, using default password", propertyName), e); - return null; - } - } - - private String getPassword(String propertyName, String fileName) { - String credentialStorePassword = getPasswordFromCredentialStore(propertyName); - if (credentialStorePassword != null) { - return credentialStorePassword; - } - - String filePassword = getPasswordFromFile(fileName); - if (filePassword != null) { - return filePassword; - } - - return LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD; - } - - /** - * Put private key into in-memory keystore and write it to a file (JKS file) - */ - private void setKeyAndCertInKeystore(X509Certificate cert, KeyPair keyPair, KeyStore keyStore, String keyStoreLocation, char[] password) - throws Exception { - Certificate[] certChain = new Certificate[1]; - certChain[0] = cert; - try (FileOutputStream fos = new FileOutputStream(keyStoreLocation)) { - keyStore.setKeyEntry("logsearch.alias", keyPair.getPrivate(), password, certChain); - keyStore.store(fos, password); - } catch (Exception e) { - LOG.error("Could not write certificate to Keystore", e); - throw e; - } - } - - /** - * Create in-memory keypair with bouncy castle - */ - private KeyPair createKeyPair(String encryptionType, int byteCount) - throws NoSuchProviderException, NoSuchAlgorithmException { - Security.addProvider(new BouncyCastleProvider()); - KeyPairGenerator keyPairGenerator = createKeyPairGenerator(encryptionType, byteCount); - return keyPairGenerator.genKeyPair(); - } - - /** - * Generate X509 certificate if it does not exist - */ - private X509Certificate generateCertificate(String certificateLocation, KeyPair keyPair, String algorithm) throws Exception { - try { - File certFile = new File(certificateLocation); - if (certFile.exists()) { - LOG.info("Certificate file exists ({}), skip the generation.", certificateLocation); - return getCertFile(certificateLocation); - } else { - Security.addProvider(new BouncyCastleProvider()); - X509Certificate cert = createCert(keyPair, algorithm, InetAddress.getLocalHost().getCanonicalHostName()); - FileUtils.writeByteArrayToFile(certFile, cert.getEncoded()); - return cert; - } - } catch (Exception e) { - LOG.error("Could not create certificate.", e); - throw e; - } - } - - private void ensureStorePassword(String locationArg, String pwdArg, String propertyName, String fileName) { - if 
(StringUtils.isNotEmpty(System.getProperty(locationArg)) && StringUtils.isEmpty(System.getProperty(pwdArg))) { - String password = getPassword(propertyName, fileName); - System.setProperty(pwdArg, password); - } - } - - public void ensureStorePasswords() { - ensureStorePassword(KEYSTORE_LOCATION_ARG, KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_PROPERTY_NAME, KEYSTORE_PASSWORD_FILE); - ensureStorePassword(TRUSTSTORE_LOCATION_ARG, TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_PROPERTY_NAME, TRUSTSTORE_PASSWORD_FILE); - } - - private X509Certificate getCertFile(String location) throws Exception { - try (FileInputStream fos = new FileInputStream(location)) { - CertificateFactory factory = CertificateFactory.getInstance("X.509"); - return (X509Certificate) factory.generateCertificate(fos); - } catch (Exception e) { - LOG.error("Cannot read cert file. ('" + location + "')", e); - throw e; - } - } - - private X509Certificate createCert(KeyPair keyPair, String signatureAlgoritm, String domainName) - throws NoSuchAlgorithmException, InvalidKeyException, SignatureException, OperatorCreationException, CertificateException, IOException { - - RSAPublicKey rsaPublicKey = (RSAPublicKey) keyPair.getPublic(); - RSAPrivateKey rsaPrivateKey = (RSAPrivateKey) keyPair.getPrivate(); - - AlgorithmIdentifier sigAlgId = new DefaultSignatureAlgorithmIdentifierFinder().find(signatureAlgoritm); - AlgorithmIdentifier digAlgId = new DefaultDigestAlgorithmIdentifierFinder().find(sigAlgId); - BcContentSignerBuilder sigGen = new BcRSAContentSignerBuilder(sigAlgId, digAlgId); - - ASN1InputStream publicKeyStream = new ASN1InputStream(rsaPublicKey.getEncoded()); - SubjectPublicKeyInfo pubKey = SubjectPublicKeyInfo.getInstance(publicKeyStream.readObject()); - publicKeyStream.close(); - - X509v3CertificateBuilder v3CertBuilder = new X509v3CertificateBuilder( - new X500Name("CN=" + domainName + ", OU=None, O=None L=None, C=None"), - BigInteger.valueOf(Math.abs(new SecureRandom().nextInt())), - new Date(System.currentTimeMillis() - 1000L * 60 * 60 * 24 * 30), - new Date(System.currentTimeMillis() + (1000L * 60 * 60 * 24 * 365*10)), - new X500Name("CN=" + domainName + ", OU=None, O=None L=None, C=None"), - pubKey); - - RSAKeyParameters keyParams = new RSAKeyParameters(true, rsaPrivateKey.getPrivateExponent(), rsaPrivateKey.getModulus()); - ContentSigner contentSigner = sigGen.build(keyParams); - - X509CertificateHolder certificateHolder = v3CertBuilder.build(contentSigner); - - JcaX509CertificateConverter certConverter = new JcaX509CertificateConverter().setProvider("BC"); - return certConverter.getCertificate(certificateHolder); - } - - private KeyPairGenerator createKeyPairGenerator(String algorithmIdentifier, int bitCount) - throws NoSuchProviderException, NoSuchAlgorithmException { - KeyPairGenerator kpg = KeyPairGenerator.getInstance(algorithmIdentifier, BouncyCastleProvider.PROVIDER_NAME); - kpg.initialize(bitCount); - return kpg; - } - - /** - * Create keystore with keys and certificate (only if the keystore does not exist or if you have no permissions on the keystore file) - */ - public void loadKeystore() { - try { - String certFolder = logSearchSslConfig.getCertFolder(); - String certAlgorithm = logSearchSslConfig.getCertAlgorithm(); - String certLocation = String.format("%s/%s", LOGSEARCH_CERT_DEFAULT_FOLDER, LOGSEARCH_CERT_FILENAME); - String keyStoreLocation = StringUtils.isNotEmpty(getKeyStoreLocation()) ? 
getKeyStoreLocation() - : String.format("%s/%s", LOGSEARCH_CERT_DEFAULT_FOLDER, LOGSEARCH_KEYSTORE_FILENAME); - char[] password = StringUtils.isNotEmpty(getKeyStorePassword()) ? - getKeyStorePassword().toCharArray() : LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD.toCharArray(); - boolean keyStoreFileExists = new File(keyStoreLocation).exists(); - if (!keyStoreFileExists) { - FileUtil.createDirectory(certFolder); - LOG.warn("Keystore file ('{}') does not exist, creating new one. " + - "If the file exists, make sure you have proper permissions on that.", keyStoreLocation); - if (isKeyStoreSpecified() && !"JKS".equalsIgnoreCase(getKeyStoreType())) { - throw new RuntimeException(String.format("Keystore does not exist. Only JKS keystore can be auto generated. (%s)", keyStoreLocation)); - } - LOG.info("SSL keystore is not specified. Generating it with certificate ... (using default format: JKS)"); - Security.addProvider(new BouncyCastleProvider()); - KeyPair keyPair = createKeyPair("RSA", 2048); - File privateKeyFile = new File(String.format("%s/%s", certFolder, LOGSEARCH_KEYSTORE_PRIVATE_KEY)); - if (!privateKeyFile.exists()) { - FileUtils.writeByteArrayToFile(privateKeyFile, keyPair.getPrivate().getEncoded()); - } - File file = new File(String.format("%s/%s", certFolder, LOGSEARCH_KEYSTORE_PUBLIC_KEY)); - if (!file.exists()) { - FileUtils.writeByteArrayToFile(file, keyPair.getPublic().getEncoded()); - } - X509Certificate cert = generateCertificate(certLocation, keyPair, certAlgorithm); - KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); - keyStore.load(null, password); - setKeyAndCertInKeystore(cert, keyPair, keyStore, keyStoreLocation, password); - FileUtil.setPermissionOnDirectory(certFolder, "600"); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractAuditLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractAuditLogRequestQueryConverter.java deleted file mode 100644 index 5656c889e44..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractAuditLogRequestQueryConverter.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
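For reference, the deleted SSL utility above resolves the store password from a credential store, then a password file, then a built-in default, and, when no keystore exists, generates an RSA key pair, self-signs a certificate with Bouncy Castle and writes both into a JKS keystore. A minimal, self-contained sketch of that generation flow follows; the class name, output path and password literal are hypothetical, and the signer setup uses the JCA-style Bouncy Castle builders rather than the Bc* classes used in the original.

import java.io.FileOutputStream;
import java.math.BigInteger;
import java.net.InetAddress;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.Security;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.Date;

import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;

public class SelfSignedKeystoreSketch {
  public static void main(String[] args) throws Exception {
    Security.addProvider(new BouncyCastleProvider());

    // 2048-bit RSA key pair, matching createKeyPair("RSA", 2048) above
    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA", "BC");
    kpg.initialize(2048);
    KeyPair keyPair = kpg.genKeyPair();

    // Self-signed certificate for the local host name, valid from 30 days ago to roughly 10 years ahead
    X500Name dn = new X500Name("CN=" + InetAddress.getLocalHost().getCanonicalHostName());
    Date notBefore = new Date(System.currentTimeMillis() - 1000L * 60 * 60 * 24 * 30);
    Date notAfter = new Date(System.currentTimeMillis() + 1000L * 60 * 60 * 24 * 365 * 10);
    JcaX509v3CertificateBuilder certBuilder = new JcaX509v3CertificateBuilder(
        dn, BigInteger.valueOf(System.currentTimeMillis()), notBefore, notAfter, dn, keyPair.getPublic());
    ContentSigner signer = new JcaContentSignerBuilder("SHA256withRSA").setProvider("BC").build(keyPair.getPrivate());
    X509CertificateHolder holder = certBuilder.build(signer);
    X509Certificate cert = new JcaX509CertificateConverter().setProvider("BC").getCertificate(holder);

    // Store the private key and its certificate chain in a JKS keystore, as setKeyAndCertInKeystore() does
    char[] password = "changeit".toCharArray(); // hypothetical; the original uses the resolved store password
    KeyStore keyStore = KeyStore.getInstance("JKS");
    keyStore.load(null, password);
    keyStore.setKeyEntry("logsearch.alias", keyPair.getPrivate(), password, new Certificate[] { cert });
    try (FileOutputStream fos = new FileOutputStream("/tmp/logsearch.jks")) {
      keyStore.store(fos, password);
    }
  }
}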
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; -import org.apache.commons.lang3.StringUtils; -import org.springframework.data.domain.Sort; -import org.springframework.data.solr.core.query.Query; - -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.SEQUENCE_ID; - -public abstract class AbstractAuditLogRequestQueryConverter - extends AbstractLogRequestQueryConverter{ - - @Override - public Sort sort(SOURCE request) { - String sortBy = request.getSortBy(); - String sortType = request.getSortType(); - Sort.Order defaultSortOrder; - if (StringUtils.isNotBlank(sortBy)) { - Sort.Direction direction = StringUtils.equals(sortType , LogSearchConstants.ASCENDING_ORDER) ? Sort.Direction.ASC : Sort.Direction.DESC; - defaultSortOrder = new Sort.Order(direction, sortBy); - } else { - defaultSortOrder = new Sort.Order(Sort.Direction.DESC, AUDIT_EVTTIME); - } - Sort.Order sequenceIdOrder = new Sort.Order(Sort.Direction.DESC, SEQUENCE_ID); - return new Sort(defaultSortOrder, sequenceIdOrder); - } - - @Override - public void addComponentFilters(SOURCE request, RESULT query) { - List includeTypes = splitValueAsList(request.getMustBe(), ","); - List excludeTypes = splitValueAsList(request.getMustNot(), ","); - addInFilterQuery(query, AUDIT_COMPONENT, includeTypes); - addInFilterQuery(query, AUDIT_COMPONENT, excludeTypes, true); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractConverterAware.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractConverterAware.java deleted file mode 100644 index a4db91fea3a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractConverterAware.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
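The audit request converter above maps the comma-separated mustBe / mustNot parameters to positive and negated in-filters on the audit component field, and falls back to event time (descending) with the sequence id as a tie-breaker when no sortBy is supplied. Conceptually, with placeholders standing for the Solr fields behind AUDIT_COMPONENT, AUDIT_EVTTIME and SEQUENCE_ID:

  mustBe  = "ambari,hdfs"  ->  fq=<component>:(ambari OR hdfs)
  mustNot = "yarn"         ->  fq=-<component>:(yarn)
  no sortBy                ->  sort=<evttime> desc, <seq_num> desc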
- */ -package org.apache.ambari.logsearch.converter; - -import org.springframework.core.convert.ConversionService; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.convert.converter.ConverterRegistry; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -public abstract class AbstractConverterAware implements Converter { - - @Inject - @Named("conversionService") - private ConversionService conversionService; - - public ConversionService getConversionService() { - return conversionService; - } - - @PostConstruct - private void register() { - if (conversionService instanceof ConverterRegistry) { - ((ConverterRegistry) conversionService).addConverter(this); - } else { - throw new IllegalStateException("Can't register Converter to ConverterRegistry"); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractDateRangeFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractDateRangeFacetQueryConverter.java deleted file mode 100644 index 2143f560229..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractDateRangeFacetQueryConverter.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.DateRangeParamDefinition; -import org.apache.ambari.logsearch.model.request.UnitParamDefinition; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrQuery; - -import java.util.Locale; - -public abstract class AbstractDateRangeFacetQueryConverter - extends AbstractOperationHolderConverter { - - @Override - public SolrQuery convert(SOURCE request) { - SolrQuery solrQuery = new SolrQuery(); - String unit = StringUtils.defaultIfEmpty(request.getUnit(), "+1HOUR"); - solrQuery.setQuery("*:*"); - solrQuery.setFacet(true); - solrQuery.addFacetPivotField("{!range=r1}" + getTypeFieldName()); - solrQuery.setFacetMinCount(1); - solrQuery.setFacetLimit(-1); - solrQuery.setFacetSort(LogSearchConstants.FACET_INDEX); - solrQuery.add("facet.range", "{!tag=r1}" + getDateFieldName()); - solrQuery.add(String.format(Locale.ROOT, "f.%s.%s", new Object[]{getDateFieldName(), "facet.range.start"}), request.getFrom()); - solrQuery.add(String.format(Locale.ROOT, "f.%s.%s", new Object[]{getDateFieldName(), "facet.range.end"}), request.getTo()); - solrQuery.add(String.format(Locale.ROOT, "f.%s.%s", new Object[]{getDateFieldName(), "facet.range.gap"}), unit); - solrQuery.remove("sort"); - solrQuery.setRows(0); - solrQuery.setStart(0); - return solrQuery; - } - - public abstract String getDateFieldName(); - - public abstract String getTypeFieldName(); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractLogRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractLogRequestFacetQueryConverter.java deleted file mode 100644 index db346844f4c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractLogRequestFacetQueryConverter.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
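Spelled out, the date-range facet converter above produces a Solr request along these lines, where <dateField> and <typeField> stand for whatever getDateFieldName() and getTypeFieldName() return, from/to/unit come from the request, the gap defaults to +1HOUR, and LogSearchConstants.FACET_INDEX is assumed to resolve to "index":

  q=*:*&rows=0&start=0
  facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index
  facet.pivot={!range=r1}<typeField>
  facet.range={!tag=r1}<dateField>
  f.<dateField>.facet.range.start=<from>
  f.<dateField>.facet.range.end=<to>
  f.<dateField>.facet.range.gap=<unit>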
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; -import org.apache.commons.lang.StringEscapeUtils; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.Criteria; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; -import org.springframework.data.solr.core.query.SimpleFilterQuery; -import org.springframework.data.solr.core.query.SimpleStringCriteria; - -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; - -public abstract class AbstractLogRequestFacetQueryConverter extends AbstractOperationHolderConverter{ - - @Override - public SimpleFacetQuery convert(SOURCE request) { - String fromValue = StringUtils.isNotEmpty(request.getFrom()) ? request.getFrom() : "*"; - String toValue = StringUtils.isNotEmpty(request.getTo()) ? request.getTo() : "*"; - Criteria criteria = new SimpleStringCriteria("*:*"); - SimpleFacetQuery facetQuery = new SimpleFacetQuery(); - facetQuery.addCriteria(criteria); - SimpleFilterQuery simpleFilterQuery = new SimpleFilterQuery(); - simpleFilterQuery.addCriteria(new SimpleStringCriteria(getDateTimeField() + ":[" + fromValue +" TO "+ toValue+ "]" )); - facetQuery.addFilterQuery(simpleFilterQuery); - FacetOptions facetOptions = new FacetOptions(); - facetOptions.setFacetMinCount(1); - facetOptions.setFacetSort(getFacetSort()); - facetOptions.setFacetLimit(-1); - appendFacetOptions(facetOptions, request); - addIncludeFieldValues(facetQuery, StringEscapeUtils.unescapeXml(request.getIncludeQuery())); - addExcludeFieldValues(facetQuery, StringEscapeUtils.unescapeXml(request.getExcludeQuery())); - facetQuery.setFacetOptions(facetOptions); - facetQuery.setRows(0); - addComponentFilters(facetQuery, request); - appendFacetQuery(facetQuery, request); - addInFilterQuery(facetQuery, CLUSTER, splitValueAsList(request.getClusters(), ",")); - return facetQuery; - } - - public abstract FacetOptions.FacetSort getFacetSort(); - - public abstract String getDateTimeField(); - - public abstract LogType getLogType(); - - @SuppressWarnings("unused") - public void appendFacetQuery(SimpleFacetQuery facetQuery, SOURCE request) { - } - - @SuppressWarnings("unused") - public void appendFacetOptions(FacetOptions facetOptions, SOURCE request) { - facetOptions.setFacetLimit(-1); - } - - private void addComponentFilters(SimpleFacetQuery query, SOURCE request) { - List includeTypes = splitValueAsList(request.getMustBe(), ","); - List excludeTypes = splitValueAsList(request.getMustNot(), ","); - if (LogType.AUDIT.equals(getLogType())) { - addInFilterQuery(query, AUDIT_COMPONENT, includeTypes); - addInFilterQuery(query, AUDIT_COMPONENT, excludeTypes, true); - } else if (LogType.SERVICE.equals(getLogType())) { - addInFilterQuery(query, COMPONENT, includeTypes); - addInFilterQuery(query, COMPONENT, excludeTypes, true); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractLogRequestQueryConverter.java deleted file 
mode 100644 index d9f51f417c7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractLogRequestQueryConverter.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; -import org.apache.commons.lang.StringEscapeUtils; -import org.springframework.data.solr.core.query.Query; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; - -public abstract class AbstractLogRequestQueryConverter - extends AbstractSearchRequestQueryConverter { - - @Override - public QUERY_TYPE extendSolrQuery(REQUEST_TYPE request, QUERY_TYPE query) { - addComponentFilters(request, query); - addIncludeFieldValues(query, StringEscapeUtils.unescapeXml(request.getIncludeQuery())); - addExcludeFieldValues(query, StringEscapeUtils.unescapeXml(request.getExcludeQuery())); - addInFilterQuery(query, CLUSTER, splitValueAsList(request.getClusters(), ",")); - return extendLogQuery(request, query); - } - - public abstract QUERY_TYPE extendLogQuery(REQUEST_TYPE request, QUERY_TYPE query); - - public abstract void addComponentFilters(REQUEST_TYPE request, QUERY_TYPE query); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java deleted file mode 100644 index 5e7f440d38d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import com.google.common.base.Splitter; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.dao.SolrSchemaFieldDao; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.text.StrTokenizer; -import org.apache.solr.client.solrj.SolrQuery; -import org.springframework.data.solr.core.query.Criteria; -import org.springframework.data.solr.core.query.Query; -import org.springframework.data.solr.core.query.SimpleFilterQuery; -import org.springframework.data.solr.core.query.SimpleStringCriteria; - -import javax.inject.Inject; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOG_MESSAGE; - -public abstract class AbstractOperationHolderConverter - extends AbstractConverterAware { - - @Inject - private SolrSchemaFieldDao solrSchemaFieldDao; - - public List splitValueAsList(String value, String separator) { - return StringUtils.isNotEmpty(value) ? Splitter.on(separator).omitEmptyStrings().splitToList(value) : null; - } - - public Query addEqualsFilterQuery(Query query, String field, String value) { - return this.addEqualsFilterQuery(query, field, value, false); - } - - public Query addEqualsFilterQuery(Query query, String field, String value, boolean negate) { - if (StringUtils.isNotEmpty(value)) { - addFilterQuery(query, new Criteria(field).is(value), negate); - } - return query; - } - - public Query addContainsFilterQuery(Query query, String field, String value) { - return this.addContainsFilterQuery(query, field, value, false); - } - - public Query addContainsFilterQuery(Query query, String field, String value, boolean negate) { - if (StringUtils.isNotEmpty(value)) { - addFilterQuery(query, new Criteria(field).contains(value), negate); - } - return query; - } - - public Query addInFilterQuery(Query query, String field, List values) { - return this.addInFilterQuery(query, field, values, false); - } - - public Query addInFiltersIfNotNullAndEnabled(Query query, String value, String field, boolean condition) { - if (value != null && condition) { - List values = value.length() == 0 ? Arrays.asList("-1") : splitValueAsList(value, ","); - addInFilterQuery(query, field, values); - } - return query; - } - - public SolrQuery addInFiltersIfNotNullAndEnabled(SolrQuery query, String value, String field, boolean condition) { - if (condition) { - List valuesList = value.length() == 0 ? 
Arrays.asList("\\-1") : splitValueAsList(value, ","); - if (valuesList.size() > 1) { - query.addFilterQuery(String.format("%s:(%s)", field, StringUtils.join(valuesList, " OR "))); - } else { - query.addFilterQuery(String.format("%s:%s", field, valuesList.get(0))); - } - } - return query; - } - - public Query addInFilterQuery(Query query, String field, List values, boolean negate) { - if (CollectionUtils.isNotEmpty(values)) { - String orQueryStr = StringUtils.join(values, " OR "); - addFilterQuery(query, new Criteria(field).in(orQueryStr.split(" ")), negate); - } - return query; - } - - public Query addRangeFilter(Query query, String field, String from, String to) { - return this.addRangeFilter(query, field, from, to, false); - } - - public Query addRangeFilter(Query query, String field, String from, String to, boolean negate) { // TODO use criteria.between without escaping - String fromValue = StringUtils.defaultIfEmpty(from, "*"); - String toValue = StringUtils.defaultIfEmpty(to, "*"); - addFilterQuery(query, new SimpleStringCriteria(field + ":[" + fromValue +" TO "+ toValue + "]" ), negate); - return query; - } - - public void addFilterQuery(Query query, Criteria criteria, boolean negate) { - if (negate) { - criteria.not(); - } - query.addFilterQuery(new SimpleFilterQuery(criteria)); - } - - public Query addIncludeFieldValues(Query query, String fieldValuesMapStr) { - if (StringUtils.isNotEmpty(fieldValuesMapStr)) { - List> criterias = new Gson().fromJson(fieldValuesMapStr, - new TypeToken>>(){}.getType()); - for (Map criteriaMap : criterias) { - for (Map.Entry fieldEntry : criteriaMap.entrySet()) { - if (fieldEntry.getKey().equalsIgnoreCase(LOG_MESSAGE)) { - addLogMessageFilter(query, fieldEntry.getValue(), false); - } else { - addFilterQuery(query, new Criteria(fieldEntry.getKey()).is(escapeNonLogMessageField(fieldEntry)), false); - } - } - } - } - return query; - } - - public SolrQuery addIncludeFieldValues(SolrQuery query, String fieldValuesMapStr) { - if (StringUtils.isNotEmpty(fieldValuesMapStr)) { - List> criterias = new Gson().fromJson(fieldValuesMapStr, - new TypeToken>>(){}.getType()); - for (Map criteriaMap : criterias) { - for (Map.Entry fieldEntry : criteriaMap.entrySet()) { - if (fieldEntry.getKey().equalsIgnoreCase(LOG_MESSAGE)) { - addLogMessageFilter(query, fieldEntry.getValue(), false); - } else { - query.addFilterQuery(String.format("%s:%s", fieldEntry.getKey(), escapeNonLogMessageField(fieldEntry))); - } - } - } - } - return query; - } - - public Query addExcludeFieldValues(Query query, String fieldValuesMapStr) { - if (StringUtils.isNotEmpty(fieldValuesMapStr)) { - List> criterias = new Gson().fromJson(fieldValuesMapStr, - new TypeToken>>(){}.getType()); - for (Map criteriaMap : criterias) { - for (Map.Entry fieldEntry : criteriaMap.entrySet()) { - if (fieldEntry.getKey().equalsIgnoreCase(LOG_MESSAGE)) { - addLogMessageFilter(query, fieldEntry.getValue(), true); - } else { - addFilterQuery(query, new Criteria(fieldEntry.getKey()).is(escapeNonLogMessageField(fieldEntry)), true); - } - } - } - } - return query; - } - - public SolrQuery addExcludeFieldValues(SolrQuery query, String fieldValuesMapStr) { - if (StringUtils.isNotEmpty(fieldValuesMapStr)) { - List> criterias = new Gson().fromJson(fieldValuesMapStr, - new TypeToken>>(){}.getType()); - for (Map criteriaMap : criterias) { - for (Map.Entry fieldEntry : criteriaMap.entrySet()) { - if (fieldEntry.getKey().equalsIgnoreCase(LOG_MESSAGE)) { - addLogMessageFilter(query, fieldEntry.getValue(), true); - } else { - 
query.addFilterQuery(String.format("-%s:%s", fieldEntry.getKey(), escapeNonLogMessageField(fieldEntry))); - } - } - } - } - return query; - } - - public SolrQuery addListFilterToSolrQuery(SolrQuery solrQuery, String fieldName, String fieldValue) { - return SolrUtil.addListFilterToSolrQuery(solrQuery, fieldName, fieldValue); - } - - public abstract LogType getLogType(); - - private void addLogMessageFilter(Query query, String value, boolean negate) { - StrTokenizer tokenizer = new StrTokenizer(value, ' ', '"'); - for (String token : tokenizer.getTokenArray()) { - token = token.trim(); - if (token.contains(" ") || !token.startsWith("*") && !token.endsWith("*")) { - addFilterQuery(query, new Criteria(LOG_MESSAGE).is(SolrUtil.escapeQueryChars(token)), negate); - } else if (token.startsWith("*") && token.endsWith("*")) { - String plainToken = StringUtils.substring(token, 1, -1); - addFilterQuery(query, new Criteria(LOG_MESSAGE).contains(SolrUtil.escapeQueryChars(plainToken)), negate); - } else if (token.startsWith("*") && !token.endsWith("*")) { - String plainToken = StringUtils.substring(token, 1); - addFilterQuery(query, new Criteria(LOG_MESSAGE).endsWith(SolrUtil.escapeQueryChars(plainToken)), negate); - } else if (!token.startsWith("*") && token.endsWith("*")) { - String plainToken = StringUtils.substring(token, 0, -1); - addFilterQuery(query, new Criteria(LOG_MESSAGE).startsWith(SolrUtil.escapeQueryChars(plainToken)), negate); - } - } - } - - private void addLogMessageFilter(SolrQuery query, String value, boolean negate) { - StrTokenizer tokenizer = new StrTokenizer(value, ' ', '"'); - String negateToken = negate ? "-" : ""; - for (String token : tokenizer.getTokenArray()) { - token = token.trim(); - if (token.contains(" ") || !token.startsWith("*") && !token.endsWith("*")) { - query.addFilterQuery(String.format("%s%s:%s", negateToken, LOG_MESSAGE, SolrUtil.escapeQueryChars(token))); - } else if (token.startsWith("*") && token.endsWith("*")) { - String plainToken = StringUtils.substring(token, 1, -1); - query.addFilterQuery(String.format("%s%s:%s", negateToken, LOG_MESSAGE, SolrUtil.escapeQueryChars(plainToken))); - } else if (token.startsWith("*") && !token.endsWith("*")) { - String plainToken = StringUtils.substring(token, 1); - query.addFilterQuery(String.format("%s%s:%s", negateToken, LOG_MESSAGE, SolrUtil.escapeQueryChars(plainToken))); - } else if (!token.startsWith("*") && token.endsWith("*")) { - String plainToken = StringUtils.substring(token, 0, -1); - query.addFilterQuery(String.format("%s%s:%s", negateToken, LOG_MESSAGE, SolrUtil.escapeQueryChars(plainToken))); - } - } - } - - private String escapeNonLogMessageField(Map.Entry fieldEntry) { - Map schemaFieldNameMap = solrSchemaFieldDao.getSchemaFieldNameMap(getLogType()); - Map schemaFieldTypeMap = solrSchemaFieldDao.getSchemaFieldTypeMap(getLogType()); - String fieldType = schemaFieldNameMap.get(fieldEntry.getKey()); - String fieldTypeMetaData = schemaFieldTypeMap.get(fieldType); - return SolrUtil.putWildCardByType(fieldEntry.getValue(), fieldType, fieldTypeMetaData); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractSearchRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractSearchRequestQueryConverter.java deleted file mode 100644 index d4fc48beb81..00000000000 --- 
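The include/exclude helpers above expect includeQuery / excludeQuery to be a JSON array of single-entry objects, parsed with Gson; values for the log message field are then tokenized (double quotes group phrases), and a leading or trailing * selects endsWith, startsWith or contains matching, while everything else is escaped and matched as an exact term. A minimal sketch of just the parsing step, with an illustrative payload and field names:

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.util.List;
import java.util.Map;

public class IncludeQuerySketch {
  public static void main(String[] args) {
    // One object per criterion: "level is ERROR" and "log_message contains timeout"
    String includeQuery = "[{\"level\":\"ERROR\"},{\"log_message\":\"*timeout*\"}]";
    List<Map<String, String>> criteria = new Gson().fromJson(
        includeQuery, new TypeToken<List<Map<String, String>>>() {}.getType());
    for (Map<String, String> entry : criteria) {
      entry.forEach((field, value) -> System.out.println(field + " -> " + value));
    }
  }
}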
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractSearchRequestQueryConverter.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.ambari.logsearch.model.request.LastPageParamDefinition; -import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Sort; -import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.solr.core.query.Criteria; -import org.springframework.data.solr.core.query.Query; -import org.springframework.data.solr.core.query.SimpleStringCriteria; - -public abstract class AbstractSearchRequestQueryConverter - extends AbstractOperationHolderConverter { - - @Override - public QUERY_TYPE convert(REQUEST_TYPE request) { - QUERY_TYPE query = createQuery(); - addPageRequest(request, query); - Criteria criteria = new SimpleStringCriteria("*:*"); - query.addCriteria(criteria); - return extendSolrQuery(request, query); - } - - private void addPageRequest(REQUEST_TYPE request, QUERY_TYPE query) { - int page = StringUtils.isNumeric(request.getPage()) ? new Integer(request.getPage()) : 0; - int pageSize = StringUtils.isNumeric(request.getPageSize()) ? new Integer(request.getPageSize()) : 99999; - Sort sort = sort(request); - - boolean isLastPage = (request instanceof LastPageParamDefinition) ? - ((LastPageParamDefinition)request).isLastPage() : - false; - if (isLastPage) { - page = 0; - List newOrders = new ArrayList<>(); - for (Sort.Order order : sort) { - newOrders.add(new Sort.Order(order.getDirection() == Direction.ASC ? 
Direction.DESC : Direction.ASC, order.getProperty())); - } - sort = new Sort(newOrders); - } - - PageRequest pageRequest = new PageRequest(page, pageSize, sort); - query.setPageRequest(pageRequest); - } - - public abstract QUERY_TYPE extendSolrQuery(REQUEST_TYPE request, QUERY_TYPE query); - - public abstract Sort sort(REQUEST_TYPE request); - - public abstract QUERY_TYPE createQuery(); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java deleted file mode 100644 index 0fc12e83ac1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.BUNDLE_ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH; - -public abstract class AbstractServiceLogRequestFacetQueryConverter - extends AbstractLogRequestFacetQueryConverter { - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, SOURCE request) { - addEqualsFilterQuery(facetQuery, HOST, SolrUtil.escapeQueryChars(request.getHostName())); - addEqualsFilterQuery(facetQuery, PATH, SolrUtil.escapeQueryChars(request.getFileName())); - addEqualsFilterQuery(facetQuery, COMPONENT, SolrUtil.escapeQueryChars(request.getComponentName())); - addEqualsFilterQuery(facetQuery, BUNDLE_ID, request.getBundleId()); - addInFiltersIfNotNullAndEnabled(facetQuery, request.getLevel(), LEVEL, true); - addInFiltersIfNotNullAndEnabled(facetQuery, request.getHostList(), HOST, StringUtils.isEmpty(request.getHostName())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestQueryConverter.java 
b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestQueryConverter.java deleted file mode 100644 index e1cd0772ed9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestQueryConverter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; -import org.springframework.data.solr.core.query.Query; - -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; - -public abstract class AbstractServiceLogRequestQueryConverter - extends AbstractLogRequestQueryConverter { - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - @Override - public void addComponentFilters(REQUEST_TYPE request, QUERY_TYPE query) { - List includeTypes = splitValueAsList(request.getMustBe(), ","); - List excludeTypes = splitValueAsList(request.getMustNot(), ","); - addInFilterQuery(query, COMPONENT, includeTypes); - addInFilterQuery(query, COMPONENT, excludeTypes, true); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditBarGraphRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditBarGraphRequestQueryConverter.java deleted file mode 100644 index 6a6765f251d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditBarGraphRequestQueryConverter.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrQuery; - -import javax.inject.Named; - -import java.util.Arrays; -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; - -@Named -public class AuditBarGraphRequestQueryConverter extends AbstractDateRangeFacetQueryConverter { - - @Override - public String getDateFieldName() { - return AUDIT_EVTTIME; - } - - @Override - public String getTypeFieldName() { - return AUDIT_COMPONENT; - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } - - @Override - public SolrQuery convert(AuditBarGraphRequest request) { - SolrQuery query = super.convert(request); - addListFilterToSolrQuery(query, CLUSTER, request.getClusters()); - addInFiltersIfNotNullAndEnabled(query, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - addIncludeFieldValues(query, request.getIncludeQuery()); - addExcludeFieldValues(query, request.getExcludeQuery()); - return query; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java deleted file mode 100644 index 55280a8ff09..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest; -import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; - -@Named -public class AuditComponentsRequestQueryConverter extends AbstractLogRequestFacetQueryConverter { - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.INDEX; - } - - @Override - public String getDateTimeField() { - return AUDIT_EVTTIME; - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } - - @Override - public void appendFacetOptions(FacetOptions facetOptions, AuditComponentRequest request) { - facetOptions.addFacetOnField(AUDIT_COMPONENT); - facetOptions.setFacetLimit(-1); - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, AuditComponentRequest request) { - addInFiltersIfNotNullAndEnabled(facetQuery, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditLogRequestQueryConverter.java deleted file mode 100644 index eba91ebafae..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditLogRequestQueryConverter.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrQuery; -import org.springframework.data.solr.core.query.SimpleQuery; - -import javax.inject.Named; - -@Named -public class AuditLogRequestQueryConverter extends AbstractAuditLogRequestQueryConverter { - - @Override - public SimpleQuery extendLogQuery(AuditLogRequest request, SimpleQuery query) { - addInFiltersIfNotNullAndEnabled( - query, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - return query; - } - - @Override - public SimpleQuery createQuery() { - return new SimpleQuery(); - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditServiceLoadRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditServiceLoadRequestQueryConverter.java deleted file mode 100644 index 26a8d268904..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/AuditServiceLoadRequestQueryConverter.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; - -@Named -public class AuditServiceLoadRequestQueryConverter extends AbstractLogRequestFacetQueryConverter { - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.COUNT; - } - - @Override - public String getDateTimeField() { - return AUDIT_EVTTIME; - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } - - @Override - public void appendFacetOptions(FacetOptions facetOptions, AuditServiceLoadRequest request) { - facetOptions.addFacetOnField(AUDIT_COMPONENT); - facetOptions.setFacetLimit(10); - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, AuditServiceLoadRequest request) { - addInFiltersIfNotNullAndEnabled(facetQuery, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java deleted file mode 100644 index 8accd714a83..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; -import org.springframework.data.domain.Sort; -import org.springframework.data.solr.core.query.SimpleQuery; -import javax.inject.Named; -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.KEY_LOG_MESSAGE; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.BUNDLE_ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.SEQUENCE_ID; - -@Named -public class BaseServiceLogRequestQueryConverter extends AbstractServiceLogRequestQueryConverter { - - @Override - public SimpleQuery extendLogQuery(BaseServiceLogRequest request, SimpleQuery query) { - List levels = splitValueAsList(request.getLevel(), ","); - addContainsFilterQuery(query, KEY_LOG_MESSAGE, SolrUtil.escapeForStandardTokenizer(request.getIncludeMessage())); - addContainsFilterQuery(query, KEY_LOG_MESSAGE, SolrUtil.escapeForStandardTokenizer(request.getExcludeMessage()), true); - addEqualsFilterQuery(query, HOST, SolrUtil.escapeQueryChars(request.getHostName())); - addEqualsFilterQuery(query, PATH, SolrUtil.escapeQueryChars(request.getFileName())); - addEqualsFilterQuery(query, COMPONENT, SolrUtil.escapeQueryChars(request.getComponentName())); - addEqualsFilterQuery(query, BUNDLE_ID, request.getBundleId()); - if (CollectionUtils.isNotEmpty(levels)){ - addInFilterQuery(query, LEVEL, levels); - } - addInFiltersIfNotNullAndEnabled(query, request.getHostList(), HOST, org.apache.commons.lang.StringUtils.isEmpty(request.getHostName())); - addRangeFilter(query, LOGTIME, request.getFrom(), request.getTo()); - return query; - } - - @Override - public Sort sort(BaseServiceLogRequest request) { - String sortBy = request.getSortBy(); - String sortType = request.getSortType(); - Sort.Order defaultSortOrder; - if (StringUtils.isNotBlank(sortBy)) { - Sort.Direction direction = StringUtils.equals(sortType, LogSearchConstants.ASCENDING_ORDER) ? 
Sort.Direction.ASC : Sort.Direction.DESC; - defaultSortOrder = new Sort.Order(direction, sortBy); - } else { - defaultSortOrder = new Sort.Order(Sort.Direction.DESC, LOGTIME); - } - Sort.Order sequenceIdOrder = new Sort.Order(Sort.Direction.DESC, SEQUENCE_ID); - return new Sort(defaultSortOrder, sequenceIdOrder); - } - - @Override - public SimpleQuery createQuery() { - return new SimpleQuery(); - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/EventHistoryRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/EventHistoryRequestQueryConverter.java deleted file mode 100644 index c67c31b54c7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/EventHistoryRequestQueryConverter.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.EventHistoryRequest; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrQuery; - -import javax.inject.Named; - -import java.util.ArrayList; -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.FILTER_NAME; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.ROW_TYPE; - -@Named -public class EventHistoryRequestQueryConverter extends AbstractConverterAware { - - @Override - public SolrQuery convert(EventHistoryRequest eventHistoryRequest) { - SolrQuery eventHistoryQuery = new SolrQuery(); - eventHistoryQuery.setQuery("*:*"); - - int startIndex = StringUtils.isNotEmpty(eventHistoryRequest.getStartIndex()) && StringUtils.isNumeric(eventHistoryRequest.getStartIndex()) - ? Integer.parseInt(eventHistoryRequest.getStartIndex()) : 0; - int maxRows = StringUtils.isNotEmpty(eventHistoryRequest.getPageSize()) && StringUtils.isNumeric(eventHistoryRequest.getPageSize()) - ? Integer.parseInt(eventHistoryRequest.getPageSize()) : 10; - - SolrQuery.ORDER order = eventHistoryRequest.getSortType() != null && SolrQuery.ORDER.desc.equals(SolrQuery.ORDER.valueOf(eventHistoryRequest.getSortType())) - ? SolrQuery.ORDER.desc : SolrQuery.ORDER.asc; - String sortBy = StringUtils.isNotEmpty(eventHistoryRequest.getSortBy()) ? eventHistoryRequest.getSortBy() : FILTER_NAME; - String filterName = StringUtils.isBlank(eventHistoryRequest.getFilterName()) ? 
"*" : "*" + eventHistoryRequest.getFilterName() + "*"; - - eventHistoryQuery.addFilterQuery(String.format("%s:%s", ROW_TYPE, eventHistoryRequest.getRowType())); - eventHistoryQuery.addFilterQuery(String.format("%s:%s", FILTER_NAME, SolrUtil.makeSearcableString(filterName))); - eventHistoryQuery.setStart(startIndex); - eventHistoryQuery.setRows(maxRows); - - SolrQuery.SortClause sortOrder = SolrQuery.SortClause.create(sortBy, order); - List sort = new ArrayList<>(); - sort.add(sortOrder); - eventHistoryQuery.setSorts(sort); - - SolrUtil.addListFilterToSolrQuery(eventHistoryQuery, CLUSTER, eventHistoryRequest.getClusters()); - - return eventHistoryQuery; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/FieldAuditLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/FieldAuditLogRequestQueryConverter.java deleted file mode 100644 index c4b8ae0139a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/FieldAuditLogRequestQueryConverter.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; - -@Named -public class FieldAuditLogRequestQueryConverter extends AbstractLogRequestFacetQueryConverter { - - @Override - public void appendFacetOptions(FacetOptions facetOptions, FieldAuditLogRequest request) { - facetOptions.addFacetOnPivot(request.getField(), AUDIT_COMPONENT); - } - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.COUNT; - } - - @Override - public String getDateTimeField() { - return AUDIT_EVTTIME; - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, FieldAuditLogRequest request) { - addInFiltersIfNotNullAndEnabled(facetQuery, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/HostLogFilesRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/HostLogFilesRequestQueryConverter.java deleted file mode 100644 index e1e7418246b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/HostLogFilesRequestQueryConverter.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
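The field-level audit converter above reuses the common facet scaffolding (date-range filter query, facet.mincount=1, count-ordered facets) and adds a pivot of the requested field against the audit component, so the response groups counts per field value and, within each value, per component. Assuming Spring Data Solr renders addFacetOnPivot(a, b) as a comma-separated facet.pivot parameter, the interesting part of the generated request is roughly:

  fq=<dateField>:[<from> TO <to>]
  facet=true&facet.mincount=1&facet.limit=-1&facet.sort=count
  facet.pivot=<requested field>,<component>
  fq=<reqUser>:(...)              (only when a userList is passed)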
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.HostLogFilesRequest; -import org.apache.commons.lang3.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.FacetOptions.FacetSort; -import org.springframework.data.solr.core.query.SimpleFacetQuery; -import org.springframework.data.solr.core.query.SimpleStringCriteria; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH; - -import javax.inject.Named; - -@Named -public class HostLogFilesRequestQueryConverter extends AbstractOperationHolderConverter{ - - @Override - public SimpleFacetQuery convert(HostLogFilesRequest request) { - SimpleFacetQuery facetQuery = new SimpleFacetQuery(); - facetQuery.addCriteria(new SimpleStringCriteria(String.format("%s:(%s)", HOST, request.getHostName()))); - if (StringUtils.isNotEmpty(request.getComponentName())) { - facetQuery.addCriteria(new SimpleStringCriteria(String.format("%s:(%s)", COMPONENT, request.getComponentName()))); - } - FacetOptions facetOptions = new FacetOptions(); - facetOptions.setFacetMinCount(1); - facetOptions.setFacetLimit(-1); - facetOptions.setFacetSort(FacetSort.COUNT); - facetOptions.addFacetOnPivot(COMPONENT, PATH); - facetQuery.setFacetOptions(facetOptions); - addInFilterQuery(facetQuery, CLUSTER, splitValueAsList(request.getClusters(), ",")); - facetQuery.setRows(0); - return facetQuery; - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java deleted file mode 100644 index b194df3c012..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
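The deleted HostLogFilesRequestQueryConverter above is self-contained enough to show what it produces end to end. A minimal usage sketch, assuming HostLogFilesRequest exposes setters matching the getters used in convert(); the host, component and cluster values are made up:

    // Sketch only: values are illustrative, not taken from any test in this patch.
    HostLogFilesRequest request = new HostLogFilesRequest();
    request.setHostName("c6401.ambari.apache.org");     // hypothetical host
    request.setComponentName("hdfs_namenode");          // hypothetical component
    request.setClusters("cl1");                         // hypothetical cluster list

    SimpleFacetQuery query = new HostLogFilesRequestQueryConverter().convert(request);
    // query now carries: a HOST:(c6401...) criteria, an optional COMPONENT:(hdfs_namenode) criteria,
    // an IN filter on CLUSTER for "cl1", rows=0, and a COMPONENT,PATH pivot facet
    // with facet.mincount=1, facet.limit=-1, sorted by count.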
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; - -import javax.inject.Named; - -@Named -public class ServiceLogAnyGraphRequestQueryConverter extends AbstractLogRequestFacetQueryConverter{ - - @Override - public void appendFacetOptions(FacetOptions facetOptions, ServiceAnyGraphRequest request) { - facetOptions.addFacetOnField(LEVEL); - } - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.COUNT; - } - - @Override - public String getDateTimeField() { - return LOGTIME; - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceAnyGraphRequest request) { - addInFiltersIfNotNullAndEnabled(facetQuery, request.getHostList(), HOST, StringUtils.isEmpty(request.getHostName())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java deleted file mode 100644 index 3b4bb788690..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentLevelRequest; -import org.springframework.data.solr.core.query.FacetOptions; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; - -@Named -public class ServiceLogComponentLevelRequestQueryConverter extends AbstractServiceLogRequestFacetQueryConverter { - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.INDEX; - } - - @Override - public String getDateTimeField() { - return LOGTIME; - } - - @Override - public void appendFacetOptions(FacetOptions facetOptions, ServiceLogComponentLevelRequest request) { - facetOptions.addFacetOnPivot(COMPONENT, LEVEL); - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java deleted file mode 100644 index 6271ca8f83a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentHostRequest; -import org.springframework.data.solr.core.query.FacetOptions; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; - -@Named -public class ServiceLogComponentRequestFacetQueryConverter extends AbstractServiceLogRequestFacetQueryConverter { - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.INDEX; - } - - @Override - public String getDateTimeField() { - return LOGTIME; - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - @Override - public void appendFacetOptions(FacetOptions facetOptions, ServiceLogComponentHostRequest request) { - facetOptions.addFacetOnPivot(COMPONENT, HOST, LEVEL); - facetOptions.addFacetOnPivot(COMPONENT, LEVEL); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java deleted file mode 100644 index 982d2a124fa..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.BUNDLE_ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH; - -@Named -public class ServiceLogLevelCountRequestQueryConverter extends AbstractLogRequestFacetQueryConverter { - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.COUNT; - } - - @Override - public String getDateTimeField() { - return LOGTIME; - } - - @Override - public void appendFacetOptions(FacetOptions facetOptions, ServiceLogLevelCountRequest request) { - facetOptions.addFacetOnField(LEVEL); - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceLogLevelCountRequest request) { - addEqualsFilterQuery(facetQuery, HOST, SolrUtil.escapeQueryChars(request.getHostName())); - addEqualsFilterQuery(facetQuery, PATH, SolrUtil.escapeQueryChars(request.getFileName())); - addEqualsFilterQuery(facetQuery, COMPONENT, SolrUtil.escapeQueryChars(request.getComponentName())); - addEqualsFilterQuery(facetQuery, BUNDLE_ID, request.getBundleId()); - addInFiltersIfNotNullAndEnabled(facetQuery, request.getHostList(), HOST, StringUtils.isEmpty(request.getHostName())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java deleted file mode 100644 index db9c60e3488..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.SolrQuery; - -import javax.inject.Named; - -import java.util.Arrays; -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; - -@Named -public class ServiceLogLevelDateRangeRequestQueryConverter extends AbstractDateRangeFacetQueryConverter{ - - @Override - public String getDateFieldName() { - return LOGTIME; - } - - @Override - public String getTypeFieldName() { - return LEVEL; - } - - @Override - public SolrQuery convert(ServiceGraphRequest request) { - SolrQuery solrQuery = super.convert(request); - addListFilterToSolrQuery(solrQuery, LEVEL, request.getLevel()); - if (request.getHostList() != null && StringUtils.isEmpty(request.getHostName())) { - List hosts = request.getHostList().length() == 0 ? Arrays.asList("\\-1") : splitValueAsList(request.getHostList(), ","); - if (hosts.size() > 1) { - solrQuery.addFilterQuery(String.format("%s:(%s)", HOST, StringUtils.join(hosts, " OR "))); - } else { - solrQuery.addFilterQuery(String.format("%s:%s", HOST, hosts.get(0))); - } - } - addListFilterToSolrQuery(solrQuery, CLUSTER, request.getClusters()); - addListFilterToSolrQuery(solrQuery, COMPONENT, request.getMustBe()); - addIncludeFieldValues(solrQuery, request.getIncludeQuery()); - addExcludeFieldValues(solrQuery, request.getExcludeQuery()); - return solrQuery; - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java deleted file mode 100644 index 7cb8f9181dd..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
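The host-filter branch in the deleted ServiceLogLevelDateRangeRequestQueryConverter.convert() is easy to misread, so the snippet below reproduces just that string-building logic in isolation. The host names are made up, and the literal "host" stands in for the HOST constant, whose concrete Solr field name is not shown in this patch:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.lang.StringUtils;

    public class HostFilterSketch {
      public static void main(String[] args) {
        List<String> hosts = Arrays.asList("c6401.ambari.apache.org", "c6402.ambari.apache.org");
        String fq = hosts.size() > 1
            ? String.format("%s:(%s)", "host", StringUtils.join(hosts, " OR "))
            : String.format("%s:%s", "host", hosts.get(0));
        System.out.println(fq);  // host:(c6401.ambari.apache.org OR c6402.ambari.apache.org)
        // An empty hostList short-circuits to the single escaped value "\\-1",
        // which acts as a placeholder filter rather than a real host name.
      }
    }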
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogHostComponentRequest; -import org.springframework.data.solr.core.query.FacetOptions; -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; - -@Named -public class ServiceLogTreeRequestFacetQueryConverter extends AbstractServiceLogRequestFacetQueryConverter{ - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.INDEX; - } - - @Override - public String getDateTimeField() { - return LOGTIME; - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - @Override - public void appendFacetOptions(FacetOptions facetOptions, ServiceLogHostComponentRequest request) { - facetOptions.addFacetOnPivot(HOST, COMPONENT, LEVEL); - facetOptions.addFacetOnPivot(HOST, LEVEL); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTruncatedRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTruncatedRequestQueryConverter.java deleted file mode 100644 index d0273acdf1b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTruncatedRequestQueryConverter.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest; -import org.springframework.data.domain.Sort; -import org.springframework.data.solr.core.query.SimpleQuery; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.SEQUENCE_ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; - -public class ServiceLogTruncatedRequestQueryConverter extends AbstractServiceLogRequestQueryConverter{ - - private String sequenceId; - - private String logTime; - - @Override - public SimpleQuery extendLogQuery(ServiceLogTruncatedRequest request, SimpleQuery query) { - addEqualsFilterQuery(query, COMPONENT, request.getComponentName()); - addEqualsFilterQuery(query, HOST, request.getHostName()); - String scrollType = request.getScrollType(); - if (LogSearchConstants.SCROLL_TYPE_BEFORE.equals(scrollType)) { - Integer secuenceIdNum = Integer.parseInt(getSequenceId()) - 1; - addRangeFilter(query, LOGTIME, null, getLogTime()); - addRangeFilter(query, SEQUENCE_ID, null, secuenceIdNum.toString()); - } else if (LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) { - Integer secuenceIdNum = Integer.parseInt(getSequenceId()) + 1; - addRangeFilter(query, LOGTIME, getLogTime(), null); - addRangeFilter(query, SEQUENCE_ID, secuenceIdNum.toString(), null); - } - query.setRows(request.getNumberRows()); - return query; - } - - @Override - public Sort sort(ServiceLogTruncatedRequest request) { - String scrollType = request.getScrollType(); - Sort.Direction direction; - if (LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) { - direction = Sort.Direction.ASC; - } else { - direction = Sort.Direction.DESC; - } - Sort.Order logtimeSortOrder = new Sort.Order(direction, LOGTIME); - Sort.Order secuqnceIdSortOrder = new Sort.Order(direction, SEQUENCE_ID); - return new Sort(logtimeSortOrder, secuqnceIdSortOrder); - } - - @Override - public SimpleQuery createQuery() { - return new SimpleQuery(); - } - - @Override - public LogType getLogType() { - return LogType.SERVICE; - } - - public String getSequenceId() { - return sequenceId; - } - - public void setSequenceId(String sequenceId) { - this.sequenceId = sequenceId; - } - - public String getLogTime() { - return logTime; - } - - public void setLogTime(String logTime) { - this.logTime = logTime; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/StringFieldFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/StringFieldFacetQueryConverter.java deleted file mode 100644 index faff0bef890..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/StringFieldFacetQueryConverter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
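The before/after scroll handling in the deleted ServiceLogTruncatedRequestQueryConverter is the subtle part of that class: the anchor row identified by logTime plus sequenceId is excluded by shifting the sequence id one step, and the sort direction follows the scroll direction. A small sketch with made-up anchor values:

    // Hypothetical anchor row: logTime "2017-05-01T10:00:00.000Z", sequenceId "42".
    String sequenceId = "42";
    int before = Integer.parseInt(sequenceId) - 1;  // 41: LOGTIME limited up to the anchor logTime,
                                                    //     SEQUENCE_ID limited up to 41, sorted DESC
    int after  = Integer.parseInt(sequenceId) + 1;  // 43: LOGTIME limited from the anchor logTime,
                                                    //     SEQUENCE_ID limited from 43, sorted ASC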
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.springframework.data.solr.core.query.Criteria; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; -import org.springframework.data.solr.core.query.SimpleStringCriteria; - -import javax.inject.Named; - -@Named -public class StringFieldFacetQueryConverter extends AbstractConverterAware { - - @Override - public SimpleFacetQuery convert(String fieldName) { - Criteria criteria = new SimpleStringCriteria("*:*"); - SimpleFacetQuery facetQuery = new SimpleFacetQuery(); - facetQuery.addCriteria(criteria); - facetQuery.setRows(0); - FacetOptions facetOptions = new FacetOptions(); - facetOptions.setFacetMinCount(1); - facetOptions.addFacetOnField(fieldName); - facetOptions.setFacetLimit(-1); - facetQuery.setFacetOptions(facetOptions); - return facetQuery; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/TopFieldAuditLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/TopFieldAuditLogRequestQueryConverter.java deleted file mode 100644 index 636c7e9e73b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/TopFieldAuditLogRequestQueryConverter.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
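StringFieldFacetQueryConverter needs no request object at all, which makes it the simplest place to see the facet defaults shared by these converters. A usage sketch; the field name is illustrative and the converter would normally be injected rather than instantiated directly:

    SimpleFacetQuery facetQuery = new StringFieldFacetQueryConverter().convert("level");  // "level" is a made-up field
    // Roughly equivalent Solr parameters once executed:
    //   q=*:*&rows=0&facet=true&facet.field=level&facet.mincount=1&facet.limit=-1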
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.TopFieldAuditLogRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; - -@Named -public class TopFieldAuditLogRequestQueryConverter extends AbstractLogRequestFacetQueryConverter { - - @Override - public void appendFacetOptions(FacetOptions facetOptions, TopFieldAuditLogRequest request) { - facetOptions.addFacetOnPivot(request.getField(), AUDIT_COMPONENT); - facetOptions.setFacetLimit(request.getTop()); - } - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.COUNT; - } - - @Override - public String getDateTimeField() { - return AUDIT_EVTTIME; - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, TopFieldAuditLogRequest request) { - addInFiltersIfNotNullAndEnabled(facetQuery, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/UserExportRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/UserExportRequestQueryConverter.java deleted file mode 100644 index 2a423103483..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/converter/UserExportRequestQueryConverter.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.model.request.impl.UserExportRequest; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.commons.lang.StringUtils; -import org.springframework.data.solr.core.query.FacetOptions; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER; -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_RESOURCE; - -@Named -public class UserExportRequestQueryConverter extends AbstractLogRequestFacetQueryConverter { - - @Override - public void appendFacetOptions(FacetOptions facetOptions, UserExportRequest request) { - facetOptions.addFacetOnPivot(AUDIT_REQUEST_USER, AUDIT_COMPONENT); - facetOptions.addFacetOnPivot(AUDIT_RESOURCE, AUDIT_COMPONENT); - } - - @Override - public FacetOptions.FacetSort getFacetSort() { - return FacetOptions.FacetSort.COUNT; - } - - @Override - public String getDateTimeField() { - return AUDIT_EVTTIME; - } - - @Override - public LogType getLogType() { - return LogType.AUDIT; - } - - @Override - public void appendFacetQuery(SimpleFacetQuery facetQuery, UserExportRequest request) { - addInFiltersIfNotNullAndEnabled(facetQuery, request.getUserList(), - SolrConstants.AuditLogConstants.AUDIT_REQUEST_USER, - StringUtils.isNotBlank(request.getUserList())); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java deleted file mode 100644 index 4bef51eb4bb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.dao; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.conf.SolrAuditLogPropsConfig; -import org.apache.ambari.logsearch.conf.SolrClientsHolder; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.configurer.SolrAuditAliasConfigurer; -import org.apache.ambari.logsearch.configurer.SolrCollectionConfigurer; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.springframework.data.solr.core.SolrTemplate; - -@Named -public class AuditSolrDao extends SolrDaoBase { - - private static final Logger LOG = Logger.getLogger(AuditSolrDao.class); - - @Inject - private SolrAuditLogPropsConfig solrAuditLogPropsConfig; - - private SolrTemplate auditSolrTemplate; - - @Inject - @Named("solrAuditLogsState") - private SolrCollectionState solrAuditLogsState; - - @Inject - private SolrClientsHolder solrClientsHolder; - - public AuditSolrDao() { - super(LogType.AUDIT); - } - - @Override - public SolrTemplate getSolrTemplate() { - return auditSolrTemplate; - } - - @Override - public void setSolrTemplate(SolrTemplate solrTemplate) { - this.auditSolrTemplate = solrTemplate; - } - - @PostConstruct - public void postConstructor() { - String aliasNameIn = solrAuditLogPropsConfig.getAliasNameIn(); - String rangerAuditCollection = solrAuditLogPropsConfig.getRangerCollection(); - - try { - new SolrCollectionConfigurer(this, true, solrClientsHolder, SolrClientsHolder.CollectionType.AUDIT).start(); - boolean createAlias = (aliasNameIn != null && StringUtils.isNotBlank(rangerAuditCollection)); - if (createAlias) { - new SolrAuditAliasConfigurer(this).start(); - } - } catch (Exception e) { - LOG.error("Error while connecting to Solr for audit logs : solrUrl=" + solrAuditLogPropsConfig.getSolrUrl() + ", zkConnectString=" + - solrAuditLogPropsConfig.getZkConnectString() + ", collection=" + solrAuditLogPropsConfig.getCollection(), e); - } - } - - @Override - public SolrCollectionState getSolrCollectionState() { - return this.solrAuditLogsState; - } - - @Override - public SolrPropsConfig getSolrPropsConfig() { - return this.solrAuditLogPropsConfig; - } - - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/EventHistorySolrDao.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/EventHistorySolrDao.java deleted file mode 100644 index e3754248eed..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/EventHistorySolrDao.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.dao; - -import java.io.IOException; -import java.io.UncheckedIOException; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.common.LogSearchContext; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.conf.SolrClientsHolder; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.SolrEventHistoryPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.configurer.SolrCollectionConfigurer; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.response.UpdateResponse; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrInputDocument; - -import org.apache.log4j.Logger; -import org.springframework.data.solr.core.SolrTemplate; - -@Named -public class EventHistorySolrDao extends SolrDaoBase { - - private static final Logger LOG = Logger.getLogger(EventHistorySolrDao.class); - - private static final Logger LOG_PERFORMANCE = Logger.getLogger("org.apache.ambari.logsearch.performance"); - - @Inject - private SolrEventHistoryPropsConfig solrEventHistoryPropsConfig; - - private SolrTemplate eventHistorySolrTemplate; - - @Inject - @Named("solrEventHistoryState") - private SolrCollectionState solrEventHistoryState; - - @Inject - private SolrClientsHolder solrClientsHolder; - - public EventHistorySolrDao() { - super(LogType.SERVICE); - } - - @Override - public SolrTemplate getSolrTemplate() { - return eventHistorySolrTemplate; - } - - @Override - public void setSolrTemplate(SolrTemplate solrTemplate) { - this.eventHistorySolrTemplate = solrTemplate; - } - - @PostConstruct - public void postConstructor() { - String solrUrl = solrEventHistoryPropsConfig.getSolrUrl(); - String zkConnectString = solrEventHistoryPropsConfig.getZkConnectString(); - String collection = solrEventHistoryPropsConfig.getCollection(); - - try { - new SolrCollectionConfigurer(this, false, solrClientsHolder, SolrClientsHolder.CollectionType.HISTORY).start(); - } catch (Exception e) { - LOG.error("error while connecting to Solr for history logs : solrUrl=" + solrUrl + ", zkConnectString=" + zkConnectString + - ", collection=" + collection, e); - } - } - - public UpdateResponse deleteEventHistoryData(String id) { - return removeDoc("id:" + id); - } - - private UpdateResponse removeDoc(String query) { - try { - UpdateResponse updateResoponse = getSolrClient().deleteByQuery(query); - getSolrClient().commit(); - LOG_PERFORMANCE.info("\n Username :- " + LogSearchContext.getCurrentUsername() + - " Remove Time Execution :- " + updateResoponse.getQTime() + " Total Time Elapsed is :- " + updateResoponse.getElapsedTime()); - return updateResoponse; - } catch (SolrServerException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public UpdateResponse addDocs(SolrInputDocument doc) { - try { - UpdateResponse updateResoponse = getSolrClient().add(doc); - LOG_PERFORMANCE.info("\n Username :- " + LogSearchContext.getCurrentUsername() + - " Update Time Execution :- " + updateResoponse.getQTime() + " Total Time Elapsed is :- " + updateResoponse.getElapsedTime()); - getSolrClient().commit(); - return updateResoponse; 
- } catch (SolrServerException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - @Override - public SolrCollectionState getSolrCollectionState() { - return solrEventHistoryState; - } - - @Override - public SolrPropsConfig getSolrPropsConfig() { - return solrEventHistoryPropsConfig; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/RoleDao.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/RoleDao.java deleted file mode 100644 index d6dbd913779..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/RoleDao.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.dao; - -import com.google.common.annotations.VisibleForTesting; -import io.jsonwebtoken.lang.Collections; -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.util.FileUtil; -import org.apache.ambari.logsearch.util.JSONUtil; -import org.apache.ambari.logsearch.web.model.Privilege; -import org.apache.ambari.logsearch.web.model.Role; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.core.GrantedAuthority; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static java.util.Collections.singletonList; - -/** - * Helper class to assign roles for authenticated users, can be used only by JWT and file based authentication. 
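The deleted EventHistorySolrDao writes through the raw SolrJ client rather than SolrTemplate, and both write paths issue an explicit commit per call. A hedged usage sketch; the ids and field names are made up, and the dao reference is assumed to be Spring-injected:

    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "example-filter-1");           // hypothetical id
    doc.addField("filtername", "service-log-view");   // hypothetical field
    eventHistorySolrDao.addDocs(doc);                                  // add + commit, timed via the performance logger
    eventHistorySolrDao.deleteEventHistoryData("example-filter-1");    // deleteByQuery("id:example-filter-1") + commit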
- */ -@Named -public class RoleDao { - - private static final Logger LOG = LoggerFactory.getLogger(RoleDao.class); - - @Inject - private AuthPropsConfig authPropsConfig; - - private final Map> simpleRolesMap = new HashMap<>(); - - @SuppressWarnings("unchecked") - @PostConstruct - public void init() { - if (authPropsConfig.isFileAuthorization()) { - try { - String userRoleFileName = authPropsConfig.getRoleFile(); - LOG.info("USER ROLE JSON file NAME:" + userRoleFileName); - File jsonFile = FileUtil.getFileFromClasspath(userRoleFileName); - if (jsonFile == null || !jsonFile.exists()) { - LOG.error("Role json file not found on the classpath :" + userRoleFileName); - System.exit(1); - } - Map userRoleInfo = JSONUtil.readJsonFromFile(jsonFile); - Map roles = (Map) userRoleInfo.get("roles"); - for (Map.Entry roleEntry : roles.entrySet()) { - simpleRolesMap.put(roleEntry.getKey(), (List) roleEntry.getValue()); - } - } catch (Exception e) { - LOG.error("Error while reading user role file: {}", e.getMessage()); - } - } else { - LOG.info("File authorization is disabled"); - } - } - - public List getRolesForUser(String user) { - List authorities = new ArrayList<>(); - if (authPropsConfig.isFileAuthorization()) { - List roles = simpleRolesMap.get(user); - if (!Collections.isEmpty(roles)) { - for (String role : roles) { - String roleName = "ROLE_" + role; - LOG.debug("Found role '{}' for user '{}'", roleName, user); - authorities.add(createRoleWithReadPrivilage(roleName)); - } - } else { - LOG.warn("Not found roles for user '{}'", user); - } - return authorities; - } else { - return createDefaultAuthorities(); - } - } - - public Map> getSimpleRolesMap() { - return simpleRolesMap; - } - - @VisibleForTesting - public void setAuthPropsConfig(AuthPropsConfig authPropsConfig) { - this.authPropsConfig = authPropsConfig; - } - - /** - * Helper function to create a simple default role details - */ - public static List createDefaultAuthorities() { - Role r = createRoleWithReadPrivilage("ROLE_USER"); - return singletonList(r); - } - - private static Role createRoleWithReadPrivilage(String roleName) { - Role r = new Role(); - r.setName(roleName); - Privilege priv = new Privilege(); - priv.setName("READ_PRIVILEGE"); - r.setPrivileges(singletonList(priv)); - return r; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java deleted file mode 100644 index 4fbe534468a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
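RoleDao#init only implies the expected shape of the role file through the casts it performs: a top-level "roles" object mapping a user name to a list of role names, each of which later gets the ROLE_ prefix. A hypothetical file matching that parsing logic (user and role names are made up):

    {
      "roles": {
        "admin":  ["ADMIN", "USER"],
        "reader": ["USER"]
      }
    }

With such a file, getRolesForUser("admin") would return ROLE_ADMIN and ROLE_USER authorities, each carrying the READ_PRIVILEGE privilege created by createRoleWithReadPrivilage.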
- */ - -package org.apache.ambari.logsearch.dao; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.conf.SolrClientsHolder; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.SolrServiceLogPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.configurer.SolrCollectionConfigurer; -import org.apache.log4j.Logger; -import org.springframework.data.solr.core.SolrTemplate; - -@Named -public class ServiceLogsSolrDao extends SolrDaoBase { - - private static final Logger LOG = Logger.getLogger(ServiceLogsSolrDao.class); - - @Inject - private SolrServiceLogPropsConfig solrServiceLogPropsConfig; - - private volatile SolrTemplate serviceSolrTemplate; - - @Inject - @Named("solrServiceLogsState") - private SolrCollectionState solrServiceLogsState; - - @Inject - private SolrClientsHolder solrClientsHolder; - - public ServiceLogsSolrDao() { - super(LogType.SERVICE); - } - - @Override - public SolrTemplate getSolrTemplate() { - return serviceSolrTemplate; - } - - @Override - public void setSolrTemplate(SolrTemplate solrTemplate) { - this.serviceSolrTemplate = solrTemplate; - } - - @PostConstruct - public void postConstructor() { - LOG.info("postConstructor() called."); - try { - new SolrCollectionConfigurer(this, true, solrClientsHolder, SolrClientsHolder.CollectionType.HISTORY).start(); - } catch (Exception e) { - LOG.error("error while connecting to Solr for service logs : solrUrl=" + solrServiceLogPropsConfig.getSolrUrl() - + ", zkConnectString=" + solrServiceLogPropsConfig.getZkConnectString() - + ", collection=" + solrServiceLogPropsConfig.getCollection(), e); - } - } - - @Override - public SolrCollectionState getSolrCollectionState() { - return solrServiceLogsState; - } - - @Override - public SolrPropsConfig getSolrPropsConfig() { - return solrServiceLogPropsConfig; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java deleted file mode 100644 index f11f7d96cc1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.dao; - -import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR; -import static org.apache.solr.common.SolrException.ErrorCode.UNKNOWN; - -import java.io.IOException; - -import javax.inject.Inject; - -import org.apache.ambari.logsearch.common.LogSearchContext; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.conf.LogSearchConfigApiConfig; -import org.apache.ambari.logsearch.conf.SolrKerberosConfig; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.global.LogSearchConfigState; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.config.api.LogSearchConfigServer; -import org.apache.ambari.logsearch.configurer.LogSearchConfigConfigurer; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.log4j.Logger; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrRequest.METHOD; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.SolrResponseBase; -import org.apache.solr.client.solrj.response.UpdateResponse; -import org.apache.solr.common.SolrException; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.SolrTemplate; -import org.springframework.data.solr.core.query.SolrDataQuery; - -public abstract class SolrDaoBase { - - private static final Logger LOG = Logger.getLogger(SolrDaoBase.class); - private static final Logger LOG_PERFORMANCE = Logger.getLogger("org.apache.ambari.logsearch.performance"); - - private LogType logType; - - @Inject - private SolrKerberosConfig solrKerberosConfig; - - @Inject - private LogSearchConfigState logSearchConfigState; - - @Inject - private LogSearchConfigApiConfig logSearchConfigApiConfig; - - @Inject - private LogSearchConfigConfigurer logSearchConfigConfigurer; - - protected SolrDaoBase(LogType logType) { - this.logType = logType; - } - - public void waitForLogSearchConfig() { - if (logSearchConfigApiConfig.isConfigApiEnabled()) { - while (!logSearchConfigState.isLogSearchConfigAvailable()) { - LOG.info("Log Search config not available yet, waiting..."); - try { - Thread.sleep(1000); - } catch (Exception e) { - LOG.warn("Exception during waiting for Log Search Config", e); - } - } - } - } - - public QueryResponse process(SolrQuery solrQuery, String event) { - SolrUtil.removeDoubleOrTripleEscapeFromFilters(solrQuery); - LOG.info("Solr query will be processed: " + solrQuery); - if (getSolrClient() != null) { - event = event == null ? 
solrQuery.get("event") : event; - solrQuery.remove("event"); - try { - QueryResponse queryResponse = getSolrClient().query(solrQuery, METHOD.POST); - logSolrEvent(event, solrQuery, queryResponse); - return queryResponse; - } catch (SolrServerException | IOException e) { - throw new SolrException(SERVER_ERROR, "Error during solrQuery=" + solrQuery, e); - } - } else { - throw new SolrException(UNKNOWN, String.format("Solr configuration improper for %s logs", logType.getLabel())); - } - } - - private UpdateResponse deleteByQuery(SolrQuery solrQuery, String event) { - SolrUtil.removeDoubleOrTripleEscapeFromFilters(solrQuery); - LOG.info("Solr delete query will be processed: " + solrQuery); - if (getSolrClient() != null) { - try { - UpdateResponse updateResponse = getSolrClient().deleteByQuery(solrQuery.getQuery()); - logSolrEvent(event, solrQuery, updateResponse); - return updateResponse; - } catch (Exception e) { - throw new SolrException(SERVER_ERROR, "Error during delete solrQuery=" + solrQuery, e); - } - } else { - throw new SolrException(UNKNOWN, String.format("Solr configuration improper for %s logs", logType.getLabel())); - } - } - - public UpdateResponse deleteByQuery(SolrDataQuery solrDataQuery, String event) { - return deleteByQuery(new DefaultQueryParser().doConstructSolrQuery(solrDataQuery), event); - } - - public QueryResponse process(SolrQuery solrQuery) { - return process(solrQuery, null); - } - - public QueryResponse process(SolrDataQuery solrDataQuery) { - return process(new DefaultQueryParser().doConstructSolrQuery(solrDataQuery)); - } - - public long count(final SolrDataQuery solrDataQuery) { - return getSolrTemplate().execute(solrClient -> { - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(solrDataQuery); - solrQuery.setStart(0); - solrQuery.setRows(0); - QueryResponse queryResponse = solrClient.query(solrQuery); - long count = solrClient.query(solrQuery).getResults().getNumFound(); - LOG_PERFORMANCE.info("\n Username :- " + LogSearchContext.getCurrentUsername() + " Count SolrQuery :- " + - solrQuery + "\nQuery Time Execution :- " + queryResponse.getQTime() + " Total Time Elapsed is :- " + - queryResponse.getElapsedTime() + " Count result :- " + count); - return count; - }); - } - - public QueryResponse process(SolrDataQuery solrDataQuery, String event) { - return process(new DefaultQueryParser().doConstructSolrQuery(solrDataQuery), event); - } - - private void logSolrEvent(String event, SolrQuery solrQuery, SolrResponseBase solrResponseBase) { - if (event != null) { - LOG_PERFORMANCE.info("\n Username :- " + LogSearchContext.getCurrentUsername() + " Event :- " + event + " SolrQuery :- " + - solrQuery + "\nQuery Time Execution :- " + solrResponseBase.getQTime() + " Total Time Elapsed is :- " + - solrResponseBase.getElapsedTime()); - } - } - - public CloudSolrClient getSolrClient() { - return (CloudSolrClient) getSolrTemplate().getSolrClient(); - } - - public LogSearchConfigServer getLogSearchConfig() { - return logSearchConfigConfigurer.getConfig(); - } - - public abstract SolrTemplate getSolrTemplate(); - - public abstract void setSolrTemplate(SolrTemplate solrTemplate); - - public abstract SolrCollectionState getSolrCollectionState(); - - public abstract SolrPropsConfig getSolrPropsConfig(); - - public SolrKerberosConfig getSolrKerberosConfig() { - return this.solrKerberosConfig; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java 
b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java deleted file mode 100644 index 5a365998e50..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.dao; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.common.MessageEnums; -import org.apache.ambari.logsearch.conf.SolrEventHistoryPropsConfig; -import org.apache.http.HttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.solr.client.solrj.SolrRequest; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.HttpClientUtil; -import org.apache.solr.client.solrj.request.schema.FieldTypeDefinition; -import org.apache.solr.client.solrj.request.schema.SchemaRequest; -import org.apache.solr.client.solrj.response.LukeResponse; -import org.apache.solr.client.solrj.response.LukeResponse.FieldInfo; -import org.apache.solr.client.solrj.response.schema.SchemaResponse; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.JavaBinCodec; -import org.apache.solr.common.util.NamedList; -import org.codehaus.jettison.json.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.scheduling.annotation.Scheduled; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import javax.inject.Inject; - -public class SolrSchemaFieldDao { - - private static final Logger LOG = LoggerFactory.getLogger(SolrSchemaFieldDao.class); - - private static final int RETRY_SECOND = 30; - - @Inject - private ServiceLogsSolrDao serviceLogsSolrDao; - - @Inject - private AuditSolrDao auditSolrDao; - - @Inject - private SolrEventHistoryPropsConfig solrEventHistoryPropsConfig; - - private int retryCount; - private int skipCount; - - private Map serviceSchemaFieldNameMap = new HashMap<>(); - private Map serviceSchemaFieldTypeMap = new HashMap<>(); - private Map auditSchemaFieldNameMap = new HashMap<>(); - private Map auditSchemaFieldTypeMap = new HashMap<>(); - - @Scheduled(fixedDelay = RETRY_SECOND * 1000) - public void populateAllSchemaFields() { - if 
(skipCount > 0) { - skipCount--; - return; - } - if (serviceLogsSolrDao.getSolrCollectionState().isSolrCollectionReady()) { - CloudSolrClient serviceSolrClient = (CloudSolrClient) serviceLogsSolrDao.getSolrTemplate().getSolrClient(); - populateSchemaFields(serviceSolrClient, serviceSchemaFieldNameMap, serviceSchemaFieldTypeMap); - } - if (auditSolrDao.getSolrCollectionState().isSolrCollectionReady()) { - CloudSolrClient auditSolrClient = (CloudSolrClient) auditSolrDao.getSolrTemplate().getSolrClient(); - populateSchemaFields(auditSolrClient, auditSchemaFieldNameMap, auditSchemaFieldTypeMap); - } - } - - private void populateSchemaFields(CloudSolrClient solrClient, Map schemaFieldNameMap, - Map schemaFieldTypeMap) { - if (solrClient != null) { - LOG.debug("Started thread to get fields for collection=" + solrClient.getDefaultCollection()); - List lukeResponses = null; - SchemaResponse schemaResponse = null; - try { - lukeResponses = getLukeResponsesForCores(solrClient); - - SolrRequest schemaRequest = new SchemaRequest(); - schemaRequest.setMethod(SolrRequest.METHOD.GET); - schemaRequest.setPath("/schema"); - schemaResponse = schemaRequest.process(solrClient); - - LOG.debug("populateSchemaFields() collection=" + solrClient.getDefaultCollection() + ", luke=" + lukeResponses + - ", schema= " + schemaResponse); - } catch (SolrException | SolrServerException | IOException e) { - LOG.error("Error occured while popuplating field. collection=" + solrClient.getDefaultCollection(), e); - } - - if (schemaResponse != null) { - extractSchemaFieldsName(lukeResponses, schemaResponse, schemaFieldNameMap, schemaFieldTypeMap); - LOG.debug("Populate fields for collection " + solrClient.getDefaultCollection()+ " was successful, next update it after " + - solrEventHistoryPropsConfig.getPopulateIntervalMins() + " minutes"); - retryCount = 0; - skipCount = (solrEventHistoryPropsConfig.getPopulateIntervalMins() * 60) / RETRY_SECOND - 1; - } - else { - retryCount++; - LOG.error("Error while populating fields for collection " + solrClient.getDefaultCollection() + ", retryCount=" + retryCount); - } - } - } - - private static final String LUKE_REQUEST_URL_SUFFIX = "admin/luke?numTerms=0&wt=javabin&version=2"; - - @SuppressWarnings("unchecked") - private List getLukeResponsesForCores(CloudSolrClient solrClient) { - ZkStateReader zkStateReader = solrClient.getZkStateReader(); - Collection activeSlices = zkStateReader.getClusterState().getCollection(solrClient.getDefaultCollection()).getActiveSlices(); - - List lukeResponses = new ArrayList<>(); - for (Slice slice : activeSlices) { - for (Replica replica : slice.getReplicas()) { - try (CloseableHttpClient httpClient = HttpClientUtil.createClient(null)) { - HttpGet request = new HttpGet(replica.getCoreUrl() + LUKE_REQUEST_URL_SUFFIX); - HttpResponse response = httpClient.execute(request); - @SuppressWarnings("resource") // JavaBinCodec implements Closeable, yet it can't be closed if it is used for unmarshalling only - NamedList lukeData = (NamedList) new JavaBinCodec().unmarshal(response.getEntity().getContent()); - LukeResponse lukeResponse = new LukeResponse(); - lukeResponse.setResponse(lukeData); - lukeResponses.add(lukeResponse); - } catch (IOException e) { - LOG.error("Exception during getting luke responses", e); - } - } - } - return lukeResponses; - } - - private void extractSchemaFieldsName(List lukeResponses, SchemaResponse schemaResponse, - Map schemaFieldNameMap, Map schemaFieldTypeMap) { - try { - HashMap _schemaFieldNameMap = new HashMap<>(); - HashMap 
_schemaFieldTypeMap = new HashMap<>(); - - for (LukeResponse lukeResponse : lukeResponses) { - for (Entry e : lukeResponse.getFieldInfo().entrySet()) { - String name = e.getKey(); - String type = e.getValue().getType(); - if (!name.contains("@") && !name.startsWith("_") && !name.contains("_md5") && !name.contains("_ms") && - !name.contains(LogSearchConstants.NGRAM_PREFIX) && !name.contains("tags") && !name.contains("_str")) { - _schemaFieldNameMap.put(name, type); - } - } - } - - List fieldTypes = schemaResponse.getSchemaRepresentation().getFieldTypes(); - for (FieldTypeDefinition fieldType : fieldTypes) { - Map fieldAttributes = fieldType.getAttributes(); - String name = (String) fieldAttributes.get("name"); - String fieldTypeJson = new JSONObject(fieldAttributes).toString(); - _schemaFieldTypeMap.put(name, fieldTypeJson); - } - - List> fields = schemaResponse.getSchemaRepresentation().getFields(); - for (Map field : fields) { - String name = (String) field.get("name"); - String type = (String) field.get("type"); - if (!name.contains("@") && !name.startsWith("_") && !name.contains("_md5") && !name.contains("_ms") && - !name.contains(LogSearchConstants.NGRAM_PREFIX) && !name.contains("tags") && !name.contains("_str")) { - _schemaFieldNameMap.put(name, type); - } - } - - if (_schemaFieldNameMap.isEmpty() || _schemaFieldTypeMap.isEmpty()) { - return; - } - - synchronized (this) { - schemaFieldNameMap.clear(); - schemaFieldNameMap.putAll(_schemaFieldNameMap); - schemaFieldTypeMap.clear(); - schemaFieldTypeMap.putAll(_schemaFieldTypeMap); - } - } catch (Exception e) { - LOG.error(e + "Credentials not specified in logsearch.properties " + MessageEnums.ERROR_SYSTEM); - } - } - - public Map getSchemaFieldNameMap(LogType logType) { - return LogType.AUDIT == logType ? auditSchemaFieldNameMap : serviceSchemaFieldNameMap; - } - - public Map getSchemaFieldTypeMap(LogType logType) { - return LogType.AUDIT == logType ? auditSchemaFieldTypeMap : serviceSchemaFieldTypeMap; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java deleted file mode 100644 index 3e915a4c69c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
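Note (illustration only): the SolrSchemaFieldDao removed above merges field metadata from per-replica Luke calls (fetched over raw HTTP and decoded with JavaBinCodec) with the schema API response. The hypothetical sketch below shows the same two lookups through plain SolrJ request objects; a single LukeRequest reaches only the core that answers it, which is why the deleted DAO iterated over every replica. Class and method names are invented, and the original's internal-field filtering is only hinted at in a comment.

import java.util.HashMap;
import java.util.Map;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.request.schema.SchemaRequest;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.schema.SchemaResponse;

public class SchemaFieldLookupSketch {

  /** Collects field name -> type for one collection; hypothetical helper, not the removed DAO. */
  public static Map<String, String> fetchFieldTypes(SolrClient client, String collection) throws Exception {
    Map<String, String> fieldToType = new HashMap<>();

    // Luke reports the fields that actually exist in the index of the core answering the call;
    // the removed DAO queried every replica so that fields from all shards were merged.
    LukeRequest lukeRequest = new LukeRequest();
    lukeRequest.setNumTerms(0); // field metadata only, no term statistics
    LukeResponse lukeResponse = lukeRequest.process(client, collection);
    lukeResponse.getFieldInfo().forEach((name, info) -> fieldToType.put(name, info.getType()));

    // The schema API adds the declared (static) fields; the original also filtered out
    // internal fields such as *_md5, *_str and ngram copies before exposing the map.
    SchemaResponse schemaResponse = new SchemaRequest().process(client, collection);
    schemaResponse.getSchemaRepresentation().getFields()
        .forEach(field -> fieldToType.put((String) field.get("name"), (String) field.get("type")));

    return fieldToType;
  }
}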
- */ -package org.apache.ambari.logsearch.dao; - -import static java.util.Collections.singletonList; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.util.CommonUtil; -import org.apache.ambari.logsearch.util.FileUtil; -import org.apache.ambari.logsearch.util.JSONUtil; -import org.apache.ambari.logsearch.web.model.Privilege; -import org.apache.ambari.logsearch.web.model.Role; -import org.apache.ambari.logsearch.web.model.User; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.stereotype.Repository; - -@Repository -public class UserDao { - private static final Logger logger = Logger.getLogger(UserDao.class); - - private static final String USER_NAME = "username"; - private static final String PASSWORD = "password"; - private static final String ENC_PASSWORD = "en_password"; - private static final String NAME = "name"; - - @Inject - private AuthPropsConfig authPropsConfig; - - @Inject - private RoleDao roleDao; - - private ArrayList> userList = null; - - @SuppressWarnings("unchecked") - @PostConstruct - public void initialization() { - if (authPropsConfig.isAuthFileEnabled()) { - try { - String userPassJsonFileName = authPropsConfig.getCredentialsFile(); - logger.info("USER PASS JSON file NAME:" + userPassJsonFileName); - File jsonFile = FileUtil.getFileFromClasspath(userPassJsonFileName); - if (jsonFile == null || !jsonFile.exists()) { - logger.fatal("user_pass json file not found in classpath :" + userPassJsonFileName); - System.exit(1); - } - HashMap userInfos = JSONUtil.readJsonFromFile(jsonFile); - userList = (ArrayList>) userInfos.get("users"); - if (userList != null) { - boolean isUpdated = this.encryptAllPassword(); - userInfos.put("users", userList); - if (isUpdated) { - String jsonStr = JSONUtil.toJson(userInfos); - JSONUtil.writeJSONInFile(jsonStr, jsonFile, true); - } - } else { - userList = new ArrayList<>(); - } - - } catch (Exception exception) { - logger.error("Error while reading user prop file :" + exception.getMessage()); - userList = new ArrayList<>(); - } - } else { - logger.info("File auth is disabled."); - } - } - - public User loadUserByUsername(String username) { - logger.debug(" loadUserByUsername username" + username); - HashMap userInfo = findByUsername(username); - if (userInfo == null) { - return null; - } - - User user = new User(); - user.setFirstName(StringUtils.defaultString(userInfo.get(NAME), "Unknown")); - user.setLastName(StringUtils.defaultString(userInfo.get(NAME), "Unknown")); - user.setUsername(StringUtils.defaultString(userInfo.get(USER_NAME), "")); - user.setPassword(StringUtils.defaultString(userInfo.get(ENC_PASSWORD), "")); - user.setAuthorities(roleDao.getRolesForUser(user.getUsername())); - - return user; - } - - private HashMap findByUsername(final String username) { - if (userList == null) { - return null; - } - return userList.stream() - .filter(args -> (username != null && username.equalsIgnoreCase(args.get(USER_NAME)))) - .findFirst() - .orElse(null); - } - - private boolean encryptAllPassword() { - boolean isUpdated = false; - for (HashMap user : userList) { - String encPassword = user.get(ENC_PASSWORD); - String username = user.get(USER_NAME); - String password = user.get(PASSWORD); - if 
(StringUtils.isNotBlank(password)) { - encPassword = CommonUtil.encryptPassword(username, password); - user.put(PASSWORD, ""); - user.put(ENC_PASSWORD, encPassword); - isUpdated = true; - } - if (StringUtils.isBlank(password) && StringUtils.isBlank(encPassword)) { - logger.error("Password is empty or null for username : " + username); - } - } - return isUpdated; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java deleted file mode 100644 index 1b58fb82046..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.doc; - -public class DocConstants { - - public class CommonDescriptions { - public static final String X_AXIS_D = "The column which can be value for x-axis in graph formation"; - public static final String Y_AXIS_D = "The column which can be value for y-axis in graph formation"; - public static final String STACK_BY_D = "The graph property for stacking the plot"; - public static final String EXCLUDE_QUERY_D = "Exclude the values in query result e.g.: [{message:*timeout*}]"; - public static final String INCLUDE_QUERY_D = "Include the values in query result e.g.: [{message:*exception*}]"; - public static final String MUST_BE_D = "Include the components, comma separated values"; - public static final String MUST_NOT_D = "Exclude the components, comma separated values"; - public static final String FROM_D = "Date range param, start date"; - public static final String TO_D = "Date range param, end date"; - public static final String START_TIME_D = "Date range param which is suportted from browser url"; - public static final String END_TIME_D = "Date range param which is supported from browser url"; - public static final String START_INDEX_D = "Start index of the queried result"; - public static final String SORT_TYPE_D = "Type of sorting (osc, desc)"; - public static final String SORT_BY_D = "Sorting the results based on this field"; - public static final String PAGE_D = "Number of pages for the results"; - public static final String PAGE_SIZE_D = "Page size of the results"; - public static final String UNIT_D = "Aggregate the data with time gap as unit i.e 1MINUTE"; - public static final String QUERY_D = "not required"; - public static final String I_MESSAGE_D = "Include query which will query against message column"; - public static final String E_MESSAGE_D = "Exclude query which will query against message column"; - public static final String IS_LAST_PAGE_D = "Show last 
page (true/false)"; - public static final String FIELD_D = "Get values for particular field"; - public static final String FORMAT_D = "File Export format, can be 'txt' or 'json'"; - public static final String TOP = "Number that defines how many top element you would like to see."; - public static final String USER_D = "Filter for users (comma separated list)"; - public static final String LOG_ID_D = "Id of the log component"; - public static final String SHIPPER_CONFIG_D = "Input config json for logfeeder shipper"; - public static final String TEST_ENTRY_D = "Log sample for testing"; - - } - - public class AuditOperationDescriptions { - public static final String GET_AUDIT_CLUSTERS_OD = "Get all of the clusters for audit logs"; - public static final String GET_AUDIT_SCHEMA_FIELD_LIST_OD = "Get list of schema fields in audit collection"; - public static final String GET_AUDIT_LOGS_OD = "Get the list of logs details"; - public static final String PURGE_AUDIT_LOGS_OD = "Purge service logs based by criteria"; - public static final String GET_AUDIT_COMPONENTS_OD = "Get the list of audit components currently active or having data in Solr"; - public static final String GET_AUDIT_LINE_GRAPH_DATA_OD = "Get the data required for line graph"; - public static final String GET_TOP_AUDIT_RESOURCES_OD = "Get the top audit resource count (grouped by type)"; - public static final String EXPORT_USER_TALBE_TO_TEXT_FILE_OD = "Export the tables shown on Audit tab"; - public static final String GET_SERVICE_LOAD_OD = "The graph for showing the top users accessing the services"; - } - - public class ServiceDescriptions { - public static final String LEVEL_D = "filter for log level"; - public static final String BUNDLE_ID = "filter for host"; - public static final String CLUSTER_D = "filter for clusters (comma separated list)"; - public static final String FILE_NAME_D = "File name filter which is supported from browser url"; - public static final String HOST_NAME_D = "Host name filter which is supported from browser url"; - public static final String COMPONENT_NAME_D = "Component name filter which is supported from browser url"; - public static final String FIND_D = "Finding particular text on subsequent pages in case of table view with pagination"; - public static final String ID_D = "Log id value for traversing to that particular record with that log id"; - public static final String KEYWORD_TYPE_D = "Serching the find param value in previous or next in paginated table"; - public static final String TOKEN_D = "unique number used along with FIND_D. 
The request can be canceled using this token"; - public static final String SOURCE_LOG_ID_D = "fetch the record set having that log Id"; - public static final String NUMBER_ROWS_D = "Getting rows after particular log entry - used in 'Preview' option"; - public static final String SCROLL_TYPE_D = "Used in 'Preview' feature for getting records 'after' or 'before'"; - public static final String UTC_OFFSET_D = "timezone offset"; - public static final String HOST_PARAMS_D = "filter for hosts"; - } - - public class ServiceOperationDescriptions { - public static final String GET_SERVICE_CLUSTERS_OD = "Get all of the clusters for service logs"; - public static final String SEARCH_LOGS_OD = "Searching logs entry"; - public static final String PURGE_LOGS_OD = "Purge service logs based by criteria"; - public static final String GET_HOSTS_OD = "Get the list of service hosts currently active or having data in Solr"; - public static final String GET_COMPONENTS_OD = "Get the list of service components currently active or having data in Solr"; - public static final String GET_AGGREGATED_INFO_OD = "not required"; - public static final String GET_LOG_LEVELS_COUNT_OD = "Get Log levels with their counts"; - public static final String GET_COMPONENTS_COUNT_OD = "Get components with their counts"; - public static final String GET_HOSTS_COUNT_OD = "Get hosts with their counts"; - public static final String GET_TREE_EXTENSION_OD = "Get host and compoenets hierarchy with log counts"; - public static final String GET_HISTOGRAM_DATA_OD = "Get data for histogram"; - public static final String EXPORT_TO_TEXT_FILE_OD = "Export the table data in file"; - public static final String GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD = "Get components with log level distribution count"; - public static final String GET_ANY_GRAPH_COUNT_DATA_OD = "Get the data generic enough to use for graph plots (yAzis is always count)"; - public static final String GET_HOST_LIST_BY_COMPONENT_OD = "Get host list of components"; - public static final String GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD = "Get service logs schema fields"; - public static final String GET_AFTER_BEFORE_LOGS_OD = "Preview feature data"; - public static final String REQUEST_CANCEL = "Cancel an ongoing solr request"; - public static final String GET_HOST_LOGFILES_OD = "Get the log files of the components of a host"; - } - - public class PublicOperationDescriptions { - public static final String GET_FEATURES_LIST = "Get features list."; - public static final String GET_APP_DETAILS_OD = "Get application details."; - public static final String GET_AUTH_DETAILS_OD = "Get authentication details."; - public static final String GET_ALL_PROPERTIES_INFO_OD = "List all available properties for Log Search and Log Feeder"; - public static final String GET_LOGSEARCH_PROPERTIES_INFO_OD = "List all available properties for Log Search property file (e.g: logsearch.properties/logfeeder.properties)"; - public static final String GET_ALL_SHIPPER_CONFIG_INFO_OD = "List all available shipper configuration element"; - } - - public class EventHistoryDescriptions { - public static final String FILTER_NAME_D = "The saved query as filter in Solr, search is sopprted by this param"; - public static final String ROW_TYPE_D = "Row type is solr to identify as filter query"; - } - - public class EventHistoryOperationDescriptions { - public static final String SAVE_EVENT_HISTORY_DATA_OD = "Save event history data"; - public static final String DELETE_EVENT_HISTORY_DATA_OD = "Delete event history data"; - public 
static final String GET_EVENT_HISTORY_DATA_OD = "Get event history data"; - public static final String GET_ALL_USER_NAMES_OD = "Get all user names"; - } - - public class ShipperConfigOperationDescriptions { - public static final String GET_SERVICE_NAMES_OD = "Get service names"; - public static final String GET_SHIPPER_CONFIG_OD = "Get shipper config"; - public static final String SET_SHIPPER_CONFIG_OD = "Set shipper config"; - public static final String TEST_SHIPPER_CONFIG_OD = "Test shipper config"; - public static final String GET_LOG_LEVEL_FILTER_OD = "Get log level filter"; - public static final String UPDATE_LOG_LEVEL_FILTER_OD = "Update log level filter"; - } - - public class StatusOperationDescriptions { - public static final String STATUS_OD = "Get statuses for collections (not health state - show true if something already done)"; - public static final String SERVICE_LOGS_STATUS_OD = "Get statuses for service log collection (not health state - show true if something already done)"; - public static final String AUDIT_LOGS_STATUS_OD = "Get statuses for collections (not health state - show true if something already done)"; - public static final String EVENT_HISTORY_STATUS_OD = "Get statuses for history collection (not health state - show true if something already done)"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java deleted file mode 100644 index fde176f4cc1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.handler; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.commons.collections.CollectionUtils; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.SolrZooKeeper; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.data.ACL; -import org.apache.zookeeper.data.Stat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -public class ACLHandler implements SolrZkRequestHandler { - - private static final Logger LOG = LoggerFactory.getLogger(ACLHandler.class); - - @Override - public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception { - List aclsToSetList = solrPropsConfig.getZkAcls(); - if (CollectionUtils.isNotEmpty(aclsToSetList)) { - LOG.info("Setting acls for '{}' collection...", solrPropsConfig.getCollection()); - SolrZkClient zkClient = solrClient.getZkStateReader().getZkClient(); - SolrZooKeeper solrZooKeeper = zkClient.getSolrZooKeeper(); - String collectionPath = String.format("/collections/%s", solrPropsConfig.getCollection()); - String configsPath = String.format("/configs/%s", solrPropsConfig.getConfigName()); - List collectionAcls = solrZooKeeper.getACL(collectionPath, new Stat()); - if (isRefreshAclsNeeded(aclsToSetList, collectionAcls)) { - LOG.info("Acls differs for {}, update acls.", collectionPath); - setRecursivelyOn(solrZooKeeper, collectionPath, aclsToSetList); - } - List configsAcls = solrZooKeeper.getACL(configsPath, new Stat()); - if (isRefreshAclsNeeded(aclsToSetList, configsAcls)) { - LOG.info("Acls differs for {}, update acls.", configsPath); - setRecursivelyOn(solrZooKeeper, configsPath, aclsToSetList); - } - } - return true; - } - - private boolean isRefreshAclsNeeded(List acls, List newAcls) { - boolean result = false; - if (acls != null) { - if (acls.size() != newAcls.size()) { - return true; - } - result = aclDiffers(acls, newAcls); - if (!result) { - result = aclDiffers(newAcls, acls); - } - } - return result; - } - - private boolean aclDiffers(List aclList1, List aclList2) { - for (ACL acl : aclList1) { - for (ACL newAcl : aclList2) { - if (acl.getId() != null && acl.getId().getId().equals(newAcl.getId().getId()) - && acl.getPerms() != newAcl.getPerms()) { - LOG.info("ACL for '{}' differs: '{}' on znode, should be '{}'", - acl.getId().getId(), acl.getPerms(), newAcl.getPerms()); - return true; - } - } - } - return false; - } - - private void setRecursivelyOn(SolrZooKeeper solrZooKeeper, String node, List acls) - throws KeeperException, InterruptedException { - solrZooKeeper.setACL(node, acls, -1); - for (String child : solrZooKeeper.getChildren(node, null)) { - String path = node.endsWith("/") ? node + child : node + "/" + child; - setRecursivelyOn(solrZooKeeper, path, acls); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/AbstractSolrConfigHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/AbstractSolrConfigHandler.java deleted file mode 100644 index f58b29d8f5a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/AbstractSolrConfigHandler.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.handler; - -import static org.apache.solr.common.cloud.ZkConfigManager.CONFIGS_ZKNODE; - -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkConfigManager; -import org.apache.zookeeper.KeeperException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class AbstractSolrConfigHandler implements SolrZkRequestHandler { - - private static final Logger LOG = LoggerFactory.getLogger(AbstractSolrConfigHandler.class); - - private File configSetFolder; - - public AbstractSolrConfigHandler(File configSetFolder) { - this.configSetFolder = configSetFolder; - } - - @Override - public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception { - boolean reloadCollectionNeeded = false; - String separator = FileSystems.getDefault().getSeparator(); - solrClient.connect(); - SolrZkClient zkClient = solrClient.getZkStateReader().getZkClient(); - try { - ZkConfigManager zkConfigManager = new ZkConfigManager(zkClient); - boolean configExists = zkConfigManager.configExists(solrPropsConfig.getConfigName()); - if (configExists) { - uploadMissingConfigFiles(zkClient, zkConfigManager, solrPropsConfig.getConfigName()); - reloadCollectionNeeded = doIfConfigExists(solrPropsConfig, zkClient, separator); - } else { - doIfConfigNotExist(solrPropsConfig, zkConfigManager); - uploadMissingConfigFiles(zkClient, zkConfigManager, solrPropsConfig.getConfigName()); - } - } catch (Exception e) { - throw new RuntimeException(String.format("Cannot upload configurations to zk. 
(collection: %s, config set folder: %s)", - solrPropsConfig.getCollection(), solrPropsConfig.getConfigSetFolder()), e); - } - return reloadCollectionNeeded; - } - - /** - * Update config file (like solrconfig.xml) to zookeeper znode of solr - */ - public abstract boolean updateConfigIfNeeded(SolrPropsConfig solrPropsConfig, SolrZkClient zkClient, File file, - String separator, byte[] content) throws IOException; - - /** - * Config file name which should be uploaded to zookeeper - */ - public abstract String getConfigFileName(); - - @SuppressWarnings("unused") - public void doIfConfigNotExist(SolrPropsConfig solrPropsConfig, ZkConfigManager zkConfigManager) throws IOException { - // Do nothing - } - - @SuppressWarnings("unused") - public void uploadMissingConfigFiles(SolrZkClient zkClient, ZkConfigManager zkConfigManager, String configName) throws IOException { - // do Nothing - } - - public boolean doIfConfigExists(SolrPropsConfig solrPropsConfig, SolrZkClient zkClient, String separator) throws IOException { - LOG.info("Config set exists for '{}' collection. Refreshing it if needed...", solrPropsConfig.getCollection()); - try { - File[] listOfFiles = getConfigSetFolder().listFiles(); - if (listOfFiles == null) - return false; - byte[] data = zkClient.getData(String.format("%s/%s/%s", CONFIGS_ZKNODE, solrPropsConfig.getConfigName(), getConfigFileName()), null, null, true); - - for (File file : listOfFiles) { - if (file.getName().equals(getConfigFileName()) && updateConfigIfNeeded(solrPropsConfig, zkClient, file, separator, data)) { - return true; - } - } - return false; - } catch (KeeperException | InterruptedException e) { - throw new IOException("Error downloading files from zookeeper path " + solrPropsConfig.getConfigName(), - SolrZkClient.checkInterrupted(e)); - } - } - - protected File getConfigSetFolder() { - return configSetFolder; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java deleted file mode 100644 index a13c27fcf3b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
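Note (illustration only): the AbstractSolrConfigHandler removed above checks whether a config set already exists in ZooKeeper and uploads or refreshes it before the collection is created or reloaded. The sketch below condenses that bootstrap path using the same ZkConfigManager API; the class name, ZooKeeper address, and config-set paths are placeholders, and it assumes the Solr 6/7-era SolrJ this module builds against.

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkConfigManager;

public class ConfigSetBootstrapSketch {

  /** Returns true when the local config set had to be uploaded to ZooKeeper. */
  public static boolean ensureConfigSet(SolrZkClient zkClient, String configName, Path configSetFolder)
      throws IOException {
    ZkConfigManager zkConfigManager = new ZkConfigManager(zkClient);
    if (zkConfigManager.configExists(configName)) {
      // An existing config set is compared file by file (e.g. solrconfig.xml) by the handler
      // above and re-uploaded only when the bytes differ; that part is omitted here.
      return false;
    }
    zkConfigManager.uploadConfigDir(configSetFolder, configName);
    return true;
  }

  public static void main(String[] args) throws Exception {
    // Placeholder ZooKeeper address and config-set location.
    SolrZkClient zkClient = new SolrZkClient("localhost:2181", 15000);
    try {
      ensureConfigSet(zkClient, "hadoop_logs", Paths.get("/tmp/solr_configsets/hadoop_logs/conf"));
    } finally {
      zkClient.close();
    }
  }
}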
- */ -package org.apache.ambari.logsearch.handler; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.commons.lang.StringUtils; -import org.apache.http.HttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.impl.HttpClientUtil; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.ZkStateReader; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.ws.rs.core.Response; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.ROUTER_FIELD; - -public class CreateCollectionHandler implements SolrZkRequestHandler { - - private static final Logger LOG = LoggerFactory.getLogger(CreateCollectionHandler.class); - - private static final String MODIFY_COLLECTION_QUERY = "/admin/collections?action=MODIFYCOLLECTION&collection=%s&%s=%d"; - private static final String MAX_SHARDS_PER_NODE = "maxShardsPerNode"; - - private final List allCollectionList; - - public CreateCollectionHandler(List allCollectionList) { - this.allCollectionList = allCollectionList; - } - - @Override - public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception { - boolean result; - if (solrPropsConfig.isSolrImplicitRouting()) { - result = setupCollectionsWithImplicitRouting(solrClient, solrPropsConfig, this.allCollectionList); - } else { - result = createCollection(solrClient, solrPropsConfig, this.allCollectionList); - } - - return result; - } - - private boolean setupCollectionsWithImplicitRouting(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig, List allCollectionList) - throws Exception { - LOG.info("setupCollectionsWithImplicitRouting(). 
collectionName=" + solrPropsConfig.getCollection() - + ", numberOfShards=" + solrPropsConfig.getNumberOfShards()); - - // Default is true, because if the collection and shard is already there, then it will return true - boolean returnValue = true; - - List shardsList = new ArrayList<>(); - for (int i = 0; i < solrPropsConfig.getNumberOfShards(); i++) { - shardsList.add("shard" + i); - } - String shardsListStr = StringUtils.join(shardsList, ','); - - // Check if collection is already in zookeeper - if (!allCollectionList.contains(solrPropsConfig.getCollection())) { - LOG.info("Creating collection " + solrPropsConfig.getCollection() + ", shardsList=" + shardsList); - CollectionAdminRequest.Create collectionCreateRequest = CollectionAdminRequest.createCollection( - solrPropsConfig.getCollection(), solrPropsConfig.getConfigName(), solrPropsConfig.getNumberOfShards(), - solrPropsConfig.getReplicationFactor()); - collectionCreateRequest.setRouterName("implicit"); - collectionCreateRequest.setShards(shardsListStr); - collectionCreateRequest.setRouterField(ROUTER_FIELD); - collectionCreateRequest.setMaxShardsPerNode(solrPropsConfig.getReplicationFactor() * solrPropsConfig.getNumberOfShards()); - - CollectionAdminResponse createResponse = collectionCreateRequest.process(solrClient); - if (createResponse.getStatus() != 0) { - returnValue = false; - LOG.error("Error creating collection. collectionName=" + solrPropsConfig.getCollection() - + ", shardsList=" + shardsList +", response=" + createResponse); - } else { - LOG.info("Created collection " + solrPropsConfig.getCollection() + ", shardsList=" + shardsList); - } - } else { - LOG.info("Collection " + solrPropsConfig.getCollection() + " is already there. Will check whether it has the required shards"); - Collection slices = getSlices(solrClient, solrPropsConfig); - Collection existingShards = getShards(slices, solrPropsConfig); - if (existingShards.size() < shardsList.size()) { - try { - updateMaximumNumberOfShardsPerCore(slices, solrPropsConfig); - } catch (Throwable t) { - returnValue = false; - LOG.error(String.format("Exception during updating collection (%s)", t)); - } - } - for (String shard : shardsList) { - if (!existingShards.contains(shard)) { - try { - LOG.info("Going to add Shard " + shard + " to collection " + solrPropsConfig.getCollection()); - CollectionAdminRequest.CreateShard createShardRequest = - CollectionAdminRequest.createShard(solrPropsConfig.getCollection(), shard); - CollectionAdminResponse response = createShardRequest.process(solrClient); - if (response.getStatus() != 0) { - LOG.error("Error creating shard " + shard + " in collection " + solrPropsConfig.getCollection() + ", response=" + response); - returnValue = false; - break; - } else { - LOG.info("Successfully created shard " + shard + " in collection " + solrPropsConfig.getCollection()); - } - } catch (Throwable t) { - LOG.error("Error creating shard " + shard + " in collection " + solrPropsConfig.getCollection(), t); - returnValue = false; - break; - } - } - } - } - return returnValue; - } - - private boolean createCollection(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig, List allCollectionList) - throws SolrServerException, IOException { - - if (allCollectionList.contains(solrPropsConfig.getCollection())) { - LOG.info("Collection " + solrPropsConfig.getCollection() + " is already there. 
Won't create it"); - return true; - } - - LOG.info("Creating collection " + solrPropsConfig.getCollection() + ", numberOfShards=" + solrPropsConfig.getNumberOfShards() + - ", replicationFactor=" + solrPropsConfig.getReplicationFactor()); - - CollectionAdminRequest.Create collectionCreateRequest = CollectionAdminRequest.createCollection( - solrPropsConfig.getCollection(), solrPropsConfig.getConfigName(), solrPropsConfig.getNumberOfShards(), - solrPropsConfig.getReplicationFactor()); - collectionCreateRequest.setMaxShardsPerNode(calculateMaxShardsPerNode(solrPropsConfig)); - CollectionAdminResponse createResponse = collectionCreateRequest.process(solrClient); - if (createResponse.getStatus() != 0) { - LOG.error("Error creating collection. collectionName=" + solrPropsConfig.getCollection() + ", response=" + createResponse); - return false; - } else { - LOG.info("Created collection " + solrPropsConfig.getCollection() + ", numberOfShards=" + solrPropsConfig.getNumberOfShards() + - ", replicationFactor=" + solrPropsConfig.getReplicationFactor()); - return true; - } - } - - private void updateMaximumNumberOfShardsPerCore(Collection slices, SolrPropsConfig solrPropsConfig) throws IOException { - String baseUrl = getRandomBaseUrl(slices); - if (baseUrl != null) { - CloseableHttpClient httpClient = HttpClientUtil.createClient(null); - HttpGet request = new HttpGet(baseUrl + String.format(MODIFY_COLLECTION_QUERY, - solrPropsConfig.getCollection(), MAX_SHARDS_PER_NODE, calculateMaxShardsPerNode(solrPropsConfig))); - HttpResponse response = httpClient.execute(request); - if (response.getStatusLine().getStatusCode() != Response.Status.OK.getStatusCode()) { - throw new IllegalStateException(String.format("Cannot update collection (%s) - increase max number of nodes per core", solrPropsConfig.getCollection())); - } - } else { - throw new IllegalStateException(String.format("Cannot get any core url for updating collection (%s)", solrPropsConfig.getCollection())); - } - } - - private Collection getSlices(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) { - ZkStateReader reader = solrClient.getZkStateReader(); - DocCollection collection = reader.getClusterState().getCollection(solrPropsConfig.getCollection()); - return collection.getSlices(); - } - - private Collection getShards(Collection slices, SolrPropsConfig solrPropsConfig) { - Collection list = new HashSet<>(); - for (Slice slice : slices) { - for (Replica replica : slice.getReplicas()) { - LOG.info("colName=" + solrPropsConfig.getCollection() + ", slice.name=" + slice.getName() + ", slice.state=" + slice.getState() + - ", replica.core=" + replica.getStr("core") + ", replica.state=" + replica.getStr("state")); - list.add(slice.getName()); - } - } - return list; - } - - private String getRandomBaseUrl(Collection slices) { - String coreUrl = null; - if (slices != null) { - for (Slice slice : slices) { - if (!slice.getReplicas().isEmpty()) { - Replica replica = slice.getReplicas().iterator().next(); - coreUrl = replica.getStr("base_url"); - if (coreUrl != null) { - break; - } - } - } - } - return coreUrl; - } - - private Integer calculateMaxShardsPerNode(SolrPropsConfig solrPropsConfig) { - return solrPropsConfig.getReplicationFactor() * solrPropsConfig.getNumberOfShards(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java deleted file 
mode 100644 index b2c8e4f5607..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.handler; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; -import org.apache.solr.common.SolrException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; - -public class ListCollectionHandler implements SolrZkRequestHandler> { - - private static final Logger LOG = LoggerFactory.getLogger(ListCollectionHandler.class); - - @SuppressWarnings("unchecked") - @Override - public List handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception { - try { - CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List(); - CollectionAdminResponse response = colListReq.process(solrClient); - if (response.getStatus() != 0) { - LOG.error("Error getting collection list from solr. response=" + response); - return null; - } - return (List) response.getResponse().get("collections"); - } catch (SolrException e) { - LOG.error("getCollections() operation failed", e); - return new ArrayList<>(); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java deleted file mode 100644 index 601bdbacece..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
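Note (illustration only): the ListCollectionHandler removed above, together with the ReloadCollectionHandler that follows, wraps two CollectionAdminRequest calls. A compact illustration of the same calls outside the SolrZkRequestHandler plumbing might look like the sketch below; class and method names are invented for the example.

import java.util.List;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;

public class CollectionAdminSketch {

  @SuppressWarnings("unchecked")
  public static List<String> listCollections(SolrClient solrClient) throws Exception {
    CollectionAdminResponse response = new CollectionAdminRequest.List().process(solrClient);
    if (response.getStatus() != 0) {
      throw new IllegalStateException("LIST action failed, response=" + response);
    }
    // The "collections" entry of the admin response holds the collection names.
    return (List<String>) response.getResponse().get("collections");
  }

  public static void reload(SolrClient solrClient, String collection) throws Exception {
    // RELOAD makes a collection pick up config-set changes without restarting Solr.
    CollectionAdminRequest.reloadCollection(collection).process(solrClient);
  }
}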
- */ -package org.apache.ambari.logsearch.handler; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ReloadCollectionHandler implements SolrZkRequestHandler { - - private static final Logger LOG = LoggerFactory.getLogger(ReloadCollectionHandler.class); - - @Override - public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception { - boolean result = false; - try { - LOG.info("Reload collection - '{}'", solrPropsConfig.getCollection()); - CollectionAdminRequest.Reload request = CollectionAdminRequest.reloadCollection(solrPropsConfig.getCollection()); - request.process(solrClient); - result = true; - } catch (Exception e) { - LOG.error(String.format("Reload collection ('%s') failed.", solrPropsConfig.getCollection()), e); - } - return result; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java deleted file mode 100644 index 85ae6cb0b8d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.handler; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.solr.client.solrj.impl.CloudSolrClient; - -interface SolrZkRequestHandler { - T handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception; -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java deleted file mode 100644 index 2a7590cc0d9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.handler; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.FileSystems; -import java.util.Arrays; - -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkConfigManager; -import org.apache.zookeeper.CreateMode; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class UploadConfigurationHandler extends AbstractSolrConfigHandler { - - private static final Logger LOG = LoggerFactory.getLogger(UploadConfigurationHandler.class); - - private static final String SOLR_CONFIG_FILE = "solrconfig.xml"; - private static final String[] configFiles = { - "admin-extra.html", "admin-extra.menu-bottom.html", "admin-extra.menu-top.html", - "elevate.xml", "enumsConfig.xml", "managed-schema", "solrconfig.xml" - }; - private boolean hasEnumConfig; - - public UploadConfigurationHandler(File configSetFolder, boolean hasEnumConfig) { - super(configSetFolder); - this.hasEnumConfig = hasEnumConfig; - } - - @Override - public boolean updateConfigIfNeeded(SolrPropsConfig solrPropsConfig, SolrZkClient zkClient, File file, - String separator, byte[] content) throws IOException { - if (Arrays.equals(FileUtils.readFileToByteArray(file), content)) - return false; - - LOG.info("Solr config file differs ('{}'), upload config set to zookeeper", file.getName()); - ZkConfigManager zkConfigManager = new ZkConfigManager(zkClient); - zkConfigManager.uploadConfigDir(getConfigSetFolder().toPath(), solrPropsConfig.getConfigName()); - String filePath = String.format("%s%s%s", getConfigSetFolder(), separator, getConfigFileName()); - String configsPath = String.format("/%s/%s/%s", "configs", solrPropsConfig.getConfigName(), getConfigFileName()); - uploadFileToZk(zkClient, filePath, configsPath); - return true; - } - - @Override - public void doIfConfigNotExist(SolrPropsConfig solrPropsConfig, ZkConfigManager zkConfigManager) throws IOException { - LOG.info("Config set does not exist for '{}' collection. 
Uploading it to zookeeper...", solrPropsConfig.getCollection()); - File[] listOfFiles = getConfigSetFolder().listFiles(); - if (listOfFiles != null) { - zkConfigManager.uploadConfigDir(getConfigSetFolder().toPath(), solrPropsConfig.getConfigName()); - } - } - - @Override - public String getConfigFileName() { - return SOLR_CONFIG_FILE; - } - - @Override - public void uploadMissingConfigFiles(SolrZkClient zkClient, ZkConfigManager zkConfigManager, String configName) throws IOException { - LOG.info("Check any of the configs files are missing for config ({})", configName); - for (String configFile : configFiles) { - if ("enumsConfig.xml".equals(configFile) && !hasEnumConfig) { - LOG.info("Config file ({}) is not needed for {}", configFile, configName); - continue; - } - String zkPath = String.format("%s/%s", configName, configFile); - if (zkConfigManager.configExists(zkPath)) { - LOG.info("Config file ({}) has already uploaded properly.", configFile); - } else { - LOG.info("Config file ({}) is missing. Reupload...", configFile); - FileSystems.getDefault().getSeparator(); - uploadFileToZk(zkClient, - String.format("%s%s%s", getConfigSetFolder(), FileSystems.getDefault().getSeparator(), configFile), - String.format("%s%s", "/configs/", zkPath)); - } - } - } - - private void uploadFileToZk(SolrZkClient zkClient, String filePath, String configsPath) throws FileNotFoundException { - InputStream is = new FileInputStream(filePath); - try { - if (zkClient.exists(configsPath, true)) { - zkClient.setData(configsPath, IOUtils.toByteArray(is), true); - } else { - zkClient.create(configsPath, IOUtils.toByteArray(is), CreateMode.PERSISTENT, true); - } - } catch (Exception e) { - throw new IllegalStateException(e); - } finally { - IOUtils.closeQuietly(is); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/AbstractSolrHealthIndicator.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/AbstractSolrHealthIndicator.java deleted file mode 100644 index 59936e9f807..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/AbstractSolrHealthIndicator.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
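Note (illustration only): when a single config file such as solrconfig.xml changes, the UploadConfigurationHandler removed above writes that file straight to its znode rather than re-uploading the whole config set. A stripped-down sketch of that create-or-update write, with hypothetical names, follows.

import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.zookeeper.CreateMode;

public class ZkConfigFileWriteSketch {

  /** Creates or overwrites /configs/<configName>/<fileName> with the local file content. */
  public static void writeConfigFile(SolrZkClient zkClient, String configName, Path localFile) throws Exception {
    byte[] content = Files.readAllBytes(localFile);
    String zkPath = "/configs/" + configName + "/" + localFile.getFileName();
    if (zkClient.exists(zkPath, true)) {
      zkClient.setData(zkPath, content, true);                        // znode already there: overwrite it
    } else {
      zkClient.create(zkPath, content, CreateMode.PERSISTENT, true);  // first upload: create it
    }
  }
}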
- */ -package org.apache.ambari.logsearch.health; - -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.springframework.boot.actuate.health.AbstractHealthIndicator; -import org.springframework.boot.actuate.health.Health; -import org.springframework.boot.actuate.health.Status; -import org.springframework.data.solr.core.SolrTemplate; - -public abstract class AbstractSolrHealthIndicator extends AbstractHealthIndicator { - - @Override - protected void doHealthCheck(Health.Builder builder) throws Exception { - Status status = Status.DOWN; - String errorDetails = null; - if (getSolrTemplate() != null && getSolrTemplate().getSolrClient() != null) { - try { - SolrClient solrClient = getSolrTemplate().getSolrClient(); - SolrQuery q = new SolrQuery("*:*"); - q.setRows(0); - QueryResponse response = solrClient.query(q); - if (response.getStatus() == 0) { - status = Status.UP; - if (response.getResults() != null) { - builder.withDetail("numDocs", response.getResults().getNumFound()); - } - } - } catch (Exception e) { - errorDetails = e.getMessage(); - } - } - builder.status(status); - if (errorDetails != null) { - builder.withDetail("error", errorDetails); - } - } - - public abstract SolrTemplate getSolrTemplate(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrAuditLogsHealthIndicator.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrAuditLogsHealthIndicator.java deleted file mode 100644 index 901dfc00b09..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrAuditLogsHealthIndicator.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
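Note (illustration only): the AbstractSolrHealthIndicator removed above reports a collection as UP when a rows=0 "*:*" query succeeds and exposes numFound as a detail; the subclasses that follow only supply the SolrTemplate. A self-contained variant that queries a named collection through a plain SolrClient could look like the sketch below; the constructor arguments are assumptions for illustration.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;

public class SolrCollectionHealthIndicator extends AbstractHealthIndicator {

  private final SolrClient solrClient;
  private final String collection;

  public SolrCollectionHealthIndicator(SolrClient solrClient, String collection) {
    this.solrClient = solrClient;
    this.collection = collection;
  }

  @Override
  protected void doHealthCheck(Health.Builder builder) {
    try {
      SolrQuery query = new SolrQuery("*:*");
      query.setRows(0); // only the response header and numFound are needed
      QueryResponse response = solrClient.query(collection, query);
      if (response.getStatus() == 0) {
        builder.up().withDetail("numDocs", response.getResults().getNumFound());
      } else {
        builder.down().withDetail("solrStatus", response.getStatus());
      }
    } catch (Exception e) {
      builder.down(e); // connection or query failure marks the collection DOWN
    }
  }
}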
- */ -package org.apache.ambari.logsearch.health; - -import org.apache.ambari.logsearch.dao.AuditSolrDao; -import org.springframework.data.solr.core.SolrTemplate; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class SolrAuditLogsHealthIndicator extends AbstractSolrHealthIndicator { - - @Inject - private AuditSolrDao auditSolrDao; - - @Override - public SolrTemplate getSolrTemplate() { - return auditSolrDao.getSolrTemplate(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrEventHistoryHealthIndicator.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrEventHistoryHealthIndicator.java deleted file mode 100644 index bbb1f47f5e1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrEventHistoryHealthIndicator.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.health; - -import org.apache.ambari.logsearch.dao.EventHistorySolrDao; -import org.springframework.data.solr.core.SolrTemplate; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class SolrEventHistoryHealthIndicator extends AbstractSolrHealthIndicator { - - @Inject - private EventHistorySolrDao eventHistorySolrDao; - - @Override - public SolrTemplate getSolrTemplate() { - return eventHistorySolrDao.getSolrTemplate(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrServiceLogsHealthIndicator.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrServiceLogsHealthIndicator.java deleted file mode 100644 index ff95bee9d9c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/health/SolrServiceLogsHealthIndicator.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.health; - -import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao; -import org.springframework.data.solr.core.SolrTemplate; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class SolrServiceLogsHealthIndicator extends AbstractSolrHealthIndicator { - - @Inject - private ServiceLogsSolrDao serviceLogsSolrDao; - - @Override - public SolrTemplate getSolrTemplate() { - return serviceLogsSolrDao.getSolrTemplate(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AlreadyExistsException.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AlreadyExistsException.java deleted file mode 100644 index bbafbecf023..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AlreadyExistsException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.manager; - -public class AlreadyExistsException extends RuntimeException { - public AlreadyExistsException(String message) { - super(message); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java deleted file mode 100644 index 97bda3a825e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.manager; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.StringWriter; -import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.ambari.logsearch.common.LabelFallbackHandler; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.conf.UIMappingConfig; -import org.apache.ambari.logsearch.dao.AuditSolrDao; -import org.apache.ambari.logsearch.dao.SolrSchemaFieldDao; -import org.apache.ambari.logsearch.model.metadata.AuditFieldMetadataResponse; -import org.apache.ambari.logsearch.model.metadata.FieldMetadata; -import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest; -import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest; -import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest; -import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest; -import org.apache.ambari.logsearch.model.request.impl.TopFieldAuditLogRequest; -import org.apache.ambari.logsearch.model.request.impl.UserExportRequest; -import org.apache.ambari.logsearch.model.response.AuditLogData; -import org.apache.ambari.logsearch.model.response.AuditLogResponse; -import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse; -import org.apache.ambari.logsearch.model.response.LogData; -import org.apache.ambari.logsearch.solr.ResponseDataGenerator; -import org.apache.ambari.logsearch.solr.SolrConstants; -import org.apache.ambari.logsearch.solr.model.SolrAuditLogData; -import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData; -import org.apache.ambari.logsearch.util.DownloadUtil; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.log4j.Logger; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.response.FacetField.Count; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.UpdateResponse; -import org.springframework.core.convert.ConversionService; -import org.springframework.data.solr.core.query.SimpleFacetQuery; -import org.springframework.data.solr.core.query.SimpleQuery; - -import freemarker.template.Configuration; -import freemarker.template.Template; -import freemarker.template.TemplateException; - -@Named -public class AuditLogsManager extends ManagerBase { - private static final Logger logger = Logger.getLogger(AuditLogsManager.class); - - private static final String AUDIT_LOG_TEMPLATE = "audit_log_txt.ftl"; - - @Inject - private AuditSolrDao auditSolrDao; - @Inject - private ResponseDataGenerator responseDataGenerator; - @Inject - private ConversionService conversionService; - @Inject - private Configuration freemarkerConfiguration; - @Inject - private SolrSchemaFieldDao solrSchemaFieldDao; - @Inject - private UIMappingConfig uiMappingConfig; - @Inject - private LabelFallbackHandler labelFallbackHandler; - - public AuditLogResponse getLogs(AuditLogRequest request) { - 
String event = "/audit/logs"; - SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class); - if (request.isLastPage()) { - return getLastPage(auditSolrDao, solrQuery, event); - } else { - AuditLogResponse response = getLogAsPaginationProvided(solrQuery, auditSolrDao, event); - if (response.getTotalCount() > 0 && CollectionUtils.isEmpty(response.getLogList())) { - request.setLastPage(true); - solrQuery = conversionService.convert(request, SimpleQuery.class); - AuditLogResponse lastResponse = getLastPage(auditSolrDao, solrQuery, event); - if (lastResponse != null){ - response = lastResponse; - } - } - return response; - } - } - - private List getComponents(AuditComponentRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - List docList = new ArrayList<>(); - QueryResponse queryResponse = auditSolrDao.process(facetQuery); - List componentsCount = responseDataGenerator.generateCount(queryResponse); - - for (Count component : componentsCount) { - SolrComponentTypeLogData logData = new SolrComponentTypeLogData(); - logData.setType(component.getName()); - docList.add(logData); - } - return docList; - } - - public Map getAuditComponents(String clusters) { - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - solrQuery.setRows(0); - SolrUtil.setFacetField(solrQuery, AUDIT_COMPONENT); - QueryResponse queryResponse = auditSolrDao.process(solrQuery); - return responseDataGenerator.generateComponentMetadata(queryResponse, AUDIT_COMPONENT, - uiMappingConfig.getAuditComponentLabels()); - } - - public BarGraphDataListResponse getAuditBarGraphData(AuditBarGraphRequest request) { - SolrQuery solrQuery = conversionService.convert(request, SolrQuery.class); - QueryResponse response = auditSolrDao.process(solrQuery); - return responseDataGenerator.generateBarGraphDataResponseWithRanges(response, SolrConstants.AuditLogConstants.AUDIT_COMPONENT, true); - } - - public BarGraphDataListResponse topResources(TopFieldAuditLogRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - QueryResponse queryResponse = auditSolrDao.process(facetQuery); - return responseDataGenerator.generateSecondLevelBarGraphDataResponse(queryResponse, 0); - } - - public AuditFieldMetadataResponse getAuditLogSchemaMetadata() { - Map> overrides = new HashMap<>(); - List defaults = new ArrayList<>(); - - Map schemaFieldsMap = solrSchemaFieldDao.getSchemaFieldNameMap(LogType.AUDIT); - - Map> fieldLabelMap = uiMappingConfig.getMergedAuditFieldLabelMap(); - Map> fieldVisibleeMap = uiMappingConfig.getMergedAuditFieldVisibleMap(); - Map> fieldExcludeMap = uiMappingConfig.getMergedAuditFieldExcludeMap(); - Map> fieldFilterableExcludeMap = uiMappingConfig.getMergedAuditFieldFilterableExcludesMap(); - - Map commonFieldLabels = uiMappingConfig.getAuditFieldCommonLabels(); - List commonFieldVisibleList = uiMappingConfig.getAuditFieldCommonVisibleList(); - List commonFieldExcludeList = uiMappingConfig.getAuditFieldCommonExcludeList(); - List commonFieldFilterableExcludeList = uiMappingConfig.getAuditFieldCommonExcludeList(); - - Map componentLabels = uiMappingConfig.getAuditComponentLabels(); - - for (Map.Entry component : componentLabels.entrySet()) { - String componentName = component.getKey(); - List auditComponentFieldMetadataList = new ArrayList<>(); - for (Map.Entry fieldEntry : schemaFieldsMap.entrySet()) { - String field = fieldEntry.getKey(); - if (!fieldExcludeMap.containsKey(field) && 
!commonFieldExcludeList.contains(field)) { - String fieldLabel = fieldLabelMap.get(componentName) != null ? fieldLabelMap.get(componentName).get(field): null; - String fallbackedFieldLabel = labelFallbackHandler.fallbackIfRequired(field, fieldLabel, - true, true, true, - uiMappingConfig.getAuditFieldFallbackPrefixes(), - uiMappingConfig.getAuditFieldFallbackSuffixes()); - - Boolean excludeFromFilter = fieldFilterableExcludeMap.get(componentName) != null && fieldFilterableExcludeMap.get(componentName).contains(field); - Boolean visible = fieldVisibleeMap.get(componentName) != null && fieldVisibleeMap.get(componentName).contains(field); - auditComponentFieldMetadataList.add(new FieldMetadata(field, fallbackedFieldLabel, !excludeFromFilter, visible)); - } - overrides.put(componentName, auditComponentFieldMetadataList); - } - } - - for (Map.Entry fieldEntry : schemaFieldsMap.entrySet()) { - String field = fieldEntry.getKey(); - if (!commonFieldExcludeList.contains(field)) { - String fieldLabel = commonFieldLabels.get(field); - Boolean visible = commonFieldVisibleList.contains(field); - Boolean excludeFromFilter = commonFieldFilterableExcludeList.contains(field); - String fallbackedFieldLabel = labelFallbackHandler.fallbackIfRequired(field, fieldLabel, - true, true, true, - uiMappingConfig.getAuditFieldFallbackPrefixes(), uiMappingConfig.getAuditFieldFallbackSuffixes()); - defaults.add(new FieldMetadata(field, fallbackedFieldLabel, !excludeFromFilter, visible)); - } - } - return new AuditFieldMetadataResponse(defaults, overrides); - } - - public BarGraphDataListResponse getServiceLoad(AuditServiceLoadRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - QueryResponse response = auditSolrDao.process(facetQuery); - return responseDataGenerator.generateBarGraphFromFieldFacet(response, AUDIT_COMPONENT); - } - - public Response export(UserExportRequest request) throws TemplateException { - String startTime = request.getFrom(); - String endTime = request.getTo(); - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - - startTime = startTime == null ? "" : startTime; - endTime = endTime == null ? 
"" : "_" + endTime; - - String dataFormat = request.getFormat(); - - try { - QueryResponse queryResponse = auditSolrDao.process(facetQuery); - BarGraphDataListResponse vBarUserDataList = responseDataGenerator.generateSecondLevelBarGraphDataResponse(queryResponse, 0); - BarGraphDataListResponse vBarResourceDataList = responseDataGenerator.generateSecondLevelBarGraphDataResponse(queryResponse, 1); - String data; - if ("text".equals(dataFormat)) { - StringWriter stringWriter = new StringWriter(); - Template template = freemarkerConfiguration.getTemplate(AUDIT_LOG_TEMPLATE); - Map models = new HashMap<>(); - DownloadUtil.fillUserResourcesModel(models, vBarUserDataList, vBarResourceDataList); - template.process(models, stringWriter); - data = stringWriter.toString(); - - } else { - data = "{" + convertObjToString(vBarUserDataList) + "," + convertObjToString(vBarResourceDataList) + "}"; - dataFormat = "json"; - } - String fileName = String.format("Users_Resource%s%s.", startTime, endTime); - File file = File.createTempFile(fileName, dataFormat); - try (FileOutputStream fileOutputStream = new FileOutputStream(file)) { - fileOutputStream.write(data.getBytes()); - } - return Response - .ok(file, MediaType.APPLICATION_OCTET_STREAM) - .header("Content-Disposition", String.format("attachment;filename=%s%s", fileName, dataFormat)) - .build(); - } catch (IOException e) { - throw new UncheckedIOException("Error during download file (audit log) ", e); - } - } - - @Override - protected List convertToSolrBeans(QueryResponse response) { - return new ArrayList<>(response.getBeans(SolrAuditLogData.class)); - } - - @Override - protected AuditLogResponse createLogSearchResponse() { - return new AuditLogResponse(); - } - - public StatusMessage deleteLogs(AuditLogRequest request) { - SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class); - UpdateResponse updateResponse = auditSolrDao.deleteByQuery(solrQuery, "/audit/logs"); - return StatusMessage.with(updateResponse.getStatus()); - } - - public List getClusters() { - return getClusters(auditSolrDao, CLUSTER, "/audit/logs/clusters"); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/EventHistoryManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/EventHistoryManager.java deleted file mode 100644 index 0782ea205a5..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/EventHistoryManager.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.manager; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import org.apache.ambari.logsearch.common.LogSearchContext; -import org.apache.ambari.logsearch.common.MessageEnums; -import org.apache.ambari.logsearch.dao.EventHistorySolrDao; -import org.apache.ambari.logsearch.model.request.impl.EventHistoryRequest; -import org.apache.ambari.logsearch.model.response.EventHistoryData; -import org.apache.ambari.logsearch.model.response.EventHistoryDataListResponse; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.response.FacetField.Count; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrInputDocument; -import org.springframework.core.convert.ConversionService; - -import javax.inject.Inject; -import javax.inject.Named; - -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.USER_NAME; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.VALUES; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.FILTER_NAME; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.ROW_TYPE; -import static org.apache.ambari.logsearch.solr.SolrConstants.EventHistoryConstants.SHARE_NAME_LIST; - -@Named -public class EventHistoryManager extends JsonManagerBase { - - private static final Logger logger = Logger.getLogger(EventHistoryManager.class); - - @Inject - private EventHistorySolrDao eventHistorySolrDao; - @Inject - private ConversionService conversionService; - - public String saveEvent(EventHistoryData eventHistoryData) { - String filterName = eventHistoryData.getFiltername(); - - SolrInputDocument solrInputDoc = new SolrInputDocument(); - if (!isValid(eventHistoryData)) { - throw new MalformedInputException("No FilterName Specified"); - } - - if (isNotUnique(filterName)) { - throw new AlreadyExistsException(String.format("Name '%s' already exists", eventHistoryData.getFiltername())); - } - solrInputDoc.addField(ID, eventHistoryData.getId()); - solrInputDoc.addField(USER_NAME, LogSearchContext.getCurrentUsername()); - solrInputDoc.addField(VALUES, eventHistoryData.getValues()); - solrInputDoc.addField(FILTER_NAME, filterName); - solrInputDoc.addField(ROW_TYPE, eventHistoryData.getRowType()); - List shareNameList = eventHistoryData.getShareNameList(); - if (CollectionUtils.isNotEmpty(shareNameList)) { - solrInputDoc.addField(SHARE_NAME_LIST, shareNameList); - } - - eventHistorySolrDao.addDocs(solrInputDoc); - return convertObjToString(solrInputDoc); - } - - private boolean isNotUnique(String filterName) { - - if (filterName != null) { - SolrQuery solrQuery = new SolrQuery(); - filterName = SolrUtil.makeSearcableString(filterName); - solrQuery.setQuery("*:*"); - solrQuery.addFilterQuery(FILTER_NAME + ":" + filterName); - solrQuery.addFilterQuery(USER_NAME + ":" + LogSearchContext.getCurrentUsername()); - SolrUtil.setRowCount(solrQuery, 0); - 
try { - Long numFound = eventHistorySolrDao.process(solrQuery).getResults().getNumFound(); - if (numFound > 0) { - return true; - } - } catch (SolrException e) { - logger.error("Error while checking if event history data is unique.", e); - } - } - return false; - } - - private boolean isValid(EventHistoryData vHistory) { - return StringUtils.isNotBlank(vHistory.getFiltername()) - && StringUtils.isNotBlank(vHistory.getRowType()) - && StringUtils.isNotBlank(vHistory.getValues()); - } - - public void deleteEvent(String id) { - eventHistorySolrDao.deleteEventHistoryData(id); - } - - @SuppressWarnings("unchecked") - public EventHistoryDataListResponse getEventHistory(EventHistoryRequest request) { - EventHistoryDataListResponse response = new EventHistoryDataListResponse(); - String rowType = request.getRowType(); - if (StringUtils.isBlank(rowType)) { - throw new MalformedInputException("Row type was not specified"); - } - - SolrQuery evemtHistoryQuery = conversionService.convert(request, SolrQuery.class); - evemtHistoryQuery.addFilterQuery(String.format("%s:%s OR %s:%s", USER_NAME, LogSearchContext.getCurrentUsername(), - SHARE_NAME_LIST, LogSearchContext.getCurrentUsername())); - SolrDocumentList solrList = eventHistorySolrDao.process(evemtHistoryQuery).getResults(); - - Collection configList = new ArrayList<>(); - - for (SolrDocument solrDoc : solrList) { - EventHistoryData eventHistoryData = new EventHistoryData(); - eventHistoryData.setFiltername("" + solrDoc.get(FILTER_NAME)); - eventHistoryData.setId("" + solrDoc.get(ID)); - eventHistoryData.setValues("" + solrDoc.get(VALUES)); - eventHistoryData.setRowType("" + solrDoc.get(ROW_TYPE)); - try { - List shareNameList = (List) solrDoc.get(SHARE_NAME_LIST); - eventHistoryData.setShareNameList(shareNameList); - } catch (Exception e) { - // do nothing - } - - eventHistoryData.setUserName("" + solrDoc.get(USER_NAME)); - - configList.add(eventHistoryData); - } - - response.setName("historyList"); - response.setEventHistoryDataList(configList); - - response.setStartIndex(Integer.parseInt(request.getStartIndex())); - response.setPageSize(Integer.parseInt(request.getPageSize())); - - response.setTotalCount(solrList.getNumFound()); - - return response; - - } - - public List getAllUserName() { - List userList = new ArrayList<>(); - SolrQuery userListQuery = new SolrQuery(); - userListQuery.setQuery("*:*"); - SolrUtil.setFacetField(userListQuery, USER_NAME); - QueryResponse queryResponse = eventHistorySolrDao.process(userListQuery); - if (queryResponse == null) { - return userList; - } - List counList = queryResponse.getFacetField(USER_NAME).getValues(); - for (Count cnt : counList) { - String userName = cnt.getName(); - userList.add(userName); - } - return userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/InfoManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/InfoManager.java deleted file mode 100644 index 3aabdbfcdef..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/InfoManager.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.manager; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.common.PropertyDescriptionStorage; -import org.apache.ambari.logsearch.common.ShipperConfigDescriptionStorage; -import org.apache.ambari.logsearch.conf.LogSearchConfigApiConfig; -import org.apache.ambari.logsearch.model.response.PropertyDescriptionData; -import org.apache.ambari.logsearch.model.response.ShipperConfigDescriptionData; -import org.springframework.beans.factory.annotation.Value; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class InfoManager extends JsonManagerBase { - - @Value("${logsearch.app.version:}") - private String logsearchAppVersion; - - @Value("${logsearch.solr.version:}") - private String logsearchSolrVersion; - - @Value("${java.runtime.version}") - private String javaRuntimeVersion; - - @Inject - private AuthPropsConfig authPropsConfig; - - @Inject - private LogSearchConfigApiConfig logSearchConfigApiConfig; - - @Inject - private PropertyDescriptionStorage propertyDescriptionStore; - - @Inject - private ShipperConfigDescriptionStorage shipperConfigDescriptionStore; - - public Map getApplicationInfo() { - Map appMap = new HashMap<>(); - appMap.put("application.version", logsearchAppVersion); - appMap.put("solr.version", logsearchSolrVersion); - appMap.put("java.runtime.version", javaRuntimeVersion); - return appMap; - } - - public Map getAuthMap() { - Map authMap = new HashMap<>(); - authMap.put("external", authPropsConfig.isAuthExternalEnabled()); - authMap.put("file", authPropsConfig.isAuthFileEnabled()); - authMap.put("jwt", authPropsConfig.isAuthJwtEnabled()); - authMap.put("ldap", authPropsConfig.isAuthLdapEnabled()); - authMap.put("simple", authPropsConfig.isAuthSimpleEnabled()); - return authMap; - } - - public Map getFeaturesMap() { - Map featuresMap = new HashMap<>(); - featuresMap.put(LogSearchConstants.AUTH_FEATURE_KEY, getAuthMap()); - featuresMap.put(LogSearchConstants.SHIPPER_CONFIG_API_KEY, logSearchConfigApiConfig.isConfigApiEnabled()); - return featuresMap; - } - - public Map> getPropertyDescriptions() { - return propertyDescriptionStore.getPropertyDescriptions(); - } - - public List getLogSearchPropertyDescriptions(String propertiesFile) { - return getPropertyDescriptions().get(propertiesFile); - } - - public List getLogSearchShipperConfigDescription() { - return shipperConfigDescriptionStore.getShipperConfigDescription(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java deleted file mode 100644 index 94191e0058c..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.manager; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; -import com.google.gson.JsonElement; -import com.google.gson.JsonParseException; -import com.google.gson.JsonPrimitive; -import com.google.gson.JsonSerializationContext; -import com.google.gson.JsonSerializer; - -import java.util.Date; - -public class JsonManagerBase { - - private JsonSerializer jsonDateSerialiazer = null; - private JsonDeserializer jsonDateDeserialiazer = null; - - public JsonManagerBase() { - jsonDateSerialiazer = new JsonSerializer() { - - @Override - public JsonElement serialize(Date paramT, java.lang.reflect.Type paramType, JsonSerializationContext paramJsonSerializationContext) { - return paramT == null ? null : new JsonPrimitive(paramT.getTime()); - } - }; - - jsonDateDeserialiazer = new JsonDeserializer() { - - @Override - public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT, JsonDeserializationContext context) - throws JsonParseException { - return json == null ? null : new Date(json.getAsLong()); - } - - }; - } - - protected String convertObjToString(Object obj) { - if (obj == null) { - return ""; - } - - Gson gson = new GsonBuilder() - .registerTypeAdapter(Date.class, jsonDateSerialiazer) - .registerTypeAdapter(Date.class, jsonDateDeserialiazer).create(); - - return gson.toJson(obj); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/MalformedInputException.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/MalformedInputException.java deleted file mode 100644 index f51b6c65c1d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/MalformedInputException.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.manager; - -public class MalformedInputException extends RuntimeException { - public MalformedInputException(String message) { - super(message); - } - - public MalformedInputException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java deleted file mode 100644 index 9ae1961700d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.manager; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import com.google.common.collect.Lists; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.response.LogData; -import org.apache.ambari.logsearch.model.response.LogSearchResponse; -import org.apache.ambari.logsearch.dao.SolrDaoBase; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.log4j.Logger; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.response.FacetField; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrDocumentList; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.query.SimpleQuery; -import org.springframework.data.solr.core.query.SolrDataQuery; - -public abstract class ManagerBase> extends JsonManagerBase { - private static final Logger logger = Logger.getLogger(ManagerBase.class); - - public ManagerBase() { - super(); - } - - protected SEARCH_RESPONSE getLastPage(SolrDaoBase solrDoaBase, SimpleQuery lastPageQuery, String event) { - int maxRows = lastPageQuery.getRows(); - SEARCH_RESPONSE logResponse = getLogAsPaginationProvided(lastPageQuery, solrDoaBase, event); - Long totalLogs = logResponse.getTotalCount(); - int startIndex = (int)(totalLogs - totalLogs % maxRows); - int numberOfLogsOnLastPage = (int)(totalLogs - startIndex); - logResponse.setStartIndex(startIndex); - logResponse.setTotalCount(totalLogs); - logResponse.setPageSize(maxRows); - List docList = logResponse.getLogList(); - List lastPageDocList = new ArrayList<>(); - logResponse.setLogList(lastPageDocList); - int cnt = 0; - for (LOG_DATA_TYPE doc : docList) { - if (cnt < numberOfLogsOnLastPage) { - lastPageDocList.add(doc); - } - cnt++; - } - Collections.reverse(lastPageDocList); - return logResponse; - } - - protected SEARCH_RESPONSE getLogAsPaginationProvided(SolrDataQuery solrQuery, SolrDaoBase solrDaoBase, String event) { - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(solrQuery); - return getLogAsPaginationProvided(query, solrDaoBase, event); - } - - - protected SEARCH_RESPONSE getLogAsPaginationProvided(SolrQuery solrQuery, SolrDaoBase solrDaoBase, String event) { - QueryResponse response = solrDaoBase.process(solrQuery, event); - SEARCH_RESPONSE logResponse = createLogSearchResponse(); - SolrDocumentList docList = response.getResults(); - logResponse.setTotalCount(docList.getNumFound()); - List serviceLogDataList = convertToSolrBeans(response); - if (!docList.isEmpty()) { - logResponse.setLogList(serviceLogDataList); - logResponse.setStartIndex((int) docList.getStart()); - Integer rowNumber = solrQuery.getRows(); - if (rowNumber == null) { - logger.error("No RowNumber was set in solrQuery"); - return createLogSearchResponse(); - } - logResponse.setPageSize(rowNumber); - } - return logResponse; - } - - protected abstract List convertToSolrBeans(QueryResponse response); - - protected abstract SEARCH_RESPONSE createLogSearchResponse(); - - protected List getClusters(SolrDaoBase solrDaoBase, String clusterField, String event) { - List clusterResponse = Lists.newArrayList(); - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - SolrUtil.setFacetField(solrQuery, clusterField); - SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX); - - QueryResponse response = 
solrDaoBase.process(solrQuery, event); - if (response == null) { - return clusterResponse; - } - List clusterFields = response.getFacetFields(); - if (CollectionUtils.isNotEmpty(clusterFields)) { - FacetField clusterFacets = clusterFields.get(0); - for (FacetField.Count clusterCount : clusterFacets.getValues()) { - clusterResponse.add(clusterCount.getName()); - } - } - return clusterResponse; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/NotFoundException.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/NotFoundException.java deleted file mode 100644 index 7dcf2337e18..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/NotFoundException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.manager; - -public class NotFoundException extends RuntimeException { - public NotFoundException(String message) { - super(message); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java deleted file mode 100644 index 3658257fefd..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java +++ /dev/null @@ -1,612 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.manager; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.CLUSTER; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.SEQUENCE_ID; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.KEY_LOG_MESSAGE; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.StringWriter; -import java.io.UncheckedIOException; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.stream.Collectors; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.ambari.logsearch.common.LabelFallbackHandler; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.common.LogType; -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.conf.UIMappingConfig; -import org.apache.ambari.logsearch.converter.BaseServiceLogRequestQueryConverter; -import org.apache.ambari.logsearch.converter.ServiceLogTruncatedRequestQueryConverter; -import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao; -import org.apache.ambari.logsearch.dao.SolrSchemaFieldDao; -import org.apache.ambari.logsearch.model.metadata.FieldMetadata; -import org.apache.ambari.logsearch.model.metadata.ServiceComponentMetadataWrapper; -import org.apache.ambari.logsearch.model.request.impl.HostLogFilesRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogAggregatedInfoRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentHostRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentLevelRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogExportRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogHostComponentRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogRequest; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest; -import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse; -import org.apache.ambari.logsearch.model.response.CountDataListResponse; -import org.apache.ambari.logsearch.model.response.GraphDataListResponse; -import org.apache.ambari.logsearch.model.response.GroupListResponse; -import org.apache.ambari.logsearch.model.response.HostLogFilesResponse; -import org.apache.ambari.logsearch.model.response.LogData; -import org.apache.ambari.logsearch.model.response.LogListResponse; -import 
org.apache.ambari.logsearch.model.response.NameValueDataListResponse; -import org.apache.ambari.logsearch.model.response.NodeListResponse; -import org.apache.ambari.logsearch.model.response.ServiceLogData; -import org.apache.ambari.logsearch.model.response.ServiceLogResponse; -import org.apache.ambari.logsearch.solr.ResponseDataGenerator; -import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData; -import org.apache.ambari.logsearch.solr.model.SolrHostLogData; -import org.apache.ambari.logsearch.solr.model.SolrServiceLogData; -import org.apache.ambari.logsearch.util.DateUtil; -import org.apache.ambari.logsearch.util.DownloadUtil; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.response.FacetField; -import org.apache.solr.client.solrj.response.FacetField.Count; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.UpdateResponse; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; -import org.springframework.core.convert.ConversionService; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.query.Criteria; -import org.springframework.data.solr.core.query.SimpleFacetQuery; -import org.springframework.data.solr.core.query.SimpleFilterQuery; -import org.springframework.data.solr.core.query.SimpleQuery; -import org.springframework.data.solr.core.query.SimpleStringCriteria; - -import com.google.common.base.Splitter; -import com.google.common.collect.Lists; - -import freemarker.template.Configuration; -import freemarker.template.Template; -import freemarker.template.TemplateException; - -@Named -public class ServiceLogsManager extends ManagerBase { - private static final Logger logger = Logger.getLogger(ServiceLogsManager.class); - - private static final String SERVICE_LOG_TEMPLATE = "service_log_txt.ftl"; - - @Inject - private ServiceLogsSolrDao serviceLogsSolrDao; - @Inject - private ResponseDataGenerator responseDataGenerator; - @Inject - private ConversionService conversionService; - @Inject - private Configuration freemarkerConfiguration; - @Inject - private SolrSchemaFieldDao solrSchemaFieldDao; - @Inject - private UIMappingConfig uiMappingConfig; - @Inject - private LabelFallbackHandler labelFallbackHandler; - - public ServiceLogResponse searchLogs(ServiceLogRequest request) { - String event = "/service/logs"; - String keyword = request.getKeyWord(); - Boolean isLastPage = request.isLastPage(); - SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class); - if (StringUtils.isNotBlank(keyword)) { - return (ServiceLogResponse) getPageByKeyword(request, event); - } else if (isLastPage) { - ServiceLogResponse logResponse = getLastPage(serviceLogsSolrDao, solrQuery, event); - if (logResponse == null){ - logResponse = new ServiceLogResponse(); - } - return logResponse; - } else { - ServiceLogResponse response = getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao, event); - if (response.getTotalCount() > 0 && CollectionUtils.isEmpty(response.getLogList())) { - request.setLastPage(true); - solrQuery = conversionService.convert(request, SimpleQuery.class); - ServiceLogResponse lastResponse = getLastPage(serviceLogsSolrDao, solrQuery, event); - if (lastResponse != null){ - response = lastResponse; - 
} - } - return response; - } - } - - public GroupListResponse getHosts(String clusters) { - return getFields(HOST, clusters, SolrHostLogData.class); - } - - public GraphDataListResponse getAggregatedInfo(ServiceLogAggregatedInfoRequest request) { - SimpleQuery solrDataQuery = new BaseServiceLogRequestQueryConverter().convert(request); - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(solrDataQuery); - String hierarchy = String.format("%s,%s,%s", HOST, COMPONENT, LEVEL); - solrQuery.setQuery("*:*"); - SolrUtil.setFacetPivot(solrQuery, 1, hierarchy); - QueryResponse response = serviceLogsSolrDao.process(solrQuery); - return responseDataGenerator.generateSimpleGraphResponse(response, hierarchy); - } - - private CountDataListResponse getFieldCount(String field, String clusters) { - SimpleFacetQuery facetQuery = conversionService.convert(field, SimpleFacetQuery.class); - if (StringUtils.isNotEmpty(clusters)) { - List clusterFilterList = Splitter.on(",").splitToList(clusters); - facetQuery.addFilterQuery(new SimpleFilterQuery(new Criteria(CLUSTER).in(clusterFilterList))); - } - return responseDataGenerator.generateCountResponseByField(serviceLogsSolrDao.process(facetQuery), field); - } - - public CountDataListResponse getComponentsCount(String clusters) { - return getFieldCount(COMPONENT, clusters); - } - - public CountDataListResponse getHostsCount(String clusters) { - return getFieldCount(HOST, clusters); - } - - public NodeListResponse getTreeExtension(ServiceLogHostComponentRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(facetQuery); - String hostName = request.getHostName() == null ? "" : request.getHostName(); - if (StringUtils.isNotBlank(hostName)){ - solrQuery.addFilterQuery(String.format("%s:*%s*", HOST, hostName)); - } - QueryResponse response = serviceLogsSolrDao.process(solrQuery, "/service/logs/tree"); - String firstHierarchy = String.format("%s,%s,%s", HOST, COMPONENT, LEVEL); - String secondHierarchy = String.format("%s,%s", HOST, LEVEL); - return responseDataGenerator.generateServiceNodeTreeFromFacetResponse(response, firstHierarchy, secondHierarchy, - LogSearchConstants.HOST, LogSearchConstants.COMPONENT); - } - - public NodeListResponse getHostListByComponent(ServiceLogComponentHostRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(facetQuery); - solrQuery.setFacetSort(request.getSortBy() == null ? HOST: request.getSortBy()); - - NodeListResponse list = new NodeListResponse(); - String componentName = request.getComponentName() == null ? 
"" : request.getComponentName(); - if (StringUtils.isNotBlank(componentName)){ - solrQuery.addFilterQuery(COMPONENT + ":" - + componentName); - QueryResponse response = serviceLogsSolrDao.process(solrQuery, "/service/logs/hosts/components"); - String firstHierarchy = String.format("%s,%s,%s", COMPONENT, HOST, LEVEL); - String secondHierarchy = String.format("%s,%s", COMPONENT, LEVEL); - return responseDataGenerator.generateServiceNodeTreeFromFacetResponse(response, firstHierarchy, secondHierarchy, - LogSearchConstants.COMPONENT, LogSearchConstants.HOST); - } else { - return list; - } - } - - public NameValueDataListResponse getLogsLevelCount(ServiceLogLevelCountRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - QueryResponse response = serviceLogsSolrDao.process(facetQuery, "/service/logs/levels/counts"); - return responseDataGenerator.getNameValueDataListResponseWithDefaults(response, LogSearchConstants.SUPPORTED_LOG_LEVELS, false); - } - - public BarGraphDataListResponse getHistogramData(ServiceGraphRequest request) { - SolrQuery solrQuery = conversionService.convert(request, SolrQuery.class); - QueryResponse response = serviceLogsSolrDao.process(solrQuery, "/service/logs/histogram"); - return responseDataGenerator.generateBarGraphDataResponseWithRanges(response, LEVEL, true); - } - - private LogListResponse getPageByKeyword(ServiceLogRequest request, String event) { - String defaultChoice = "0"; - String keyword = request.getKeyWord(); - if (StringUtils.isBlank(keyword)) { - throw new MalformedInputException("Keyword was not given"); - } - - boolean isNext = !defaultChoice.equals(request.getKeywordType()); // 1 is next, 0 is previous - return getPageForKeywordByType(request, keyword, isNext, event); - } - - private LogListResponse getPageForKeywordByType(ServiceLogRequest request, String keyword, boolean isNext, String event) { - String fromDate = request.getFrom(); // store start & end dates - String toDate = request.getTo(); - boolean timeAscending = LogSearchConstants.ASCENDING_ORDER.equals(request.getSortType()); - - int currentPageNumber = Integer.parseInt(request.getPage()); - int maxRows = Integer.parseInt(request.getPageSize()); - Date logDate = getDocDateFromNextOrLastPage(request, keyword, isNext, currentPageNumber, maxRows); - if (logDate == null) { - throw new MalformedInputException(String.format("The keyword \"%s\" was not found", keyword)); - } - - String nextOrPreviousPageDate = DateUtil.convertDateWithMillisecondsToSolrDate(logDate); - SolrServiceLogData firstKeywordLog = getNextHitForKeyword(request, keyword, isNext, event, timeAscending, nextOrPreviousPageDate); - - long keywordSeqNum = firstKeywordLog.getSeqNum(); - String keywordLogtime = DateUtil.convertDateWithMillisecondsToSolrDate(firstKeywordLog.getLogTime()); - - long numberOfDateDuplicates = countNumberOfDuplicates(request, isNext, keywordSeqNum, keywordLogtime); - - long numberOfLogsUntilFound = getNumberOfLogsUntilFound(request, fromDate, toDate, timeAscending, keywordLogtime, numberOfDateDuplicates); - int start = (int) ((numberOfLogsUntilFound / maxRows)); - - request.setFrom(fromDate); - request.setTo(toDate); - request.setPage(String.valueOf(start)); - SolrQuery keywordNextPageQuery = new DefaultQueryParser().doConstructSolrQuery(conversionService.convert(request, SimpleQuery.class)); - return getLogAsPaginationProvided(keywordNextPageQuery, serviceLogsSolrDao, event); - } - - private Long getNumberOfLogsUntilFound(ServiceLogRequest request, 
String fromDate, String toDate, boolean timeAscending, - String keywordLogtime, long numberOfDateDuplicates) { - if (!timeAscending) { - request.setTo(toDate); - request.setFrom(keywordLogtime); - } else { - request.setTo(keywordLogtime); - request.setFrom(fromDate); - } - SimpleQuery rangeQuery = conversionService.convert(request, SimpleQuery.class); - return serviceLogsSolrDao.count(rangeQuery) - numberOfDateDuplicates; - } - - private long countNumberOfDuplicates(ServiceLogRequest request, boolean isNext, long keywordSeqNum, String keywordLogtime) { - request.setFrom(keywordLogtime); - request.setTo(keywordLogtime); - SimpleQuery duplicationsQuery = conversionService.convert(request, SimpleQuery.class); - if (isNext) { - duplicationsQuery.addFilterQuery(new SimpleFilterQuery(new SimpleStringCriteria(String.format("%s:[* TO %d]", SEQUENCE_ID, keywordSeqNum - 1)))); - } else { - duplicationsQuery.addFilterQuery(new SimpleFilterQuery(new SimpleStringCriteria(String.format("%s:[%d TO *]", SEQUENCE_ID, keywordSeqNum + 1)))); - } - return serviceLogsSolrDao.count(duplicationsQuery); - } - - private SolrServiceLogData getNextHitForKeyword(ServiceLogRequest request, String keyword, boolean isNext, String event, boolean timeAscending, String nextOrPreviousPageDate) { - if (hasNextOrAscOrder(isNext, timeAscending)) { - request.setTo(nextOrPreviousPageDate); - } else { - request.setFrom(nextOrPreviousPageDate); - } - SimpleQuery keywordNextQuery = conversionService.convert(request, SimpleQuery.class); - keywordNextQuery.addFilterQuery(new SimpleFilterQuery(new Criteria(KEY_LOG_MESSAGE).contains(keyword))); - keywordNextQuery.setRows(1); - SolrQuery kewordNextSolrQuery = new DefaultQueryParser().doConstructSolrQuery(keywordNextQuery); - kewordNextSolrQuery.setStart(0); - if (hasNextOrAscOrder(isNext, timeAscending)) { - kewordNextSolrQuery.setSort(LOGTIME, SolrQuery.ORDER.desc); - } else { - kewordNextSolrQuery.setSort(LOGTIME, SolrQuery.ORDER.asc); - } - kewordNextSolrQuery.addSort(SEQUENCE_ID, SolrQuery.ORDER.desc); - QueryResponse queryResponse = serviceLogsSolrDao.process(kewordNextSolrQuery, event); - if (queryResponse == null) { - throw new NotFoundException(String.format("The keyword \"%s\" was not found", keyword)); - } - List solrServiceLogDataList = queryResponse.getBeans(SolrServiceLogData.class); - if (!CollectionUtils.isNotEmpty(solrServiceLogDataList)) { - throw new NotFoundException(String.format("The keyword \"%s\" was not found", keyword)); - } - return solrServiceLogDataList.get(0); - } - - private Date getDocDateFromNextOrLastPage(ServiceLogRequest request, String keyword, boolean isNext, int currentPageNumber, int maxRows) { - int lastOrFirstLogIndex; - if (isNext) { - lastOrFirstLogIndex = ((currentPageNumber + 1) * maxRows); - } else { - if (currentPageNumber == 0) { - throw new NotFoundException("This is the first Page"); - } - lastOrFirstLogIndex = (currentPageNumber * maxRows) - 1; - } - SimpleQuery sq = conversionService.convert(request, SimpleQuery.class); - SolrQuery nextPageLogTimeQuery = new DefaultQueryParser().doConstructSolrQuery(sq); - nextPageLogTimeQuery.remove("start"); - nextPageLogTimeQuery.remove("rows"); - nextPageLogTimeQuery.setStart(lastOrFirstLogIndex); - nextPageLogTimeQuery.setRows(1); - - QueryResponse queryResponse = serviceLogsSolrDao.process(nextPageLogTimeQuery); - if (queryResponse == null) { - throw new MalformedInputException(String.format("Cannot process next page query for \"%s\" ", keyword)); - } - SolrDocumentList docList = 
queryResponse.getResults(); - if (docList == null || docList.isEmpty()) { - throw new MalformedInputException(String.format("Next page element for \"%s\" is not found", keyword)); - } - - SolrDocument solrDoc = docList.get(0); - return (Date) solrDoc.get(LOGTIME); - } - - private boolean hasNextOrAscOrder(boolean isNext, boolean timeAscending) { - return isNext && !timeAscending || !isNext && timeAscending; - } - - public Response export(ServiceLogExportRequest request) { - String defaultFormat = "txt"; - SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class); - String format = request.getFormat() != null && defaultFormat.equalsIgnoreCase(request.getFormat()) ? ".txt" : ".json"; - DateTimeFormatter fileNameFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"); - String fileName = "Component_Logs_" + fileNameFormat.format(LocalDateTime.now()); - - try { - QueryResponse response = serviceLogsSolrDao.process(solrQuery); - SolrDocumentList docList = response.getResults(); - String textToSave; - - if (".txt".equals(format.toLowerCase(Locale.ENGLISH))) { - String utcOffset = StringUtils.isBlank(request.getUtcOffset()) ? "+0" : request.getUtcOffset(); - DateTimeFormatter inputDateFormat = DateTimeFormatter.ofPattern(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z); - DateTimeFormatter outputDateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss,SSSX"); - OffsetDateTime from = LocalDateTime.parse(request.getFrom(), inputDateFormat).atOffset(ZoneOffset.of(utcOffset)); - OffsetDateTime to = LocalDateTime.parse(request.getTo(), inputDateFormat).atOffset(ZoneOffset.of(utcOffset)); - - Template template = freemarkerConfiguration.getTemplate(SERVICE_LOG_TEMPLATE); - Map models = new HashMap<>(); - DownloadUtil.fillModelsForLogFile(docList, models, request, format, outputDateFormat.format(from), outputDateFormat.format(to)); - StringWriter stringWriter = new StringWriter(); - template.process(models, stringWriter); - textToSave = stringWriter.toString(); - } else if (".json".equals(format.toLowerCase(Locale.ENGLISH))) { - textToSave = convertObjToString(docList); - } else { - throw new UnsupportedFormatException(String.format("Unsupported format %s Either should be json or text", format.toLowerCase(Locale.ENGLISH))); - } - File file = File.createTempFile(fileName, format); - try (FileOutputStream fis = new FileOutputStream(file)) { - fis.write(textToSave.getBytes()); - } - return Response - .ok(file, MediaType.APPLICATION_OCTET_STREAM) - .header("Content-Disposition", "attachment;filename=" + fileName + format) - .build(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } catch (TemplateException e) { - throw new RuntimeException("Error while rendering freemarker template!", e); - } - } - - public NodeListResponse getComponentListWithLevelCounts(ServiceLogComponentLevelRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(facetQuery); - solrQuery.setFacetSort(StringUtils.isEmpty(request.getSortBy()) ? 
COMPONENT: request.getSortBy()); - QueryResponse response = serviceLogsSolrDao.process(facetQuery, "/service/logs/components/levels/counts"); - return responseDataGenerator.generateOneLevelServiceNodeTree(response, String.format("%s,%s", COMPONENT, LEVEL)); - } - - public List getServiceLogsSchemaFieldsName() { - Map schemaFieldsMap = solrSchemaFieldDao.getSchemaFieldNameMap(LogType.SERVICE); - return schemaFieldsMap - .entrySet() - .stream() - .filter(e -> !uiMappingConfig.getServiceFieldExcludeList().contains(e.getKey())) - .map(e -> - new FieldMetadata( - e.getKey(), - labelFallbackHandler.fallbackIfRequired( - e.getKey(), uiMappingConfig.getServiceFieldLabels().get(e.getKey()), - true, false, true, - uiMappingConfig.getServiceFieldFallbackPrefixes(), - uiMappingConfig.getServiceFieldFallbackSuffixes()), - !uiMappingConfig.getServiceFieldFilterableExcludesList().contains(e.getKey()), - uiMappingConfig.getServiceFieldVisibleList().contains(e.getKey()))) - .collect(Collectors.toList()); - } - - public BarGraphDataListResponse getAnyGraphCountData(ServiceAnyGraphRequest request) { - SimpleFacetQuery solrDataQuery = conversionService.convert(request, SimpleFacetQuery.class); - QueryResponse queryResponse = serviceLogsSolrDao.process(solrDataQuery); - return responseDataGenerator.getGraphDataWithDefaults(queryResponse, LEVEL, LogSearchConstants.SUPPORTED_LOG_LEVELS); - } - - public ServiceLogResponse getAfterBeforeLogs(ServiceLogTruncatedRequest request) { - ServiceLogResponse logResponse = new ServiceLogResponse(); - List docList; - String scrollType = request.getScrollType() != null ? request.getScrollType() : ""; - - String logTime = null; - String sequenceId = null; - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - solrQuery.setRows(1); - solrQuery.addFilterQuery(String.format("%s:%s", ID, request.getId())); - QueryResponse response = serviceLogsSolrDao.process(solrQuery); - if (response == null) { - return logResponse; - } - docList = convertToSolrBeans(response); - if (docList != null && !docList.isEmpty()) { - Date date = docList.get(0).getLogTime(); - logTime = DateUtil.convertDateWithMillisecondsToSolrDate(date); - sequenceId = docList.get(0).getSeqNum().toString(); - } - if (StringUtils.isBlank(logTime)) { - return logResponse; - } - if (LogSearchConstants.SCROLL_TYPE_BEFORE.equals(scrollType) || LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) { - ServiceLogResponse beforeAfterResponse = whenScroll(request, logTime, sequenceId, scrollType); - if (beforeAfterResponse.getLogList() == null) { - return logResponse; - } - List solrDocList = new ArrayList<>(beforeAfterResponse.getLogList()); - logResponse.setLogList(solrDocList); - return logResponse; - - } else { - logResponse = new ServiceLogResponse(); - List initial = new ArrayList<>(); - List before = whenScroll(request, logTime, sequenceId, LogSearchConstants.SCROLL_TYPE_BEFORE).getLogList(); - List after = whenScroll(request, logTime, sequenceId, LogSearchConstants.SCROLL_TYPE_AFTER).getLogList(); - if (before != null && !before.isEmpty()) { - initial.addAll(Lists.reverse(before)); - } - initial.add(docList.get(0)); - if (after != null && !after.isEmpty()) { - initial.addAll(after); - } - logResponse.setLogList(initial); - return logResponse; - } - } - - private ServiceLogResponse whenScroll(ServiceLogTruncatedRequest request, String logTime, String sequenceId, String afterOrBefore) { - request.setScrollType(afterOrBefore); - ServiceLogTruncatedRequestQueryConverter converter = new 
ServiceLogTruncatedRequestQueryConverter(); - converter.setLogTime(logTime); - converter.setSequenceId(sequenceId); - return getLogAsPaginationProvided(converter.convert(request), serviceLogsSolrDao, "service/logs/truncated"); - } - - @Override - protected List convertToSolrBeans(QueryResponse response) { - return new ArrayList<>(response.getBeans(SolrServiceLogData.class)); - } - - @Override - protected ServiceLogResponse createLogSearchResponse() { - return new ServiceLogResponse(); - } - - private List getLogDataListByFieldType(Class clazz, QueryResponse response, List fieldList) { - List groupList = getComponentBeans(clazz, response); - for (Count cnt : fieldList) { - T logData = createNewFieldByType(clazz, cnt); - groupList.add(logData); - } - return groupList; - } - - private List getComponentBeans(Class clazz, QueryResponse response) { - if (clazz.isAssignableFrom(SolrHostLogData.class) || clazz.isAssignableFrom(SolrComponentTypeLogData.class)) { - return response.getBeans(clazz); - } else { - throw new UnsupportedOperationException(); - } - } - - private GroupListResponse getFields(String field, String clusters, Class clazz) { - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - SolrUtil.addListFilterToSolrQuery(solrQuery, CLUSTER, clusters); - GroupListResponse collection = new GroupListResponse(); - SolrUtil.setFacetField(solrQuery, field); - SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX); - QueryResponse response = serviceLogsSolrDao.process(solrQuery); - if (response == null) { - return collection; - } - FacetField facetField = response.getFacetField(field); - if (facetField == null) { - return collection; - } - List fieldList = facetField.getValues(); - if (fieldList == null) { - return collection; - } - SolrDocumentList docList = response.getResults(); - if (docList == null) { - return collection; - } - List groupList = new ArrayList<>(getLogDataListByFieldType(clazz, response, fieldList)); - - collection.setGroupList(groupList); - if (!docList.isEmpty()) { - collection.setStartIndex((int) docList.getStart()); - collection.setTotalCount(docList.getNumFound()); - } - return collection; - } - - @SuppressWarnings("unchecked") - private T createNewFieldByType(Class clazz, Count count) { - String temp = count.getName(); - LogData result; - if (clazz.equals(SolrHostLogData.class)) { - result = new SolrHostLogData(); - ((SolrHostLogData)result).setHost(temp); - } else if (clazz.equals(SolrComponentTypeLogData.class)) { - result = new SolrComponentTypeLogData(); - ((SolrComponentTypeLogData)result).setType(temp); - } else { - throw new UnsupportedOperationException(); - } - - return (T)result; - } - - public HostLogFilesResponse getHostLogFileData(HostLogFilesRequest request) { - SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class); - QueryResponse queryResponse = serviceLogsSolrDao.process(facetQuery, "/service/logs/files"); - return responseDataGenerator.generateHostLogFilesResponse(queryResponse); - } - - public StatusMessage deleteLogs(ServiceLogRequest request) { - SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class); - UpdateResponse updateResponse = serviceLogsSolrDao.deleteByQuery(solrQuery, "/service/logs"); - return StatusMessage.with(updateResponse.getStatus()); - } - - public List getClusters() { - return getClusters(serviceLogsSolrDao, CLUSTER, "/service/logs/clusters"); - } - - - public ServiceComponentMetadataWrapper getComponentMetadata(String clusters) { - String 
pivotFields = COMPONENT + ",group"; - SolrQuery solrQuery = new SolrQuery(); - solrQuery.setQuery("*:*"); - solrQuery.setRows(0); - solrQuery.set("facet", true); - solrQuery.set("facet.pivot", pivotFields); - SolrUtil.addListFilterToSolrQuery(solrQuery, CLUSTER, clusters); - QueryResponse queryResponse = serviceLogsSolrDao.process(solrQuery, "/serivce/logs/components"); - return responseDataGenerator.generateGroupedComponentMetadataResponse( - queryResponse, pivotFields, uiMappingConfig.getServiceGroupLabels(), uiMappingConfig.getServiceComponentLabels()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java deleted file mode 100644 index e8b699e1963..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
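getComponentMetadata() above drives the component/group tree from a Solr pivot facet (facet.pivot over the component and group fields) rather than from stored metadata. Below is a minimal sketch of that query pattern with plain SolrJ, assuming a reachable core named hadoop_logs and assuming the component field is called type; the DAO and response-generator plumbing of the deleted class are left out.

import java.util.List;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.PivotField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.util.NamedList;

public class ComponentPivotSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical core URL; the real collection name comes from Log Search configuration.
    try (SolrClient solr = new HttpSolrClient.Builder("http://localhost:8983/solr/hadoop_logs").build()) {
      SolrQuery query = new SolrQuery("*:*");
      query.setRows(0);                          // only facet counts are needed, no documents
      query.set("facet", true);
      query.set("facet.pivot", "type,group");    // component field first, then its group
      QueryResponse response = solr.query(query);

      NamedList<List<PivotField>> pivots = response.getFacetPivot();
      for (PivotField component : pivots.get("type,group")) {
        System.out.println(component.getValue() + " -> " + component.getCount());
        if (component.getPivot() != null) {
          for (PivotField group : component.getPivot()) {
            System.out.println("  group " + group.getValue() + " -> " + group.getCount());
          }
        }
      }
    }
  }
}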
- */ -package org.apache.ambari.logsearch.manager; - -import org.apache.ambari.logsearch.common.LogSearchContext; -import org.apache.ambari.logsearch.web.model.User; -import org.apache.log4j.Logger; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.context.SecurityContextHolder; -import org.springframework.security.web.authentication.WebAuthenticationDetails; - -import javax.inject.Named; - -@Named -public class SessionManager { - - private static final Logger logger = Logger.getLogger(SessionManager.class); - - public SessionManager() { - logger.debug("SessionManager created"); - } - - public User processSuccessLogin() { - boolean newSessionCreation = true; - Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); - WebAuthenticationDetails details = (WebAuthenticationDetails) authentication.getDetails(); - String currentLoginId = authentication.getName(); - LogSearchContext context = LogSearchContext.getContext(); - User user = context.getUser(); - if (user != null) { - if (validateUser(user, currentLoginId)) { - newSessionCreation = false; - } - } - // - if (newSessionCreation) { - user = new User(); - user.setUsername(currentLoginId); - if (details != null) { - logger.info("Login Success: loginId=" + currentLoginId + ", sessionId=" + details.getSessionId() - + ", requestId=" + details.getRemoteAddress()); - } else { - logger.info("Login Success: loginId=" + currentLoginId + ", msaSessionId=" + ", details is null"); - } - - } - - return user; - } - - private boolean validateUser(User user, String currentUsername) { - if (currentUsername.equalsIgnoreCase(user.getUsername())) { - return true; - } else { - logger.info("loginId doesn't match loginId from HTTPSession. Will create new session. loginId=" - + currentUsername + ", user=" + user, new Exception()); - return false; - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java deleted file mode 100644 index 6119bb224fc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
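SessionManager.processSuccessLogin() above reads the current login purely from Spring Security's thread-bound context. A minimal sketch of that lookup in isolation, assuming the Spring Security core and web modules are on the classpath; the test token is only set so the snippet runs outside a real web request.

import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.web.authentication.WebAuthenticationDetails;

public class CurrentUserSketch {
  public static void main(String[] args) {
    // Simulate a completed authentication; in the server this is populated by the filter chain.
    SecurityContextHolder.getContext()
        .setAuthentication(new UsernamePasswordAuthenticationToken("admin", "n/a"));

    Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
    String loginId = authentication.getName();

    // Details are only a WebAuthenticationDetails when the request went through the web filter.
    Object details = authentication.getDetails();
    if (details instanceof WebAuthenticationDetails) {
      WebAuthenticationDetails webDetails = (WebAuthenticationDetails) details;
      System.out.println(loginId + " session=" + webDetails.getSessionId()
          + " remote=" + webDetails.getRemoteAddress());
    } else {
      System.out.println(loginId + " (no web details attached)");
    }
  }
}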
- */ - -package org.apache.ambari.logsearch.manager; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.ambari.logfeeder.common.LogEntryParseTester; -import org.apache.ambari.logsearch.conf.LogSearchConfigApiConfig; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.configurer.LogSearchConfigConfigurer; -import org.apache.ambari.logsearch.configurer.LogLevelManagerFilterConfigurer; -import org.apache.ambari.logsearch.model.common.LSServerInputConfig; -import org.apache.ambari.logsearch.model.common.LSServerLogLevelFilterMap; -import org.apache.log4j.Logger; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -@Named -public class ShipperConfigManager extends JsonManagerBase { - - private static final Logger logger = Logger.getLogger(ShipperConfigManager.class); - - @Inject - private LogSearchConfigApiConfig logSearchConfigApiConfig; - - @Inject - private LogSearchConfigConfigurer logSearchConfigConfigurer; - - @Inject - private LogLevelManagerFilterConfigurer logLevelFilterConfigurer; - - public List getServices(String clusterName) { - return logSearchConfigConfigurer.getConfig().getServices(clusterName); - } - - public LSServerInputConfig getInputConfig(String clusterName, String serviceName) { - InputConfig inputConfig = logSearchConfigConfigurer.getConfig().getInputConfig(clusterName, serviceName); - return new LSServerInputConfig(inputConfig); - } - - public Response createInputConfig(String clusterName, String serviceName, LSServerInputConfig inputConfig) { - try { - if (logSearchConfigConfigurer.getConfig().inputConfigExists(clusterName, serviceName)) { - return Response.serverError() - .type(MediaType.APPLICATION_JSON) - .entity(ImmutableMap.of("errorMessage", "Input config already exists for service " + serviceName)) - .build(); - } - - logSearchConfigConfigurer.getConfig().createInputConfig(clusterName, serviceName, new ObjectMapper().writeValueAsString(inputConfig)); - return Response.ok().build(); - } catch (Exception e) { - logger.warn("Could not create input config", e); - return Response.serverError().build(); - } - } - - public Response setInputConfig(String clusterName, String serviceName, LSServerInputConfig inputConfig) { - try { - if (!logSearchConfigConfigurer.getConfig().inputConfigExists(clusterName, serviceName)) { - return Response.serverError() - .type(MediaType.APPLICATION_JSON) - .entity(ImmutableMap.of("errorMessage", "Input config doesn't exist for service " + serviceName)) - .build(); - } - - logSearchConfigConfigurer.getConfig().setInputConfig(clusterName, serviceName, new ObjectMapper().writeValueAsString(inputConfig)); - return Response.ok().build(); - } catch (Exception e) { - logger.warn("Could not update input config", e); - return Response.serverError().build(); - } - } - - public Response testShipperConfig(String shipperConfig, String logId, String testEntry, String clusterName) { - try { - LSServerInputConfig inputConfigValidate = new ObjectMapper().readValue(shipperConfig, LSServerInputConfig.class); - Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); - Set> violations = validator.validate(inputConfigValidate); - if 
(!violations.isEmpty()) { - throw new IllegalArgumentException("Error validating shipper config:\n" + violations); - } - String globalConfigs = logSearchConfigConfigurer.getConfig().getGlobalConfigs(clusterName); - LogEntryParseTester tester = new LogEntryParseTester(testEntry, shipperConfig, globalConfigs, logId); - Map resultEntrty = tester.parse(); - return Response.ok().entity(resultEntrty).build(); - } catch (Exception e) { - Map errorResponse = ImmutableMap.of("errorMessage", (Object)e.toString()); - return Response.serverError().entity(errorResponse).build(); - } - } - - public LSServerLogLevelFilterMap getLogLevelFilters(String clusterName) { - if (logSearchConfigApiConfig.isSolrFilterStorage()) { - return new LSServerLogLevelFilterMap(logLevelFilterConfigurer.getLogLevelFilterManagerSolr().getLogLevelFilters(clusterName)); - } else if (logSearchConfigApiConfig.isZkFilterStorage()) { - return new LSServerLogLevelFilterMap(logLevelFilterConfigurer.getLogLevelFilterManagerZK().getLogLevelFilters(clusterName)); - } else { - return new LSServerLogLevelFilterMap(logSearchConfigConfigurer.getConfig().getLogLevelFilterManager().getLogLevelFilters(clusterName)); - } - } - - public Response setLogLevelFilters(String clusterName, LSServerLogLevelFilterMap request) { - try { - if (logSearchConfigApiConfig.isSolrFilterStorage()) { - logLevelFilterConfigurer.getLogLevelFilterManagerSolr().setLogLevelFilters(clusterName, request.convertToApi()); - } else if (logSearchConfigApiConfig.isZkFilterStorage()) { - logLevelFilterConfigurer.getLogLevelFilterManagerZK().setLogLevelFilters(clusterName, request.convertToApi()); - } else { - logSearchConfigConfigurer.getConfig().getLogLevelFilterManager().setLogLevelFilters(clusterName, request.convertToApi()); - } - return Response.ok().build(); - } catch (Exception e) { - logger.warn("Could not update log level filters", e); - return Response.serverError().build(); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/UnsupportedFormatException.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/UnsupportedFormatException.java deleted file mode 100644 index 3588cbae035..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/UnsupportedFormatException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
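testShipperConfig() above deserializes the posted shipper config with Jackson and rejects it via Bean Validation before handing it to LogEntryParseTester. A minimal sketch of that validate-after-deserialize step, using a hypothetical ShipperConfig bean instead of the real LSServerInputConfig and assuming Jackson plus a javax.validation provider such as Hibernate Validator on the classpath.

import java.util.Set;

import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.constraints.NotNull;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ValidateConfigSketch {

  // Hypothetical stand-in for the real input config model.
  public static class ShipperConfig {
    @NotNull
    public String type;
    @NotNull
    public String path;
  }

  public static void main(String[] args) throws Exception {
    String json = "{\"type\":\"ambari_server\"}";                  // "path" is missing on purpose
    ShipperConfig config = new ObjectMapper().readValue(json, ShipperConfig.class);

    Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
    Set<ConstraintViolation<ShipperConfig>> violations = validator.validate(config);
    if (!violations.isEmpty()) {
      // The manager wraps this into an IllegalArgumentException and a JSON error response.
      throw new IllegalArgumentException("Error validating shipper config:\n" + violations);
    }
  }
}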
- */ -package org.apache.ambari.logsearch.manager; - -public class UnsupportedFormatException extends RuntimeException { - public UnsupportedFormatException(String message) { - super(message); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java deleted file mode 100644 index 11c4f70bf87..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.Conditions; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerConditions { - @Valid - @NotNull - private LSServerFields fields; - - public LSServerConditions() {} - - public LSServerConditions(Conditions conditions) { - this.fields = new LSServerFields(conditions.getFields()); - } - - public LSServerFields getFields() { - return fields; - } - - public void setFields(LSServerFields fields) { - this.fields = fields; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java deleted file mode 100644 index dcaadb69c29..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.Set; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.Fields; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerFields { - @NotNull - private Set type; - - public LSServerFields() { - } - - public LSServerFields(Fields fields) { - this.type = fields.getType(); - } - - public Set getType() { - return type; - } - - public void setType(Set type) { - this.type = type; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java deleted file mode 100644 index 72cc72b2a3e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonInclude.Include; - -import io.swagger.annotations.ApiModel; - -@ApiModel -@JsonInclude(Include.NON_NULL) -public abstract class LSServerFilter { - @NotNull - private String filter; - - @Valid - @NotNull - private LSServerConditions conditions; - - @JsonProperty("sort_order") - private Integer sortOrder; - - @JsonProperty("source_field") - private String sourceField; - - @JsonProperty("remove_source_field") - private Boolean removeSourceField; - - @Valid - @JsonProperty("post_map_values") - private Map postMapValues; - - @JsonProperty("is_enabled") - private Boolean isEnabled; - - public LSServerFilter() {} - - public LSServerFilter(FilterDescriptor filterDescriptor) { - this.filter = filterDescriptor.getFilter(); - this.conditions = new LSServerConditions(filterDescriptor.getConditions()); - this.sortOrder = filterDescriptor.getSortOrder(); - this.sourceField = filterDescriptor.getSourceField(); - this.removeSourceField = filterDescriptor.isRemoveSourceField(); - - if (filterDescriptor.getPostMapValues() != null) { - this.postMapValues = new HashMap(); - for (Map.Entry> e : filterDescriptor.getPostMapValues().entrySet()) { - LSServerPostMapValuesList lsServerPostMapValuesList = new LSServerPostMapValuesList(e.getValue()); - postMapValues.put(e.getKey(), lsServerPostMapValuesList); - } - } - - this.isEnabled = filterDescriptor.isEnabled(); - } - - public String getFilter() { - return filter; - } - - public void setFilter(String filter) { - this.filter = filter; - } - - public LSServerConditions getConditions() { - return conditions; - } - - public void setConditions(LSServerConditions conditions) { - this.conditions = conditions; - } - - public Integer getSortOrder() { - return sortOrder; - } - - public void setSortOrder(Integer sortOrder) { - this.sortOrder = sortOrder; - } - - public String getSourceField() { - return sourceField; - } - - public void setSourceField(String sourceField) { - this.sourceField = sourceField; - } - - public Boolean getRemoveSourceField() { - return removeSourceField; - } - - public void setRemoveSourceField(Boolean removeSourceField) { - this.removeSourceField = removeSourceField; - } - - public Map getPostMapValues() { - return postMapValues; - } - - public void setPostMapValues(Map postMapValues) { - this.postMapValues = postMapValues; - } - - public Boolean getIsEnabled() { - return isEnabled; - } - - public void setIsEnabled(Boolean isEnabled) { - this.isEnabled = isEnabled; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterDeserializer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterDeserializer.java deleted file mode 100644 index df3998f0d9f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterDeserializer.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - 
* or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.ObjectCodec; -import com.fasterxml.jackson.core.TreeNode; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonNode; - -public class LSServerFilterDeserializer extends JsonDeserializer> { - @Override - public List deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { - ObjectCodec oc = jp.getCodec(); - JsonNode node = oc.readTree(jp); - - List filters = new ArrayList<>(); - for (JsonNode filterNode : node) { - if (filterNode.get("filter") == null) { - throw new IllegalArgumentException("Each filter element must have a field called 'filter' declaring it's type"); - } - switch (filterNode.get("filter").asText()) { - case "grok" : - filters.add(oc.treeToValue((TreeNode)filterNode, LSServerFilterGrok.class)); - break; - case "keyvalue" : - filters.add(oc.treeToValue((TreeNode)filterNode, LSServerFilterKeyValue.class)); - break; - case "json" : - filters.add(oc.treeToValue((TreeNode)filterNode, LSServerFilterJson.class)); - break; - } - } - - return filters; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java deleted file mode 100644 index 677603e165b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
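LSServerFilterDeserializer above is a hand-rolled polymorphic deserializer: it reads the element tree, inspects the filter discriminator and maps each subtree to the matching concrete class. A self-contained sketch of the same pattern with hypothetical Doc/GrokDoc/JsonDoc types (none of the Log Search classes are reused), assuming only Jackson databind.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.ObjectCodec;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;

public class FilterDispatchSketch {

  public static class Doc { public String filter; }
  public static class GrokDoc extends Doc { public String message_pattern; }
  public static class JsonDoc extends Doc { }

  public static class DocListDeserializer extends JsonDeserializer<List<Doc>> {
    @Override
    public List<Doc> deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
      ObjectCodec oc = jp.getCodec();
      JsonNode array = oc.readTree(jp);
      List<Doc> result = new ArrayList<>();
      for (JsonNode element : array) {
        // The value of the "filter" field decides which concrete class is instantiated.
        String type = element.get("filter").asText();
        Class<? extends Doc> target = "grok".equals(type) ? GrokDoc.class : JsonDoc.class;
        result.add(oc.treeToValue(element, target));
      }
      return result;
    }
  }

  public static class Holder {
    @JsonDeserialize(using = DocListDeserializer.class)
    public List<Doc> filter;
  }

  public static void main(String[] args) throws IOException {
    String json = "{\"filter\":[{\"filter\":\"grok\",\"message_pattern\":\"%{LOGLEVEL:level}\"},"
        + "{\"filter\":\"json\"}]}";
    Holder holder = new ObjectMapper().readValue(json, Holder.class);
    System.out.println(holder.filter.get(0).getClass().getSimpleName()); // prints GrokDoc
  }
}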
- */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerFilterGrok extends LSServerFilter { - @JsonProperty("log4j_format") - private String log4jFormat; - - @JsonProperty("multiline_pattern") - private String multilinePattern; - - @NotNull - @JsonProperty("message_pattern") - private String messagePattern; - - @JsonProperty - private boolean skipOnError; - - @JsonProperty - private boolean deepExtract; - - public LSServerFilterGrok() {} - - public LSServerFilterGrok(FilterDescriptor filterDescriptor) { - super(filterDescriptor); - if (filterDescriptor instanceof FilterGrokDescriptor) { - FilterGrokDescriptor filterGrokDescriptor = (FilterGrokDescriptor)filterDescriptor; - this.log4jFormat = filterGrokDescriptor.getLog4jFormat(); - this.multilinePattern = filterGrokDescriptor.getMultilinePattern(); - this.messagePattern = filterGrokDescriptor.getMessagePattern(); - this.skipOnError = filterGrokDescriptor.isSkipOnError(); - this.deepExtract = filterGrokDescriptor.isDeepExtract(); - } - } - - public String getLog4jFormat() { - return log4jFormat; - } - - public void setLog4jFormat(String log4jFormat) { - this.log4jFormat = log4jFormat; - } - - public String getMultilinePattern() { - return multilinePattern; - } - - public void setMultilinePattern(String multilinePattern) { - this.multilinePattern = multilinePattern; - } - - public String getMessagePattern() { - return messagePattern; - } - - public void setMessagePattern(String messagePattern) { - this.messagePattern = messagePattern; - } - - public boolean isSkipOnError() { - return skipOnError; - } - - public void setSkipOnError(boolean skipOnError) { - this.skipOnError = skipOnError; - } - - public boolean isDeepExtract() { - return deepExtract; - } - - public void setDeepExtract(boolean deepExtract) { - this.deepExtract = deepExtract; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java deleted file mode 100644 index d20f8426673..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
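For reference, the grok filter bean above is addressed on the wire with the snake_case names declared via @JsonProperty. A small round-trip sketch, assuming the model classes shown in this patch and Jackson databind are on the classpath; the pattern values are made up.

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.ambari.logsearch.model.common.LSServerFilterGrok;

public class GrokFilterPayloadSketch {
  public static void main(String[] args) throws Exception {
    // Made-up payload using the snake_case property names declared on LSServerFilter/LSServerFilterGrok.
    String json = "{"
        + "\"filter\":\"grok\","
        + "\"conditions\":{\"fields\":{\"type\":[\"ambari_server\"]}},"
        + "\"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\","
        + "\"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}:%{GREEDYDATA:log_message}\""
        + "}";
    LSServerFilterGrok grok = new ObjectMapper().readValue(json, LSServerFilterGrok.class);
    System.out.println(grok.getMessagePattern());
    System.out.println(grok.getConditions().getFields().getType()); // [ambari_server]
  }
}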
- */ - -package org.apache.ambari.logsearch.model.common; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerFilterJson extends LSServerFilter { - public LSServerFilterJson() {} - - public LSServerFilterJson(FilterDescriptor filterDescriptor) { - super(filterDescriptor); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java deleted file mode 100644 index a879bb8f409..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerFilterKeyValue extends LSServerFilter { - @JsonProperty("field_split") - private String fieldSplit; - - @JsonProperty("value_split") - private String valueSplit; - - @JsonProperty("value_borders") - private String valueBorders; - - public LSServerFilterKeyValue() {} - - public LSServerFilterKeyValue(FilterDescriptor filterDescriptor) { - super(filterDescriptor); - FilterKeyValueDescriptor filterKeyValueDescriptor = (FilterKeyValueDescriptor)filterDescriptor; - this.fieldSplit = filterKeyValueDescriptor.getFieldSplit(); - this.valueSplit = filterKeyValueDescriptor.getValueSplit(); - this.valueBorders = filterKeyValueDescriptor.getValueBorders(); - } - - public String getFieldSplit() { - return fieldSplit; - } - - public void setFieldSplit(String fieldSplit) { - this.fieldSplit = fieldSplit; - } - - public String getValueSplit() { - return valueSplit; - } - - public void setValueSplit(String valueSplit) { - this.valueSplit = valueSplit; - } - - public String getValueBorders() { - return valueBorders; - } - - public void setValueBorders(String valueBorders) { - this.valueBorders = valueBorders; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java deleted file mode 100644 index 272f44368c8..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.List; -import java.util.Map; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -@JsonInclude(Include.NON_NULL) -public abstract class LSServerInput { - @NotNull - private String type; - private String rowtype; - - @NotNull - private String path; - - @JsonProperty("add_fields") - private Map addFields; - - private String source; - private Boolean tail; - - @JsonProperty("gen_event_md5") - private Boolean genEventMd5; - - @JsonProperty("use_event_md5_as_id") - private Boolean useEventMd5AsId; - - @JsonProperty("cache_enabled") - private Boolean cacheEnabled; - - @JsonProperty("cache_key_field") - private String cacheKeyField; - - @JsonProperty("cache_last_dedup_enabled") - private Boolean cacheLastDedupEnabled; - - @JsonProperty("cache_size") - private Integer cacheSize; - - @JsonProperty("cache_dedup_interval") - private Long cacheDedupInterval; - - @JsonProperty("is_enabled") - private Boolean isEnabled; - - @JsonProperty("init_default_fields") - private Boolean initDefaultFields; - - @JsonProperty("default_log_levels") - private List defaultLogLevels; - - public LSServerInput() {} - - public LSServerInput(InputDescriptor inputDescriptor) { - this.type = inputDescriptor.getType(); - this.rowtype = inputDescriptor.getRowtype(); - this.path = inputDescriptor.getPath(); - this.addFields = inputDescriptor.getAddFields(); - this.source = inputDescriptor.getSource(); - this.tail = inputDescriptor.isTail(); - this.genEventMd5 = inputDescriptor.isGenEventMd5(); - this.useEventMd5AsId = inputDescriptor.isUseEventMd5AsId(); - this.cacheEnabled = inputDescriptor.isCacheEnabled(); - this.cacheKeyField = inputDescriptor.getCacheKeyField(); - this.cacheLastDedupEnabled = inputDescriptor.getCacheLastDedupEnabled(); - this.cacheSize = inputDescriptor.getCacheSize(); - this.cacheDedupInterval = inputDescriptor.getCacheDedupInterval(); - this.isEnabled = inputDescriptor.isEnabled(); - this.initDefaultFields = inputDescriptor.isInitDefaultFields(); - this.defaultLogLevels = inputDescriptor.getDefaultLogLevels(); - } - - public String getType() { - return type; - } - - public String getRowtype() { - return rowtype; - } - - public String getPath() { - return path; - } - - public Map 
getAddFields() { - return addFields; - } - - public String getSource() { - return source; - } - - public Boolean getTail() { - return tail; - } - - public Boolean getGenEventMd5() { - return genEventMd5; - } - - public Boolean getUseEventMd5AsId() { - return useEventMd5AsId; - } - - public Boolean getCacheEnabled() { - return cacheEnabled; - } - - public String getCacheKeyField() { - return cacheKeyField; - } - - public Boolean getCacheLastDedupEnabled() { - return cacheLastDedupEnabled; - } - - public Integer getCacheSize() { - return cacheSize; - } - - public Long getCacheDedupInterval() { - return cacheDedupInterval; - } - - public Boolean getIsEnabled() { - return isEnabled; - } - - public Boolean getInitDefaultFields() { - return initDefaultFields; - } - - public List getDefaultLogLevels() { - return defaultLogLevels; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java deleted file mode 100644 index 1c4939f89b0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.ArrayList; -import java.util.List; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterJsonDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor; - -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputSocketDescriptor; - -@ApiModel -public class LSServerInputConfig { - @Valid - @NotNull - @ApiModelProperty - @JsonDeserialize(using = LSServerInputDeserializer.class) - private List input; - - @Valid - @NotNull - @ApiModelProperty - @JsonDeserialize(using = LSServerFilterDeserializer.class) - private List filter; - - public LSServerInputConfig() {} - - public LSServerInputConfig(InputConfig inputConfig) { - input = new ArrayList<>(); - for (InputDescriptor inputDescriptor : inputConfig.getInput()) { - if (inputDescriptor instanceof InputFileBaseDescriptor) { - LSServerInput inputItem = new LSServerInputFile(inputDescriptor); - input.add(inputItem); - } else if (inputDescriptor instanceof InputS3FileDescriptor) { - LSServerInput inputItem = new LSServerInputS3File(inputDescriptor); - input.add(inputItem); - } else if (inputDescriptor instanceof InputSocketDescriptor) { - LSServerInput inputItem = new LSServerInputSocket(inputDescriptor); - input.add(inputItem); - } - } - - filter = new ArrayList<>(); - for (FilterDescriptor filterDescriptor : inputConfig.getFilter()) { - if (filterDescriptor instanceof FilterGrokDescriptor) { - LSServerFilter filterItem = new LSServerFilterGrok(filterDescriptor); - filter.add(filterItem); - } else if (filterDescriptor instanceof FilterKeyValueDescriptor) { - LSServerFilter filterItem = new LSServerFilterKeyValue(filterDescriptor); - filter.add(filterItem); - } else if (filterDescriptor instanceof FilterJsonDescriptor) { - LSServerFilter filterItem = new LSServerFilterJson(filterDescriptor); - filter.add(filterItem); - } - } - } - - public List getInput() { - return input; - } - - public void setInput(List input) { - this.input = input; - } - - public List getFilter() { - return filter; - } - - public void setFilter(List filter) { - this.filter = filter; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputDeserializer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputDeserializer.java deleted file mode 100644 index 63a86272740..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputDeserializer.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.ObjectCodec; -import com.fasterxml.jackson.core.TreeNode; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonNode; - -public class LSServerInputDeserializer extends JsonDeserializer> { - @Override - public List deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { - ObjectCodec oc = jp.getCodec(); - JsonNode node = oc.readTree(jp); - - List inputs = new ArrayList<>(); - for (JsonNode inputNode : node) { - - String source = null; - if (inputNode.get("source") != null) { - source = inputNode.get("source").asText(); - } else { - source = (inputNode.get("s3_access_key") != null || inputNode.get("s3_secret_key") != null) ? "s3_file" : "file"; - } - - switch (source) { - case "file" : - inputs.add(oc.treeToValue((TreeNode)inputNode, LSServerInputFile.class)); - break; - case "s3_file" : - inputs.add(oc.treeToValue((TreeNode)inputNode, LSServerInputS3File.class)); - break; - } - } - - return inputs; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputFile.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputFile.java deleted file mode 100644 index 012455e9607..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputFile.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
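The notable detail in LSServerInputDeserializer above is that source is optional: when it is absent, the presence of s3_access_key or s3_secret_key marks the element as an S3 input, otherwise it is treated as a plain file input. A sketch of just that inference, assuming Jackson's JsonNode API; detectSource is a hypothetical helper name.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InputSourceSketch {
  static String detectSource(JsonNode inputNode) {
    if (inputNode.get("source") != null) {
      return inputNode.get("source").asText();           // an explicit source always wins
    }
    boolean looksLikeS3 = inputNode.get("s3_access_key") != null
        || inputNode.get("s3_secret_key") != null;
    return looksLikeS3 ? "s3_file" : "file";              // otherwise fall back on the credential heuristic
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode s3 = mapper.readTree("{\"type\":\"x\",\"path\":\"/tmp/x.log\",\"s3_access_key\":\"k\"}");
    JsonNode plain = mapper.readTree("{\"type\":\"y\",\"path\":\"/var/log/y.log\"}");
    System.out.println(detectSource(s3));    // s3_file
    System.out.println(detectSource(plain)); // file
  }
}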
- */ - -package org.apache.ambari.logsearch.model.common; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; - -import io.swagger.annotations.ApiModel; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileDescriptor; - -@ApiModel -public class LSServerInputFile extends LSServerInputFileBase { - - @JsonProperty("detach_interval_min") - private Integer detachIntervalMin; - - @JsonProperty("detach_time_min") - private Integer detachTimeMin; - - @JsonProperty("path_update_interval_min") - private Integer pathUpdateIntervalMin; - - @JsonProperty("max_age_min") - private Integer maxAgeMin; - - @JsonProperty("docker") - private Boolean dockerEnabled; - - public LSServerInputFile() {} - - public LSServerInputFile(InputDescriptor inputDescriptor) { - super(inputDescriptor); - InputFileDescriptor inputFileDescriptor = (InputFileDescriptor)inputDescriptor; - this.detachIntervalMin = inputFileDescriptor.getDetachIntervalMin(); - this.detachTimeMin = inputFileDescriptor.getDetachTimeMin(); - this.pathUpdateIntervalMin = inputFileDescriptor.getPathUpdateIntervalMin(); - this.maxAgeMin = inputFileDescriptor.getMaxAgeMin(); - this.dockerEnabled = inputFileDescriptor.getDockerEnabled(); - } - - public Integer getDetachIntervalMin() { - return detachIntervalMin; - } - - public void setDetachIntervalMin(Integer detachIntervalMin) { - this.detachIntervalMin = detachIntervalMin; - } - - public Integer getDetachTimeMin() { - return detachTimeMin; - } - - public void setDetachTimeMin(Integer detachTimeMin) { - this.detachTimeMin = detachTimeMin; - } - - public Integer getPathUpdateIntervalMin() { - return pathUpdateIntervalMin; - } - - public void setPathUpdateIntervalMin(Integer pathUpdateIntervalMin) { - this.pathUpdateIntervalMin = pathUpdateIntervalMin; - } - - public Integer getMaxAgeMin() { - return maxAgeMin; - } - - public void setMaxAgeMin(Integer maxAgeMin) { - this.maxAgeMin = maxAgeMin; - } - - public Boolean getDockerEnabled() { - return dockerEnabled; - } - - public void setDockerEnabled(Boolean dockerEnabled) { - this.dockerEnabled = dockerEnabled; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputFileBase.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputFileBase.java deleted file mode 100644 index 429d50a44c6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputFileBase.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public abstract class LSServerInputFileBase extends LSServerInput { - @JsonProperty("checkpoint_interval_ms") - private Integer checkpointIntervalMs; - - @JsonProperty("process_file") - private Boolean processFile; - - @JsonProperty("copy_file") - private Boolean copyFile; - - public LSServerInputFileBase() {} - - public LSServerInputFileBase(InputDescriptor inputDescriptor) { - super(inputDescriptor); - - InputFileBaseDescriptor inputFileBaseDescriptor = (InputFileBaseDescriptor)inputDescriptor; - this.checkpointIntervalMs = inputFileBaseDescriptor.getCheckpointIntervalMs(); - this.processFile = inputFileBaseDescriptor.getProcessFile(); - this.copyFile = inputFileBaseDescriptor.getCopyFile(); - } - - public Integer getCheckpointIntervalMs() { - return checkpointIntervalMs; - } - - public void setCheckpointIntervalMs(Integer checkpointIntervalMs) { - this.checkpointIntervalMs = checkpointIntervalMs; - } - - public Boolean getProcessFile() { - return processFile; - } - - public void setProcessFile(Boolean processFile) { - this.processFile = processFile; - } - - public Boolean getCopyFile() { - return copyFile; - } - - public void setCopyFile(Boolean copyFile) { - this.copyFile = copyFile; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputS3File.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputS3File.java deleted file mode 100644 index 24d25c4dae0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputS3File.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerInputS3File extends LSServerInputFileBase { - @NotNull - @JsonProperty("s3_access_key") - private String s3AccessKey; - - @NotNull - @JsonProperty("s3_secret_key") - private String s3SecretKey; - - public LSServerInputS3File() {} - - public LSServerInputS3File(InputDescriptor inputDescriptor) { - super(inputDescriptor); - InputS3FileDescriptor inputS3FileDescriptor = (InputS3FileDescriptor)inputDescriptor; - this.s3AccessKey = inputS3FileDescriptor.getS3AccessKey(); - this.s3SecretKey = inputS3FileDescriptor.getS3SecretKey(); - } - - public String getS3AccessKey() { - return s3AccessKey; - } - - public void setS3AccessKey(String s3AccessKey) { - this.s3AccessKey = s3AccessKey; - } - - public String getS3SecretKey() { - return s3SecretKey; - } - - public void setS3SecretKey(String s3SecretKey) { - this.s3SecretKey = s3SecretKey; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputSocket.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputSocket.java deleted file mode 100644 index efe0e3bfe3c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputSocket.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.common; - -import com.fasterxml.jackson.annotation.JsonProperty; -import io.swagger.annotations.ApiModel; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.InputSocketDescriptor; - -@ApiModel -public class LSServerInputSocket extends LSServerInput { - - @JsonProperty("port") - private Integer port; - - @JsonProperty("protocol") - private String protocol; - - @JsonProperty("secure") - private Boolean secure; - - @JsonProperty("log4j") - private Boolean log4j; - - public LSServerInputSocket(InputDescriptor inputDescriptor) { - super(inputDescriptor); - InputSocketDescriptor inputSocketDescriptor = (InputSocketDescriptor) inputDescriptor; - this.port = inputSocketDescriptor.getPort(); - this.protocol = inputSocketDescriptor.getProtocol(); - this.secure = inputSocketDescriptor.isSecure(); - this.log4j = inputSocketDescriptor.isLog4j(); - } - - public Integer getPort() { - return port; - } - - public void setPort(Integer port) { - this.port = port; - } - - public String getProtocol() { - return protocol; - } - - public void setProtocol(String protocol) { - this.protocol = protocol; - } - - public Boolean getSecure() { - return secure; - } - - public void setSecure(Boolean secure) { - this.secure = secure; - } - - public Boolean getLog4j() { - return log4j; - } - - public void setLog4j(Boolean log4j) { - this.log4j = log4j; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerLogLevelFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerLogLevelFilter.java deleted file mode 100644 index f79fafa5175..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerLogLevelFilter.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.Date; -import java.util.List; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -@ApiModel -public class LSServerLogLevelFilter { - - @NotNull - @ApiModelProperty - private String label; - - @NotNull - @ApiModelProperty - private List hosts; - - @NotNull - @ApiModelProperty - private List defaultLevels; - - @ApiModelProperty - private List overrideLevels; - - @ApiModelProperty - private Date expiryTime; - - public LSServerLogLevelFilter() {} - - public LSServerLogLevelFilter(LogLevelFilter logLevelFilter) { - label = logLevelFilter.getLabel(); - hosts = logLevelFilter.getHosts(); - defaultLevels = logLevelFilter.getDefaultLevels(); - overrideLevels = logLevelFilter.getOverrideLevels(); - expiryTime = logLevelFilter.getExpiryTime(); - } - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - public List getHosts() { - return hosts; - } - - public void setHosts(List hosts) { - this.hosts = hosts; - } - - public List getDefaultLevels() { - return defaultLevels; - } - - public void setDefaultLevels(List defaultLevels) { - this.defaultLevels = defaultLevels; - } - - public List getOverrideLevels() { - return overrideLevels; - } - - public void setOverrideLevels(List overrideLevels) { - this.overrideLevels = overrideLevels; - } - - public Date getExpiryTime() { - return expiryTime; - } - - public void setExpiryTime(Date expiryTime) { - this.expiryTime = expiryTime; - } - - public LogLevelFilter convertToApi() { - LogLevelFilter apiFilter = new LogLevelFilter(); - - apiFilter.setLabel(label); - apiFilter.setHosts(hosts); - apiFilter.setDefaultLevels(defaultLevels); - apiFilter.setOverrideLevels(overrideLevels); - apiFilter.setExpiryTime(expiryTime); - - return apiFilter; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerLogLevelFilterMap.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerLogLevelFilterMap.java deleted file mode 100644 index 8081a27702b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerLogLevelFilterMap.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.Map; -import java.util.TreeMap; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter; -import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -@ApiModel -public class LSServerLogLevelFilterMap { - - @Valid - @NotNull - @ApiModelProperty - private TreeMap filter; - - public LSServerLogLevelFilterMap() {} - - public LSServerLogLevelFilterMap(LogLevelFilterMap logLevelFilterMap) { - filter = new TreeMap<>(); - for (Map.Entry e : logLevelFilterMap.getFilter().entrySet()) { - filter.put(e.getKey(), new LSServerLogLevelFilter(e.getValue())); - } - } - - public TreeMap getFilter() { - return filter; - } - - public void setFilter(TreeMap filter) { - this.filter = filter; - } - - public LogLevelFilterMap convertToApi() { - LogLevelFilterMap logLevelFilterMap = new LogLevelFilterMap(); - - TreeMap apiFilter = new TreeMap<>(); - for (Map.Entry e : filter.entrySet()) { - apiFilter.put(e.getKey(), e.getValue().convertToApi()); - } - logLevelFilterMap.setFilter(apiFilter); - - return logLevelFilterMap; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapDate.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapDate.java deleted file mode 100644 index 96e0287854b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapDate.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapDateDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -public class LSServerMapDate extends LSServerMapField { - @Override - public String getName() { - return "map_date"; - } - - @JsonProperty("src_date_pattern") - private String sourceDatePattern; - - @NotNull - @JsonProperty("target_date_pattern") - private String targetDatePattern; - - public LSServerMapDate() {} - - public LSServerMapDate(MapDateDescriptor mapDateDescriptor) { - this.sourceDatePattern = mapDateDescriptor.getSourceDatePattern(); - this.targetDatePattern = mapDateDescriptor.getTargetDatePattern(); - } - - public String getSourceDatePattern() { - return sourceDatePattern; - } - - public void setSourceDatePattern(String sourceDatePattern) { - this.sourceDatePattern = sourceDatePattern; - } - - public String getTargetDatePattern() { - return targetDatePattern; - } - - public void setTargetDatePattern(String targetDatePattern) { - this.targetDatePattern = targetDatePattern; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapField.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapField.java deleted file mode 100644 index df33da1ed04..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapField.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonInclude.Include; - -import io.swagger.annotations.ApiModel; - -@ApiModel -@JsonIgnoreProperties(value = { "name" }) -@JsonInclude(Include.NON_NULL) -public abstract class LSServerMapField { - public abstract String getName(); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldAnonymize.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldAnonymize.java deleted file mode 100644 index 9fb589ef8b0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldAnonymize.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapAnonymizeDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerMapFieldAnonymize extends LSServerMapField { - @Override - public String getName() { - return "map_anonymize"; - } - - @NotNull - private String pattern; - - @JsonProperty("hide_char") - private Character hideChar; - - public LSServerMapFieldAnonymize() {} - - public LSServerMapFieldAnonymize(MapAnonymizeDescriptor mapAnonymizeDescriptor) { - this.pattern = mapAnonymizeDescriptor.getPattern(); - this.hideChar = mapAnonymizeDescriptor.getHideChar(); - } - - public String getPattern() { - return pattern; - } - - public void setPattern(String pattern) { - this.pattern = pattern; - } - - public Character getHideChar() { - return hideChar; - } - - public void setHideChar(Character hideChar) { - this.hideChar = hideChar; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldCopy.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldCopy.java deleted file mode 100644 index 4da55016724..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldCopy.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldCopyDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerMapFieldCopy extends LSServerMapField { - @Override - public String getName() { - return "map_field_copy"; - } - - @NotNull - @JsonProperty("copy_name") - private String copyName; - - public LSServerMapFieldCopy() {} - - public LSServerMapFieldCopy(MapFieldCopyDescriptor mapFieldCopyDescriptor) { - this.copyName = mapFieldCopyDescriptor.getCopyName(); - } - - public String getCopyName() { - return copyName; - } - - public void setCopyName(String copyName) { - this.copyName = copyName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldName.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldName.java deleted file mode 100644 index 8468b380720..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldName.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldNameDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerMapFieldName extends LSServerMapField { - @Override - public String getName() { - return "map_field_name"; - } - - @NotNull - @JsonProperty("new_field_name") - private String newFieldName; - - public LSServerMapFieldName() {} - - public LSServerMapFieldName(MapFieldNameDescriptor mapFieldNameDescriptor) { - this.newFieldName = mapFieldNameDescriptor.getNewFieldName(); - } - - public String getNewFieldName() { - return newFieldName; - } - - public void setNewFieldName(String newFieldName) { - this.newFieldName = newFieldName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldValue.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldValue.java deleted file mode 100644 index 389fcb3dfac..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerMapFieldValue.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldValueDescriptor; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerMapFieldValue extends LSServerMapField { - @Override - public String getName() { - return "map_field_value"; - } - - @NotNull - @JsonProperty("pre_value") - private String preValue; - - @NotNull - @JsonProperty("post_value") - private String postValue; - - public LSServerMapFieldValue() {} - - public LSServerMapFieldValue(MapFieldValueDescriptor mapFieldValueDescriptor) { - this.preValue = mapFieldValueDescriptor.getPreValue(); - this.postValue = mapFieldValueDescriptor.getPostValue(); - } - - public String getPreValue() { - return preValue; - } - - public void setPreValue(String preValue) { - this.preValue = preValue; - } - - public String getPostValue() { - return postValue; - } - - public void setPostValue(String postValue) { - this.postValue = postValue; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValues.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValues.java deleted file mode 100644 index c62a9fd2b31..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValues.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.ArrayList; -import java.util.List; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapDateDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldCopyDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldNameDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.MapFieldValueDescriptor; -import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues; - -import io.swagger.annotations.ApiModel; - -@ApiModel -public class LSServerPostMapValues { - @Valid - @NotNull - private List mappers; - - public LSServerPostMapValues() {} - - public LSServerPostMapValues(PostMapValues pmv) { - mappers = new ArrayList<>(); - for (MapFieldDescriptor mapFieldDescriptor : pmv.getMappers()) { - mappers.add(convert(mapFieldDescriptor)); - } - } - - private LSServerMapField convert(MapFieldDescriptor mapFieldDescriptor) { - if (mapFieldDescriptor instanceof MapDateDescriptor) { - return new LSServerMapDate((MapDateDescriptor)mapFieldDescriptor); - } else if (mapFieldDescriptor instanceof MapFieldCopyDescriptor) { - return new LSServerMapFieldCopy((MapFieldCopyDescriptor)mapFieldDescriptor); - } else if (mapFieldDescriptor instanceof MapFieldNameDescriptor) { - return new LSServerMapFieldName((MapFieldNameDescriptor)mapFieldDescriptor); - } else if (mapFieldDescriptor instanceof MapFieldValueDescriptor) { - return new LSServerMapFieldValue((MapFieldValueDescriptor)mapFieldDescriptor); - } - - throw new IllegalArgumentException("Unknown mapper: " + mapFieldDescriptor.getClass()); - } - - public List getMappers() { - return mappers; - } - - public void setMappers(List mappers) { - this.mappers = mappers; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesList.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesList.java deleted file mode 100644 index 2d29de05668..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesList.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.util.ArrayList; -import java.util.List; - -import javax.validation.Valid; -import javax.validation.constraints.NotNull; - -import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues; - -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -import io.swagger.annotations.ApiModel; - -@ApiModel -@JsonSerialize(using = LSServerPostMapValuesListSerializer.class) -@JsonDeserialize(using = LSServerPostMapValuesListDeserializer.class) -public class LSServerPostMapValuesList { - @Valid - @NotNull - private List mapperLists; - - public LSServerPostMapValuesList() {} - - public LSServerPostMapValuesList(List list) { - mapperLists = new ArrayList<>(); - for (PostMapValues postMapValues : list) { - mapperLists.add(new LSServerPostMapValues(postMapValues)); - } - } - - public List getMappersList() { - return mapperLists; - } - - public void setMappersList(List mapperLists) { - this.mapperLists = mapperLists; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesListDeserializer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesListDeserializer.java deleted file mode 100644 index de21e20aab0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesListDeserializer.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.model.common; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.ObjectCodec; -import com.fasterxml.jackson.core.TreeNode; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonNode; - -public class LSServerPostMapValuesListDeserializer extends JsonDeserializer { - @Override - public LSServerPostMapValuesList deserialize(JsonParser jp, DeserializationContext ctxt) - throws IOException { - ObjectCodec oc = jp.getCodec(); - JsonNode node = oc.readTree(jp); - - List mappersList = new ArrayList<>(); - for (JsonNode childNode : node) { - List mappers = new ArrayList<>(); - for (Iterator> i = childNode.fields(); i.hasNext();) { - Map.Entry mapperData = i.next(); - String mapperType = mapperData.getKey(); - JsonNode mapperProperties = mapperData.getValue(); - switch (mapperType) { - case "map_date" : - LSServerMapDate mapDate = oc.treeToValue(mapperProperties, LSServerMapDate.class); - mappers.add(mapDate); - break; - case "map_field_name" : - LSServerMapFieldName mapFieldName = oc.treeToValue(mapperProperties, LSServerMapFieldName.class); - mappers.add(mapFieldName); - break; - case "map_field_value" : - LSServerMapFieldValue mapFieldValue = oc.treeToValue(mapperProperties, LSServerMapFieldValue.class); - mappers.add(mapFieldValue); - break; - case "map_field_copy" : - LSServerMapFieldCopy mapFieldCopy = oc.treeToValue(mapperProperties, LSServerMapFieldCopy.class); - mappers.add(mapFieldCopy); - break; - case "map_anonymize" : - LSServerMapFieldAnonymize mapAnonymize = oc.treeToValue(mapperProperties, LSServerMapFieldAnonymize.class); - mappers.add(mapAnonymize); - break; - } - } - - LSServerPostMapValues lsServerPostMapValues = new LSServerPostMapValues(); - lsServerPostMapValues.setMappers(mappers); - mappersList.add(lsServerPostMapValues); - } - - LSServerPostMapValuesList lsServerPostMapValuesList = new LSServerPostMapValuesList(); - lsServerPostMapValuesList.setMappersList(mappersList); - return lsServerPostMapValuesList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesListSerializer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesListSerializer.java deleted file mode 100644 index bb1eadf78af..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerPostMapValuesListSerializer.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.model.common; - -import java.io.IOException; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonSerializer; -import com.fasterxml.jackson.databind.SerializerProvider; - -public class LSServerPostMapValuesListSerializer extends JsonSerializer { - @Override - public void serialize(LSServerPostMapValuesList value, JsonGenerator jgen, SerializerProvider provider) - throws IOException, JsonProcessingException { - jgen.writeStartArray(); - for (LSServerPostMapValues postMapValues : value.getMappersList()) { - jgen.writeStartObject(); - for (LSServerMapField mapField : postMapValues.getMappers()) { - jgen.writeObjectField(mapField.getName(), mapField); - } - jgen.writeEndObject(); - } - - jgen.writeEndArray(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/AuditFieldMetadataResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/AuditFieldMetadataResponse.java deleted file mode 100644 index 3ef96ff1e2e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/AuditFieldMetadataResponse.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
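For reference, the custom (de)serializer pair above maps post_map_values entries to a JSON array of single-purpose objects keyed by mapper name (map_date, map_field_name, map_field_value, map_field_copy, map_anonymize), with each mapper's properties as the value. The stand-alone round-trip sketch below is illustrative only; the driver class and the sample pattern values are assumptions and are not part of the deleted sources.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.ambari.logsearch.model.common.LSServerPostMapValuesList;

// Illustrative sketch: exercises the array-of-keyed-objects format consumed by
// LSServerPostMapValuesListDeserializer and reproduced by LSServerPostMapValuesListSerializer.
public class PostMapValuesRoundTrip {
  public static void main(String[] args) throws Exception {
    // Sample values are assumptions chosen only to show the shape of the JSON.
    String json = "[{\"map_date\":{\"src_date_pattern\":\"yyyy-MM-dd HH:mm:ss\","
        + "\"target_date_pattern\":\"yyyy-MM-dd'T'HH:mm:ss.SSS\"}},"
        + "{\"map_field_name\":{\"new_field_name\":\"log_message\"}}]";
    ObjectMapper mapper = new ObjectMapper();
    // The @JsonDeserialize annotation on LSServerPostMapValuesList routes this call
    // through the custom deserializer, which switches on the mapper-name key.
    LSServerPostMapValuesList list = mapper.readValue(json, LSServerPostMapValuesList.class);
    // The @JsonSerialize annotation routes this back through the custom serializer,
    // which keys each mapper object by its getName() value.
    System.out.println(mapper.writeValueAsString(list));
  }
}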
- */ -package org.apache.ambari.logsearch.model.metadata; - -import io.swagger.annotations.ApiModel; - -import java.util.List; -import java.util.Map; - -@ApiModel -public class AuditFieldMetadataResponse { - - private final List defaults; - - private final Map> overrides; - - public AuditFieldMetadataResponse(List defaults, Map> overrides) { - this.defaults = defaults; - this.overrides = overrides; - } - - public List getDefaults() { - return defaults; - } - - public Map> getOverrides() { - return overrides; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/ComponentMetadata.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/ComponentMetadata.java deleted file mode 100644 index 41b6b2d0d9f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/ComponentMetadata.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.metadata; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class ComponentMetadata extends Metadata implements Groupable { - private final String group; - - public ComponentMetadata(String name, String label, String group) { - super(name, label); - this.group = group; - } - - @Override - public String getGroup() { - return group; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/FieldMetadata.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/FieldMetadata.java deleted file mode 100644 index 2eec14f5d4b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/FieldMetadata.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.metadata; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class FieldMetadata extends Metadata implements Filterable, Visible { - private final boolean filterable; - private final boolean visible; - - public FieldMetadata(String name, String label, Boolean filterable, Boolean visible) { - super(name, label); - this.visible = visible; - this.filterable = filterable; - } - - @Override - public Boolean isFilterable() { - return this.filterable; - } - - @Override - public boolean isVisible() { - return this.visible; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Filterable.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Filterable.java deleted file mode 100644 index e076e81e2e7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Filterable.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.metadata; - -public interface Filterable { - - Boolean isFilterable(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Groupable.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Groupable.java deleted file mode 100644 index 49d9f7137ac..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Groupable.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.metadata; - -public interface Groupable { - - String getGroup(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Labelable.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Labelable.java deleted file mode 100644 index eb82b81a85a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Labelable.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.metadata; - -public interface Labelable { - - String getName(); - - String getLabel(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Metadata.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Metadata.java deleted file mode 100644 index 12fd664f086..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Metadata.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.metadata; - -public class Metadata implements Labelable { - - private final String name; - private final String label; - - Metadata(String name, String label) { - this.name = name; - this.label = label; - } - - @Override - public String getName() { - return name; - } - - @Override - public String getLabel() { - return label; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/ServiceComponentMetadataWrapper.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/ServiceComponentMetadataWrapper.java deleted file mode 100644 index 0c537c31903..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/ServiceComponentMetadataWrapper.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.metadata; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; - -import java.util.List; -import java.util.Map; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class ServiceComponentMetadataWrapper { - private final Map groups; - private final List metadata; - - public ServiceComponentMetadataWrapper(List metadata, Map groups) { - this.groups = groups; - this.metadata = metadata; - } - - public Map getGroups() { - return groups; - } - - public List getMetadata() { - return metadata; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Visible.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Visible.java deleted file mode 100644 index f105b736871..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/metadata/Visible.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.metadata; - -public interface Visible { - - boolean isVisible(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java deleted file mode 100644 index e92d7bf9183..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.X_AXIS_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.Y_AXIS_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.STACK_BY_D; - -public interface AnyGraphParamDefinition { - - String getxAxis(); - - @ApiParam(value = X_AXIS_D, name = LogSearchConstants.REQUEST_PARAM_XAXIS) - void setxAxis(String xAxis); - - String getyAxis(); - - @ApiParam(value = Y_AXIS_D, name = LogSearchConstants.REQUEST_PARAM_YAXIS) - void setyAxis(String yAxis); - - String getStackBy(); - - @ApiParam(value = STACK_BY_D, name = LogSearchConstants.REQUEST_PARAM_STACK_BY) - void setStackBy(String stackBy); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java deleted file mode 100644 index aa1a393fb96..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.BUNDLE_ID; - -public interface BundleIdParamDefinition { - - String getBundleId(); - - @ApiParam(value = BUNDLE_ID, name = LogSearchConstants.REQUEST_PARAM_BUNDLE_ID) - void setBundleId(String bundleId); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ClustersParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ClustersParamDefinition.java deleted file mode 100644 index cb15b0ab184..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ClustersParamDefinition.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.CLUSTER_D; - -public interface ClustersParamDefinition { - - String getClusters(); - - @ApiParam(value = CLUSTER_D, name = LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) - void setClusters(String cluster); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java deleted file mode 100644 index ef334d117f2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.START_TIME_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.END_TIME_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.START_INDEX_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.PAGE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.PAGE_SIZE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.SORT_BY_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.SORT_TYPE_D; - - -public interface CommonSearchParamDefinition { - - String getStartIndex(); - - @ApiParam(value = START_INDEX_D, name = LogSearchConstants.REQUEST_PARAM_START_INDEX) - void setStartIndex(String startIndex); - - String getPage(); - - @ApiParam(value = PAGE_D, name = LogSearchConstants.REQUEST_PARAM_PAGE) - void setPage(String page); - - String getPageSize(); - - @ApiParam(value = PAGE_SIZE_D, name = LogSearchConstants.REQUEST_PARAM_PAGE_SIZE) - void setPageSize(String pageSize); - - String getSortBy(); - - @ApiParam(value = SORT_BY_D, name = LogSearchConstants.REQUEST_PARAM_SORT_BY) - void setSortBy(String sortBy); - - String getSortType(); - - @ApiParam(value = SORT_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_SORT_TYPE) - void setSortType(String sortType); - - String getStartTime(); - - @ApiParam(value = START_TIME_D, name = LogSearchConstants.REQUEST_PARAM_START_TIME) - void setStartTime(String startTime); - - String getEndTime(); - - @ApiParam(value = END_TIME_D, name = LogSearchConstants.REQUEST_PARAM_END_TIME) - void setEndTime(String endTime); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java deleted file mode 100644 index e6edf7c7362..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
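The param-definition interfaces in this package carry Swagger @ApiParam documentation on their setters, and a request bean implements whichever combination it needs. A minimal hypothetical implementation is sketched below; the class name and the particular pairing of interfaces are illustrative assumptions, not code taken from the deleted sources.

import org.apache.ambari.logsearch.model.request.BundleIdParamDefinition;
import org.apache.ambari.logsearch.model.request.ClustersParamDefinition;

// Illustrative sketch only: a request bean wiring up two of the param-definition interfaces
// shown in this diff, so the accessors satisfy the documented getter/setter contracts.
public class ExampleSearchRequest implements ClustersParamDefinition, BundleIdParamDefinition {
  private String clusters;
  private String bundleId;

  @Override
  public String getClusters() { return clusters; }

  @Override
  public void setClusters(String clusters) { this.clusters = clusters; }

  @Override
  public String getBundleId() { return bundleId; }

  @Override
  public void setBundleId(String bundleId) { this.bundleId = bundleId; }
}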
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FROM_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.TO_D; - -public interface DateRangeParamDefinition { - - String getFrom(); - - @ApiParam(value = FROM_D, name = LogSearchConstants.REQUEST_PARAM_FROM) - void setFrom(String from); - - String getTo(); - - @ApiParam(value = TO_D, name = LogSearchConstants.REQUEST_PARAM_TO) - void setTo(String to); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/EventHistoryParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/EventHistoryParamDefinition.java deleted file mode 100644 index d7a5b015024..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/EventHistoryParamDefinition.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.EventHistoryDescriptions.FILTER_NAME_D; -import static org.apache.ambari.logsearch.doc.DocConstants.EventHistoryDescriptions.ROW_TYPE_D; - -public interface EventHistoryParamDefinition { - - String getFilterName(); - - @ApiParam(value = FILTER_NAME_D, name = LogSearchConstants.REQUEST_PARAM_FILTER_NAME) - void setFilterName(String filterName); - - String getRowType(); - - @ApiParam(value = ROW_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_ROW_TYPE) - void setRowType(String rowType); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java deleted file mode 100644 index 66b3eebede6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FIELD_D; - -public interface FieldParamDefinition { - - String getField(); - - @ApiParam(value = FIELD_D, name = LogSearchConstants.REQUEST_PARAM_FIELD, required = true) - void setField(String field); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java deleted file mode 100644 index 47f06201914..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FORMAT_D; - -public interface FormatParamDefinition { - - String getFormat(); - - @ApiParam(value = FORMAT_D, name = LogSearchConstants.REQUEST_PARAM_FORMAT) - void setFormat(String format); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/HostComponentParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/HostComponentParamDefinition.java deleted file mode 100644 index 914b5f09f33..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/HostComponentParamDefinition.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.HOST_NAME_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.COMPONENT_NAME_D; - -public interface HostComponentParamDefinition { - - String getHostName(); - - @ApiParam(value = HOST_NAME_D, name = LogSearchConstants.REQUEST_PARAM_HOST_NAME) - void setHostName(String hostName); - - String getComponentName(); - - @ApiParam(value = COMPONENT_NAME_D, name = LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME) - void setComponentName(String componentName); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java deleted file mode 100644 index c8531db8aba..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.IS_LAST_PAGE_D; - -public interface LastPageParamDefinition { - boolean isLastPage(); - - @ApiParam(value = IS_LAST_PAGE_D, name = LogSearchConstants.REQUEST_PARAM_LAST_PAGE) - void setLastPage(boolean lastPage); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java deleted file mode 100644 index e2c61ee06b1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.E_MESSAGE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.I_MESSAGE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.MUST_BE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.MUST_NOT_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.INCLUDE_QUERY_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.EXCLUDE_QUERY_D; - -public interface LogParamDefinition { - - String getIncludeMessage(); - - @ApiParam(value = I_MESSAGE_D, name = LogSearchConstants.REQUEST_PARAM_I_MESSAGE) - void setIncludeMessage(String includeMessage); - - String getExcludeMessage(); - - @ApiParam(value = E_MESSAGE_D, name = LogSearchConstants.REQUEST_PARAM_E_MESSAGE) - void setExcludeMessage(String excludeMessage); - - String getMustBe(); - - @ApiParam(value = MUST_BE_D, name = LogSearchConstants.REQUEST_PARAM_MUST_BE) - void setMustBe(String mustBe); - - String getMustNot(); - - @ApiParam(value = MUST_NOT_D, name = LogSearchConstants.REQUEST_PARAM_MUST_NOT) - void setMustNot(String mustNot); - - String getIncludeQuery(); - - @ApiParam(value = INCLUDE_QUERY_D, name = LogSearchConstants.REQUEST_PARAM_INCLUDE_QUERY) - void setIncludeQuery(String includeQuery); - - String getExcludeQuery(); - - @ApiParam(value = EXCLUDE_QUERY_D, name = LogSearchConstants.REQUEST_PARAM_EXCLUDE_QUERY) - void setExcludeQuery(String excludeQuery); -} diff --git 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java deleted file mode 100644 index d3832c12655..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.ID_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.SCROLL_TYPE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.NUMBER_ROWS_D; - -public interface LogTruncatedParamDefinition { - - String getId(); - - @ApiParam(value = ID_D, name = LogSearchConstants.REQUEST_PARAM_ID) - void setId(String id); - - String getScrollType(); - - @ApiParam(value = SCROLL_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_SCROLL_TYPE) - void setScrollType(String scrollType); - - Integer getNumberRows(); - - @ApiParam(value = NUMBER_ROWS_D, name = LogSearchConstants.REQUEST_PARAM_NUMBER_ROWS) - void setNumberRows(Integer numberRows); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java deleted file mode 100644 index 8b275b6c257..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request; - -public interface SearchRequest extends ClustersParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java deleted file mode 100644 index 9306bb55455..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.HOST_PARAMS_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.LEVEL_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.FILE_NAME_D; - -public interface ServiceLogParamDefinition extends HostComponentParamDefinition { - - String getLevel(); - - @ApiParam(value = LEVEL_D, name = LogSearchConstants.REQUEST_PARAM_LEVEL) - void setLevel(String level); - - String getFileName(); - - @ApiParam(value = FILE_NAME_D, name = LogSearchConstants.REQUEST_PARAM_FILE_NAME) - void setFileName(String fileName); - - String getHostList(); - - @ApiParam(value = HOST_PARAMS_D, name = LogSearchConstants.REQUEST_PARAM_HOSTS) - void setHostList(String hostList); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java deleted file mode 100644 index 9d8f1a680f6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.FIND_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.KEYWORD_TYPE_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.SOURCE_LOG_ID_D; -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.TOKEN_D; - -public interface ServiceLogSearchParamDefinition { - - String getKeyWord(); - - @ApiParam(value = FIND_D, name = LogSearchConstants.REQUEST_PARAM_KEYWORD) - void setKeyWord(String keyWord); - - String getSourceLogId(); - - @ApiParam(value = SOURCE_LOG_ID_D, name = LogSearchConstants.REQUEST_PARAM_SOURCE_LOG_ID) - void setSourceLogId(String sourceLogId); - - String getKeywordType(); - - @ApiParam(value = KEYWORD_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_KEYWORD_TYPE) - void setKeywordType(String keywordType); - - String getToken(); - - @ApiParam(value = TOKEN_D, name = LogSearchConstants.REQUEST_PARAM_TOKEN) - void setToken(String token); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ShipperConfigTestParams.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ShipperConfigTestParams.java deleted file mode 100644 index 28e40dfe6cc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/ShipperConfigTestParams.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.LOG_ID_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.SHIPPER_CONFIG_D; -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.TEST_ENTRY_D; - -public interface ShipperConfigTestParams { - - String getShipperConfig(); - - @ApiParam(value = SHIPPER_CONFIG_D, name = LogSearchConstants.REQUEST_PARAM_SHIPPER_CONFIG, required = true) - void setShipperConfig(String shipperConfig); - - String getLogId(); - - @ApiParam(value = LOG_ID_D, name = LogSearchConstants.REQUEST_PARAM_LOG_ID, required = true) - void setLogId(String logId); - - String getTestEntry(); - - @ApiParam(value = TEST_ENTRY_D, name = LogSearchConstants.REQUEST_PARAM_TEST_ENTRY, required = true) - void setTestEntry(String testEntry); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/TopParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/TopParamDefinition.java deleted file mode 100644 index 97d954310d3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/TopParamDefinition.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.TOP; - -public interface TopParamDefinition { - Integer getTop(); - - @ApiParam(value = TOP, name = LogSearchConstants.REQUEST_PARAM_TOP, required = true) - void setTop(Integer top); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java deleted file mode 100644 index 3f493da9804..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.UNIT_D; - -public interface UnitParamDefinition { - - String getUnit(); - - @ApiParam(value = UNIT_D, name = LogSearchConstants.REQUEST_PARAM_UNIT) - void setUnit(String unit); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UserParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UserParamDefinition.java deleted file mode 100644 index 38ffe5de292..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UserParamDefinition.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.USER_D; - -public interface UserParamDefinition { - - String getUserList(); - - @ApiParam(value = USER_D, name = LogSearchConstants.REQUEST_PARAM_USERS) - void setUserList(String userList); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java deleted file mode 100644 index aa2be7124d8..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.common.LogSearchConstants; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.UTC_OFFSET_D; - -public interface UtcOffsetParamDefinition { - - String getUtcOffset(); - - @ApiParam(value = UTC_OFFSET_D, name = LogSearchConstants.REQUEST_PARAM_UTC_OFFSET) - void setUtcOffset(String utcOffset); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java deleted file mode 100644 index de75f15ce3d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.UnitParamDefinition; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface AuditBarGraphRequest extends BaseLogRequest, UnitParamDefinition, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java deleted file mode 100644 index 76af20bfe3a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface AuditComponentRequest extends BaseLogRequest, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java deleted file mode 100644 index ce3eefc7bd1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.LastPageParamDefinition; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface AuditLogRequest extends BaseLogRequest, LastPageParamDefinition, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditServiceLoadRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditServiceLoadRequest.java deleted file mode 100644 index b3cd285b073..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditServiceLoadRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface AuditServiceLoadRequest extends BaseLogRequest, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java deleted file mode 100644 index aa3995d0faa..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.DateRangeParamDefinition; -import org.apache.ambari.logsearch.model.request.LogParamDefinition; - -@Marker -public interface BaseLogRequest extends CommonSearchRequest, LogParamDefinition, DateRangeParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java deleted file mode 100644 index 2d90e61739a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.BundleIdParamDefinition; -import org.apache.ambari.logsearch.model.request.ServiceLogParamDefinition; - - -@Marker -public interface BaseServiceLogRequest extends BaseLogRequest, ServiceLogParamDefinition, BundleIdParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java deleted file mode 100644 index bfa84a5b811..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.model.request.CommonSearchParamDefinition; -import org.apache.ambari.logsearch.model.request.SearchRequest; - -public interface CommonSearchRequest extends SearchRequest, CommonSearchParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/EventHistoryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/EventHistoryRequest.java deleted file mode 100644 index f3f8b61dbfa..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/EventHistoryRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.EventHistoryParamDefinition; - -@Marker -public interface EventHistoryRequest extends CommonSearchRequest, EventHistoryParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java deleted file mode 100644 index 460591a07df..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.FieldParamDefinition; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface FieldAuditBarGraphRequest extends AuditBarGraphRequest, FieldParamDefinition, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java deleted file mode 100644 index e00c69fa640..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.FieldParamDefinition; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface FieldAuditLogRequest extends BaseLogRequest, FieldParamDefinition, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/HostLogFilesRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/HostLogFilesRequest.java deleted file mode 100644 index 7b3cf3bb751..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/HostLogFilesRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.HostComponentParamDefinition; -import org.apache.ambari.logsearch.model.request.SearchRequest; - -@Marker -public interface HostLogFilesRequest extends HostComponentParamDefinition, SearchRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java deleted file mode 100644 index 74edc144d6e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.AnyGraphParamDefinition; -import org.apache.ambari.logsearch.model.request.UnitParamDefinition; - -@Marker -public interface ServiceAnyGraphRequest extends ServiceLogRequest, AnyGraphParamDefinition, UnitParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java deleted file mode 100644 index 1b7cb865793..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.UnitParamDefinition; - -@Marker -public interface ServiceGraphRequest extends ServiceLogRequest, UnitParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogAggregatedInfoRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogAggregatedInfoRequest.java deleted file mode 100644 index c2833da5662..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogAggregatedInfoRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; - -@Marker -public interface ServiceLogAggregatedInfoRequest extends BaseServiceLogRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogComponentHostRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogComponentHostRequest.java deleted file mode 100644 index 6e1dc43f290..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogComponentHostRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; - -@Marker -public interface ServiceLogComponentHostRequest extends ServiceLogRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogComponentLevelRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogComponentLevelRequest.java deleted file mode 100644 index 0ff1b4fc545..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogComponentLevelRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; - -@Marker -public interface ServiceLogComponentLevelRequest extends ServiceLogRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java deleted file mode 100644 index 3846ad51c42..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.FormatParamDefinition; -import org.apache.ambari.logsearch.model.request.UtcOffsetParamDefinition; - -import javax.ws.rs.QueryParam; - -@Marker -public interface ServiceLogExportRequest extends ServiceLogRequest, FormatParamDefinition, UtcOffsetParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogHostComponentRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogHostComponentRequest.java deleted file mode 100644 index 4a04d4c5b33..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogHostComponentRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; - -@Marker -public interface ServiceLogHostComponentRequest extends ServiceLogRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogLevelCountRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogLevelCountRequest.java deleted file mode 100644 index e32a9eb7125..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogLevelCountRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; - -@Marker -public interface ServiceLogLevelCountRequest extends BaseServiceLogRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java deleted file mode 100644 index d6e485ce2a6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.LastPageParamDefinition; -import org.apache.ambari.logsearch.model.request.ServiceLogSearchParamDefinition; - -@Marker -public interface ServiceLogRequest extends BaseServiceLogRequest, ServiceLogSearchParamDefinition, LastPageParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java deleted file mode 100644 index 386a9bf16c4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.LogTruncatedParamDefinition; - -@Marker -public interface ServiceLogTruncatedRequest extends ServiceLogRequest, LogTruncatedParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ShipperConfigTestRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ShipperConfigTestRequest.java deleted file mode 100644 index 0e3baeeaa4b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/ShipperConfigTestRequest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.model.request.ShipperConfigTestParams; -import org.hibernate.validator.constraints.NotBlank; -import org.hibernate.validator.constraints.NotEmpty; - -import javax.ws.rs.FormParam; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.REQUEST_PARAM_LOG_ID; -import static org.apache.ambari.logsearch.common.LogSearchConstants.REQUEST_PARAM_SHIPPER_CONFIG; -import static org.apache.ambari.logsearch.common.LogSearchConstants.REQUEST_PARAM_TEST_ENTRY; - -public class ShipperConfigTestRequest implements ShipperConfigTestParams { - - @NotBlank - @FormParam(REQUEST_PARAM_LOG_ID) - private String logId; - - @NotBlank - @FormParam(REQUEST_PARAM_TEST_ENTRY) - private String testEntry; - - @NotEmpty - @FormParam(REQUEST_PARAM_SHIPPER_CONFIG) - private String shipperConfig; - - @Override - public String getLogId() { - return logId; - } - - @Override - public void setLogId(String logId) { - this.logId = logId; - } - - @Override - public String getShipperConfig() { - return shipperConfig; - } - - @Override - public void setShipperConfig(String shipperConfig) { - this.shipperConfig = shipperConfig; - } - - @Override - public String getTestEntry() { - return testEntry; - } - - public void setTestEntry(String testEntry) { - this.testEntry = testEntry; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/TopFieldAuditLogRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/TopFieldAuditLogRequest.java deleted file mode 100644 index c923dbd9f1d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/TopFieldAuditLogRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.TopParamDefinition; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface TopFieldAuditLogRequest extends FieldAuditLogRequest, TopParamDefinition, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java deleted file mode 100644 index c9c3a34def6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.FormatParamDefinition; -import org.apache.ambari.logsearch.model.request.UserParamDefinition; - -@Marker -public interface UserExportRequest extends FieldAuditLogRequest, FormatParamDefinition, UserParamDefinition { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditBarGraphBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditBarGraphBodyRequest.java deleted file mode 100644 index e63fdf98ade..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditBarGraphBodyRequest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest; - -public class AuditBarGraphBodyRequest extends BaseLogBodyRequest implements AuditBarGraphRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_UNIT) - private String unit; - - @Override - public String getUnit() { - return unit; - } - - @Override - public void setUnit(String unit) { - this.unit = unit; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditComponentBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditComponentBodyRequest.java deleted file mode 100644 index ee7e2aa04b9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditComponentBodyRequest.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest; - -public class AuditComponentBodyRequest extends BaseLogBodyRequest implements AuditComponentRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getUserList() { - return userList; - } - - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditLogBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditLogBodyRequest.java deleted file mode 100644 index 5d67b34e037..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditLogBodyRequest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest; - -public class AuditLogBodyRequest extends BaseLogBodyRequest implements AuditLogRequest { - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_LAST_PAGE) - private boolean isLastPage; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public boolean isLastPage() { - return isLastPage; - } - - @Override - public void setLastPage(boolean lastPage) { - isLastPage = lastPage; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditServiceLoadBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditServiceLoadBodyRequest.java deleted file mode 100644 index 7a4cdaa38a6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/AuditServiceLoadBodyRequest.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest; - -public class AuditServiceLoadBodyRequest extends BaseLogBodyRequest implements AuditServiceLoadRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/BaseLogBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/BaseLogBodyRequest.java deleted file mode 100644 index b65dcf89263..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/BaseLogBodyRequest.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; - -public class BaseLogBodyRequest extends CommonSearchBodyRequest implements BaseLogRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_I_MESSAGE) - private String includeMessage; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_E_MESSAGE) - private String excludeMessage; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_MUST_BE) - private String mustBe; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_MUST_NOT) - private String mustNot; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_INCLUDE_QUERY) - private String includeQuery; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_EXCLUDE_QUERY) - private String excludeQuery; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_FROM) - private String from; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_TO) - private String to; - - @Override - public String getIncludeMessage() { - return includeMessage; - } - - @Override - public void setIncludeMessage(String includeMessage) { - this.includeMessage = includeMessage; - } - - @Override - public String getExcludeMessage() { - return excludeMessage; - } - - @Override - public void setExcludeMessage(String excludeMessage) { - this.excludeMessage = excludeMessage; - } - - @Override - public String getMustBe() { - return mustBe; - } - - @Override - public void setMustBe(String mustBe) { - this.mustBe = mustBe; - } - - @Override - public String getMustNot() { - return mustNot; - } - - @Override - public void setMustNot(String mustNot) { - this.mustNot = mustNot; - } - - @Override - public String getIncludeQuery() { - return includeQuery; - } - - @Override - public void setIncludeQuery(String includeQuery) { - this.includeQuery = includeQuery; - } - - @Override - public String getExcludeQuery() { - return excludeQuery; - } - - @Override - public void setExcludeQuery(String excludeQuery) { - this.excludeQuery = excludeQuery; - } - - @Override - public String getFrom() { - return from; - } - - @Override - public void setFrom(String from) { - this.from = from; - } - - @Override - public String getTo() { - return to; - } - - @Override - public void setTo(String to) { - this.to = to; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/BaseServiceLogBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/BaseServiceLogBodyRequest.java deleted file mode 100644 index a75938ca504..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/BaseServiceLogBodyRequest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest; - -public class BaseServiceLogBodyRequest extends BaseLogBodyRequest implements BaseServiceLogRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_LEVEL) - private String level; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_HOST_NAME) - private String hostName; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME) - private String componentName; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_FILE_NAME) - private String fileName; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_BUNDLE_ID) - private String bundleId; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_HOSTS) - private String hostList; - - @Override - public String getLevel() { - return level; - } - - @Override - public void setLevel(String level) { - this.level = level; - } - - @Override - public String getHostName() { - return hostName; - } - - @Override - public void setHostName(String hostName) { - this.hostName = hostName; - } - - @Override - public String getComponentName() { - return componentName; - } - - @Override - public void setComponentName(String componentName) { - this.componentName = componentName; - } - - @Override - public String getFileName() { - return fileName; - } - - @Override - public void setFileName(String fileName) { - this.fileName = fileName; - } - - @Override - public String getBundleId() { - return bundleId; - } - - @Override - public void setBundleId(String bundleId) { - this.bundleId = bundleId; - } - - @Override - public String getHostList() { - return hostList; - } - - @Override - public void setHostList(String hostList) { - this.hostList = hostList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ClusterBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ClusterBodyRequest.java deleted file mode 100644 index 313d7e27628..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ClusterBodyRequest.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.ClustersParamDefinition; - -import javax.annotation.Nullable; - -public class ClusterBodyRequest implements ClustersParamDefinition { - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) - @Nullable - private String clusters; - - @Override - public String getClusters() { - return clusters; - } - - @Override - public void setClusters(String clusters) { - this.clusters = clusters; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/CommonSearchBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/CommonSearchBodyRequest.java deleted file mode 100644 index 7c0befc0014..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/CommonSearchBodyRequest.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest; -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; - -import javax.annotation.Nullable; -import javax.ws.rs.DefaultValue; - -public class CommonSearchBodyRequest implements CommonSearchRequest { - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_START_INDEX) - private String startIndex; - - @JsonProperty(value = LogSearchConstants.REQUEST_PARAM_PAGE, defaultValue = LogSearchConstants.REQUEST_PARAM_PAGE_DEFAULT_VALUE) - @DefaultValue(LogSearchConstants.REQUEST_PARAM_PAGE_DEFAULT_VALUE) - private String page = LogSearchConstants.REQUEST_PARAM_PAGE_DEFAULT_VALUE; - - @JsonProperty(value = LogSearchConstants.REQUEST_PARAM_PAGE_SIZE, defaultValue = LogSearchConstants.REQUEST_PARAM_PAGE_SIZE_DEFAULT_VALUE) - @DefaultValue(LogSearchConstants.REQUEST_PARAM_PAGE_SIZE_DEFAULT_VALUE) - private String pageSize = LogSearchConstants.REQUEST_PARAM_PAGE_SIZE_DEFAULT_VALUE; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_SORT_BY) - private String sortBy; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_SORT_TYPE) - private String sortType; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_START_TIME) - private String startTime; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_END_TIME) - private String endTime; - - @Nullable - @JsonProperty(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) - private String clusters; - - @Override - public String getStartIndex() { - return startIndex; - } - - @Override - public void setStartIndex(String startIndex) { - this.startIndex = startIndex; - } - - @Override - public String getPage() { - return page; - } - - @Override - public void setPage(String page) { - this.page = page; - } - - @Override - public String getPageSize() { - return pageSize; - } - - @Override - public void setPageSize(String pageSize) { - this.pageSize = pageSize; - } - - @Override - public String getSortBy() { - return sortBy; - } - - @Override - public void setSortBy(String sortBy) { - this.sortBy = sortBy; - } - - @Override - public String getSortType() { - return sortType; - } - - @Override - public void setSortType(String sortType) { - this.sortType = sortType; - } - - @Override - public String getStartTime() { - return startTime; - } - - @Override - public void setStartTime(String startTime) { - this.startTime = startTime; - } - - @Override - public String getEndTime() { - return endTime; - } - - @Override - public void setEndTime(String endTime) { - this.endTime = endTime; - } - - @Override - public String getClusters() { - return clusters; - } - - @Override - public void setClusters(String clusters) { - this.clusters = clusters; - } - - @Override - public String toString() { - return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/FieldAuditBarGraphBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/FieldAuditBarGraphBodyRequest.java deleted file mode 100644 index 15f44591a69..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/FieldAuditBarGraphBodyRequest.java +++ 
/dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.FieldAuditBarGraphRequest; - -public class FieldAuditBarGraphBodyRequest extends AuditBarGraphBodyRequest implements FieldAuditBarGraphRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_FIELD) - private String field; - - @Override - public String getField() { - return field; - } - - @Override - public void setField(String field) { - this.field = field; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/FieldAuditLogBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/FieldAuditLogBodyRequest.java deleted file mode 100644 index b18f0ea193c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/FieldAuditLogBodyRequest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest; - -public class FieldAuditLogBodyRequest extends BaseLogBodyRequest implements FieldAuditLogRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_FIELD) - private String field; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getField() { - return field; - } - - @Override - public void setField(String field) { - this.field = field; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/HostLogFilesBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/HostLogFilesBodyRequest.java deleted file mode 100644 index 4a537285926..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/HostLogFilesBodyRequest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.HostLogFilesRequest; - -import javax.annotation.Nullable; -import javax.validation.constraints.NotNull; - -public class HostLogFilesBodyRequest implements HostLogFilesRequest { - @NotNull - @JsonProperty(LogSearchConstants.REQUEST_PARAM_HOST_NAME) - private String hostName; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME) - private String componentName; - - @Nullable - @JsonProperty(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) - private String clusters; - - @Override - public String getHostName() { - return hostName; - } - - @Override - public void setHostName(String hostName) { - this.hostName = hostName; - } - - @Override - public String getComponentName() { - return componentName; - } - - @Override - public void setComponentName(String componentName) { - this.componentName = componentName; - } - - @Override - public String getClusters() { - return clusters; - } - - @Override - public void setClusters(String clusters) { - this.clusters = clusters; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceAnyGraphBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceAnyGraphBodyRequest.java deleted file mode 100644 index 23b186a62c0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceAnyGraphBodyRequest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest; - -public class ServiceAnyGraphBodyRequest extends ServiceLogBodyRequest implements ServiceAnyGraphRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_XAXIS) - private String xAxis; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_YAXIS) - private String yAxis; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_STACK_BY) - private String stackBy; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_UNIT) - private String unit; - - @Override - public String getxAxis() { - return xAxis; - } - - @Override - public void setxAxis(String xAxis) { - this.xAxis = xAxis; - } - - @Override - public String getyAxis() { - return yAxis; - } - - @Override - public void setyAxis(String yAxis) { - this.yAxis = yAxis; - } - - @Override - public String getStackBy() { - return stackBy; - } - - @Override - public void setStackBy(String stackBy) { - this.stackBy = stackBy; - } - - @Override - public String getUnit() { - return unit; - } - - @Override - public void setUnit(String unit) { - this.unit = unit; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceGraphBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceGraphBodyRequest.java deleted file mode 100644 index 7d7d2338765..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceGraphBodyRequest.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest; - -public class ServiceGraphBodyRequest extends ServiceLogBodyRequest implements ServiceGraphRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_UNIT) - private String unit; - - @Override - public String getUnit() { - return unit; - } - - @Override - public void setUnit(String unit) { - this.unit = unit; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogAggregatedInfoBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogAggregatedInfoBodyRequest.java deleted file mode 100644 index 0f2fc6b56ab..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogAggregatedInfoBodyRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogAggregatedInfoRequest; - -@Marker -public class ServiceLogAggregatedInfoBodyRequest extends BaseServiceLogBodyRequest implements ServiceLogAggregatedInfoRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogBodyRequest.java deleted file mode 100644 index a7d4d794faf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogBodyRequest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogRequest; - -public class ServiceLogBodyRequest extends BaseServiceLogBodyRequest implements ServiceLogRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_KEYWORD) - private String keyWord; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_SOURCE_LOG_ID) - private String sourceLogId; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_KEYWORD_TYPE) - private String keywordType; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_TOKEN) - private String token; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_LAST_PAGE) - private boolean isLastPage; - - @Override - public String getKeyWord() { - return keyWord; - } - - @Override - public void setKeyWord(String keyWord) { - this.keyWord = keyWord; - } - - @Override - public String getSourceLogId() { - return sourceLogId; - } - - @Override - public void setSourceLogId(String sourceLogId) { - this.sourceLogId = sourceLogId; - } - - @Override - public String getKeywordType() { - return keywordType; - } - - @Override - public void setKeywordType(String keywordType) { - this.keywordType = keywordType; - } - - @Override - public String getToken() { - return token; - } - - @Override - public void setToken(String token) { - this.token = token; - } - - @Override - public boolean isLastPage() { - return isLastPage; - } - - @Override - public void setLastPage(boolean lastPage) { - isLastPage = lastPage; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogComponentHostBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogComponentHostBodyRequest.java deleted file mode 100644 index fd117e0bcb6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogComponentHostBodyRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentHostRequest; - -@Marker -public class ServiceLogComponentHostBodyRequest extends ServiceLogBodyRequest implements ServiceLogComponentHostRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogComponentLevelBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogComponentLevelBodyRequest.java deleted file mode 100644 index 30bfa5452e8..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogComponentLevelBodyRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentLevelRequest; - -@Marker -public class ServiceLogComponentLevelBodyRequest extends ServiceLogBodyRequest implements ServiceLogComponentLevelRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogExportBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogExportBodyRequest.java deleted file mode 100644 index a75e18dd5b8..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogExportBodyRequest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogExportRequest; - -public class ServiceLogExportBodyRequest extends ServiceLogBodyRequest implements ServiceLogExportRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_FORMAT) - private String format; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_UTC_OFFSET) - private String utcOffset; - - @Override - public String getFormat() { - return format; - } - - @Override - public void setFormat(String format) { - this.format = format; - } - - @Override - public String getUtcOffset() { - return utcOffset; - } - - @Override - public void setUtcOffset(String utcOffset) { - this.utcOffset = utcOffset; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogHostComponentBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogHostComponentBodyRequest.java deleted file mode 100644 index fe7663f4d43..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogHostComponentBodyRequest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogHostComponentRequest; - -public class ServiceLogHostComponentBodyRequest extends ServiceLogBodyRequest implements ServiceLogHostComponentRequest { - @JsonProperty("hostName") - @ApiParam - String hostName; - - @Override - public String getHostName() { - return hostName; - } - - @Override - public void setHostName(String hostName) { - this.hostName = hostName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogLevelCountBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogLevelCountBodyRequest.java deleted file mode 100644 index 1b8e7f2f189..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogLevelCountBodyRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest; - -@Marker -public class ServiceLogLevelCountBodyRequest extends BaseServiceLogBodyRequest implements ServiceLogLevelCountRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogTruncatedBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogTruncatedBodyRequest.java deleted file mode 100644 index 41a8d856304..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/ServiceLogTruncatedBodyRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest; - -public class ServiceLogTruncatedBodyRequest extends ServiceLogBodyRequest implements ServiceLogTruncatedRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_ID) - private String id; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_SCROLL_TYPE) - private String scrollType; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_NUMBER_ROWS) - private Integer numberRows; - - @Override - public String getId() { - return id; - } - - @Override - public void setId(String id) { - this.id = id; - } - - @Override - public String getScrollType() { - return scrollType; - } - - @Override - public void setScrollType(String scrollType) { - this.scrollType = scrollType; - } - - @Override - public Integer getNumberRows() { - return numberRows; - } - - @Override - public void setNumberRows(Integer numberRows) { - this.numberRows = numberRows; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/TopFieldAuditLogBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/TopFieldAuditLogBodyRequest.java deleted file mode 100644 index 6ffbb0c8245..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/TopFieldAuditLogBodyRequest.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import org.apache.ambari.logsearch.model.request.impl.TopFieldAuditLogRequest; - -public class TopFieldAuditLogBodyRequest extends FieldAuditLogBodyRequest implements TopFieldAuditLogRequest { - @JsonIgnore - private Integer top; - - @Override - public Integer getTop() { - return top; - } - - @Override - public void setTop(Integer top) { - this.top = top; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/UserExportBodyRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/UserExportBodyRequest.java deleted file mode 100644 index 2a0d82e4f2a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/body/UserExportBodyRequest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.body; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.UserExportRequest; - -public class UserExportBodyRequest extends FieldAuditLogBodyRequest implements UserExportRequest { - @JsonProperty(LogSearchConstants.REQUEST_PARAM_FORMAT) - private String format; - - @JsonProperty(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getFormat() { - return format; - } - - @Override - public void setFormat(String format) { - this.format = format; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditBarGraphQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditBarGraphQueryRequest.java deleted file mode 100644 index 6cbc54bf559..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditBarGraphQueryRequest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest; - -import javax.ws.rs.QueryParam; - -public class AuditBarGraphQueryRequest extends BaseLogQueryRequest implements AuditBarGraphRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT) - private String unit; - - @Override - public String getUnit() { - return unit; - } - - @Override - public void setUnit(String unit) { - this.unit = unit; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditComponentQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditComponentQueryRequest.java deleted file mode 100644 index 271ecc4e747..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditComponentQueryRequest.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest; - -import javax.ws.rs.QueryParam; - -public class AuditComponentQueryRequest extends BaseLogQueryRequest implements AuditComponentRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getUserList() { - return userList; - } - - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditLogQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditLogQueryRequest.java deleted file mode 100644 index a483a274c32..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditLogQueryRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest; - -import javax.ws.rs.QueryParam; - -public class AuditLogQueryRequest extends BaseLogQueryRequest implements AuditLogRequest { - - @QueryParam(LogSearchConstants.REQUEST_PARAM_LAST_PAGE) - private boolean isLastPage; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public boolean isLastPage() { - return isLastPage; - } - - @Override - public void setLastPage(boolean lastPage) { - isLastPage = lastPage; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditServiceLoadQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditServiceLoadQueryRequest.java deleted file mode 100644 index 5b169053d62..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/AuditServiceLoadQueryRequest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest; - -import javax.ws.rs.QueryParam; - -public class AuditServiceLoadQueryRequest extends BaseLogQueryRequest implements AuditServiceLoadRequest { - - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/BaseLogQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/BaseLogQueryRequest.java deleted file mode 100644 index 41a10716fa2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/BaseLogQueryRequest.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; - -import javax.ws.rs.QueryParam; - -public class BaseLogQueryRequest extends CommonSearchQueryRequest implements BaseLogRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_I_MESSAGE) - private String includeMessage; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_E_MESSAGE) - private String excludeMessage; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_MUST_BE) - private String mustBe; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_MUST_NOT) - private String mustNot; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_INCLUDE_QUERY) - private String includeQuery; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_EXCLUDE_QUERY) - private String excludeQuery; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_FROM) - private String from; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_TO) - private String to; - - @Override - public String getIncludeMessage() { - return includeMessage; - } - - @Override - public void setIncludeMessage(String includeMessage) { - this.includeMessage = includeMessage; - } - - @Override - public String getExcludeMessage() { - return excludeMessage; - } - - @Override - public void setExcludeMessage(String excludeMessage) { - this.excludeMessage = excludeMessage; - } - - @Override - public String getMustBe() { - return mustBe; - } - - @Override - public void setMustBe(String mustBe) { - this.mustBe = mustBe; - } - - @Override - public String getMustNot() { - return mustNot; - } - - @Override - public void setMustNot(String mustNot) { - this.mustNot = mustNot; - } - - @Override - public String getIncludeQuery() { - return includeQuery; - } - - @Override - public void setIncludeQuery(String includeQuery) { - this.includeQuery = includeQuery; - } - - @Override - public String getExcludeQuery() { - return excludeQuery; - } - - @Override - public void setExcludeQuery(String excludeQuery) { - this.excludeQuery = excludeQuery; - } - - @Override - public String getFrom() { - return from; - } - - @Override - public void setFrom(String from) { - this.from = from; - } - - @Override - public String getTo() { - return to; - } - - @Override - public void setTo(String to) { - this.to = to; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/BaseServiceLogQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/BaseServiceLogQueryRequest.java deleted file mode 100644 index 976aa0e18f9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/BaseServiceLogQueryRequest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest; - -import javax.ws.rs.QueryParam; - -public class BaseServiceLogQueryRequest extends BaseLogQueryRequest implements BaseServiceLogRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_LEVEL) - private String level; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_HOST_NAME) - private String hostName; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME) - private String componentName; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_FILE_NAME) - private String fileName; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_BUNDLE_ID) - private String bundleId; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_HOSTS) - private String hostList; - - @Override - public String getLevel() { - return level; - } - - @Override - public void setLevel(String level) { - this.level = level; - } - - @Override - public String getHostName() { - return hostName; - } - - @Override - public void setHostName(String hostName) { - this.hostName = hostName; - } - - @Override - public String getComponentName() { - return componentName; - } - - @Override - public void setComponentName(String componentName) { - this.componentName = componentName; - } - - @Override - public String getFileName() { - return fileName; - } - - @Override - public void setFileName(String fileName) { - this.fileName = fileName; - } - - @Override - public String getBundleId() { - return bundleId; - } - - @Override - public void setBundleId(String bundleId) { - this.bundleId = bundleId; - } - - @Override - public String getHostList() { - return hostList; - } - - @Override - public void setHostList(String hostList) { - this.hostList = hostList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/CommonSearchQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/CommonSearchQueryRequest.java deleted file mode 100644 index 2c9caf99bd9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/CommonSearchQueryRequest.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest; -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; - -import javax.annotation.Nullable; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.QueryParam; - -public class CommonSearchQueryRequest implements CommonSearchRequest { - - @QueryParam(LogSearchConstants.REQUEST_PARAM_START_INDEX) - private String startIndex; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_PAGE) - @DefaultValue(LogSearchConstants.REQUEST_PARAM_PAGE_DEFAULT_VALUE) - private String page; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_PAGE_SIZE) - @DefaultValue(LogSearchConstants.REQUEST_PARAM_PAGE_SIZE_DEFAULT_VALUE) - private String pageSize; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_SORT_BY) - private String sortBy; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_SORT_TYPE) - private String sortType; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_START_TIME) - private String startTime; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_END_TIME) - private String endTime; - - @Nullable - @QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) - private String clusters; - - @Override - public String getStartIndex() { - return startIndex; - } - - @Override - public void setStartIndex(String startIndex) { - this.startIndex = startIndex; - } - - @Override - public String getPage() { - return page; - } - - @Override - public void setPage(String page) { - this.page = page; - } - - @Override - public String getPageSize() { - return pageSize; - } - - @Override - public void setPageSize(String pageSize) { - this.pageSize = pageSize; - } - - @Override - public String getSortBy() { - return sortBy; - } - - @Override - public void setSortBy(String sortBy) { - this.sortBy = sortBy; - } - - @Override - public String getSortType() { - return sortType; - } - - @Override - public void setSortType(String sortType) { - this.sortType = sortType; - } - - @Override - public String getStartTime() { - return startTime; - } - - @Override - public void setStartTime(String startTime) { - this.startTime = startTime; - } - - @Override - public String getEndTime() { - return endTime; - } - - @Override - public void setEndTime(String endTime) { - this.endTime = endTime; - } - - @Override - public String getClusters() { - return clusters; - } - - @Override - public void setClusters(String clusters) { - this.clusters = clusters; - } - - @Override - public String toString() { - return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/EventHistoryQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/EventHistoryQueryRequest.java deleted file mode 100644 index d943fadb7f3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/EventHistoryQueryRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.EventHistoryRequest; - -import javax.ws.rs.QueryParam; - -public class EventHistoryQueryRequest extends CommonSearchQueryRequest implements EventHistoryRequest { - - @QueryParam(LogSearchConstants.REQUEST_PARAM_FILTER_NAME) - private String filterName; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_ROW_TYPE) - private String rowType; - - @Override - public String getFilterName() { - return filterName; - } - - @Override - public void setFilterName(String filterName) { - this.filterName = filterName; - } - - @Override - public String getRowType() { - return rowType; - } - - @Override - public void setRowType(String rowType) { - this.rowType = rowType; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/FieldAuditBarGraphQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/FieldAuditBarGraphQueryRequest.java deleted file mode 100644 index 198e1c6b96a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/FieldAuditBarGraphQueryRequest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.FieldAuditBarGraphRequest; - -import javax.ws.rs.QueryParam; - -public class FieldAuditBarGraphQueryRequest extends AuditBarGraphQueryRequest implements FieldAuditBarGraphRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_FIELD) - private String field; - - @Override - public String getField() { - return field; - } - - @Override - public void setField(String field) { - this.field = field; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/FieldAuditLogQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/FieldAuditLogQueryRequest.java deleted file mode 100644 index be3497d131a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/FieldAuditLogQueryRequest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest; - -import javax.ws.rs.QueryParam; - -public class FieldAuditLogQueryRequest extends BaseLogQueryRequest implements FieldAuditLogRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_FIELD) - private String field; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getField() { - return field; - } - - @Override - public void setField(String field) { - this.field = field; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/HostLogFilesQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/HostLogFilesQueryRequest.java deleted file mode 100644 index 75d874f8c11..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/HostLogFilesQueryRequest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.HostLogFilesRequest; - -import javax.annotation.Nullable; -import javax.validation.constraints.NotNull; -import javax.ws.rs.QueryParam; - -public class HostLogFilesQueryRequest implements HostLogFilesRequest { - @NotNull - @QueryParam(LogSearchConstants.REQUEST_PARAM_HOST_NAME) - private String hostName; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME) - private String componentName; - - @Nullable - @QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) - private String clusters; - - @Override - public String getHostName() { - return hostName; - } - - @Override - public void setHostName(String hostName) { - this.hostName = hostName; - } - - @Override - public String getComponentName() { - return componentName; - } - - @Override - public void setComponentName(String componentName) { - this.componentName = componentName; - } - - @Override - public String getClusters() { - return clusters; - } - - @Override - public void setClusters(String clusters) { - this.clusters = clusters; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceAnyGraphQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceAnyGraphQueryRequest.java deleted file mode 100644 index 5c76c1c59b2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceAnyGraphQueryRequest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest; - -import javax.ws.rs.QueryParam; - -public class ServiceAnyGraphQueryRequest extends ServiceLogQueryRequest implements ServiceAnyGraphRequest { - - @QueryParam(LogSearchConstants.REQUEST_PARAM_XAXIS) - private String xAxis; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_YAXIS) - private String yAxis; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_STACK_BY) - private String stackBy; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT) - private String unit; - - @Override - public String getxAxis() { - return xAxis; - } - - @Override - public void setxAxis(String xAxis) { - this.xAxis = xAxis; - } - - @Override - public String getyAxis() { - return yAxis; - } - - @Override - public void setyAxis(String yAxis) { - this.yAxis = yAxis; - } - - @Override - public String getStackBy() { - return stackBy; - } - - @Override - public void setStackBy(String stackBy) { - this.stackBy = stackBy; - } - - @Override - public String getUnit() { - return unit; - } - - @Override - public void setUnit(String unit) { - this.unit = unit; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceGraphQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceGraphQueryRequest.java deleted file mode 100644 index 5768a7705e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceGraphQueryRequest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest; - -import javax.ws.rs.QueryParam; - -public class ServiceGraphQueryRequest extends ServiceLogQueryRequest implements ServiceGraphRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT) - private String unit; - - @Override - public String getUnit() { - return unit; - } - - @Override - public void setUnit(String unit) { - this.unit = unit; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogAggregatedInfoQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogAggregatedInfoQueryRequest.java deleted file mode 100644 index 9f29da12d63..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogAggregatedInfoQueryRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogAggregatedInfoRequest; - -@Marker -public class ServiceLogAggregatedInfoQueryRequest extends BaseServiceLogQueryRequest implements ServiceLogAggregatedInfoRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogComponentHostQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogComponentHostQueryRequest.java deleted file mode 100644 index 41b2e5bb197..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogComponentHostQueryRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentHostRequest; - -@Marker -public class ServiceLogComponentHostQueryRequest extends ServiceLogQueryRequest implements ServiceLogComponentHostRequest{ -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogComponentLevelQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogComponentLevelQueryRequest.java deleted file mode 100644 index 82804eb8b35..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogComponentLevelQueryRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentLevelRequest; - -@Marker -public class ServiceLogComponentLevelQueryRequest extends ServiceLogQueryRequest implements ServiceLogComponentLevelRequest{ -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogExportQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogExportQueryRequest.java deleted file mode 100644 index 6e4536915e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogExportQueryRequest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogExportRequest; - -import javax.ws.rs.QueryParam; - -public class ServiceLogExportQueryRequest extends ServiceLogQueryRequest implements ServiceLogExportRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_FORMAT) - private String format; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_UTC_OFFSET) - private String utcOffset; - - @Override - public String getFormat() { - return format; - } - - @Override - public void setFormat(String format) { - this.format = format; - } - - @Override - public String getUtcOffset() { - return utcOffset; - } - - @Override - public void setUtcOffset(String utcOffset) { - this.utcOffset = utcOffset; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogHostComponentQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogHostComponentQueryRequest.java deleted file mode 100644 index 529fecd8ca6..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogHostComponentQueryRequest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import io.swagger.annotations.ApiParam; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogHostComponentRequest; - -import javax.ws.rs.QueryParam; - -public class ServiceLogHostComponentQueryRequest extends ServiceLogQueryRequest implements ServiceLogHostComponentRequest { - @QueryParam("hostName") - @ApiParam - String hostName; - - @Override - public String getHostName() { - return hostName; - } - - @Override - public void setHostName(String hostName) { - this.hostName = hostName; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogLevelCountQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogLevelCountQueryRequest.java deleted file mode 100644 index a3465cd569b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogLevelCountQueryRequest.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.Marker; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest; - -@Marker -public class ServiceLogLevelCountQueryRequest extends BaseServiceLogQueryRequest implements ServiceLogLevelCountRequest { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogQueryRequest.java deleted file mode 100644 index 08244a7bd1f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogQueryRequest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogRequest; - -import javax.ws.rs.QueryParam; - -public class ServiceLogQueryRequest extends BaseServiceLogQueryRequest implements ServiceLogRequest { - - @QueryParam(LogSearchConstants.REQUEST_PARAM_KEYWORD) - private String keyWord; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_SOURCE_LOG_ID) - private String sourceLogId; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_KEYWORD_TYPE) - private String keywordType; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_TOKEN) - private String token; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_LAST_PAGE) - private boolean isLastPage; - - @Override - public String getKeyWord() { - return keyWord; - } - - @Override - public void setKeyWord(String keyWord) { - this.keyWord = keyWord; - } - - @Override - public String getSourceLogId() { - return sourceLogId; - } - - @Override - public void setSourceLogId(String sourceLogId) { - this.sourceLogId = sourceLogId; - } - - @Override - public String getKeywordType() { - return keywordType; - } - - @Override - public void setKeywordType(String keywordType) { - this.keywordType = keywordType; - } - - @Override - public String getToken() { - return token; - } - - @Override - public void setToken(String token) { - this.token = token; - } - - @Override - public boolean isLastPage() { - return isLastPage; - } - - @Override - public void setLastPage(boolean lastPage) { - isLastPage = lastPage; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogTruncatedQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogTruncatedQueryRequest.java deleted file mode 100644 index 8b27f53fa17..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/ServiceLogTruncatedQueryRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest; - -import javax.ws.rs.QueryParam; - -public class ServiceLogTruncatedQueryRequest extends ServiceLogQueryRequest implements ServiceLogTruncatedRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_ID) - private String id; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_SCROLL_TYPE) - private String scrollType; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_NUMBER_ROWS) - private Integer numberRows; - - @Override - public String getId() { - return id; - } - - @Override - public void setId(String id) { - this.id = id; - } - - @Override - public String getScrollType() { - return scrollType; - } - - @Override - public void setScrollType(String scrollType) { - this.scrollType = scrollType; - } - - @Override - public Integer getNumberRows() { - return numberRows; - } - - @Override - public void setNumberRows(Integer numberRows) { - this.numberRows = numberRows; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/TopFieldAuditLogQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/TopFieldAuditLogQueryRequest.java deleted file mode 100644 index d6a3c7a0e89..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/TopFieldAuditLogQueryRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.TopFieldAuditLogRequest; - -import javax.ws.rs.PathParam; -import javax.ws.rs.QueryParam; - -public class TopFieldAuditLogQueryRequest extends FieldAuditLogQueryRequest implements TopFieldAuditLogRequest { - @PathParam(LogSearchConstants.REQUEST_PARAM_TOP) - private Integer top; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public Integer getTop() { - return top; - } - - @Override - public void setTop(Integer top) { - this.top = top; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/UserExportQueryRequest.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/UserExportQueryRequest.java deleted file mode 100644 index f3219f6c7ee..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/request/impl/query/UserExportQueryRequest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
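TopFieldAuditLogQueryRequest above combines a @PathParam (the top-N limit taken from the URL path) with @QueryParam fields. A short JAX-RS sketch of that combination; the path template and parameter names are assumptions for illustration only, not the actual Log Search routes.

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;

// GET /audit/logs/resources/10?users=admin,hive would bind top=10 and userList="admin,hive".
@Path("/audit/logs/resources")
public class TopFieldResourceSketch {

  @GET
  @Path("/{top}")
  public String topResources(@PathParam("top") Integer top,
                             @QueryParam("users") String userList) {
    return "top=" + top + " users=" + userList;
  }
}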
- */ -package org.apache.ambari.logsearch.model.request.impl.query; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.UserExportRequest; - -import javax.ws.rs.QueryParam; - -public class UserExportQueryRequest extends FieldAuditLogQueryRequest implements UserExportRequest { - @QueryParam(LogSearchConstants.REQUEST_PARAM_FORMAT) - private String format; - - @QueryParam(LogSearchConstants.REQUEST_PARAM_USERS) - private String userList; - - @Override - public String getFormat() { - return format; - } - - @Override - public void setFormat(String format) { - this.format = format; - } - - @Override - public String getUserList() { - return userList; - } - - @Override - public void setUserList(String userList) { - this.userList = userList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java deleted file mode 100644 index 41eca1efc6d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.util.Date; -import java.util.List; - -@JsonIgnoreProperties(ignoreUnknown = true) -public interface AuditLogData extends CommonLogData { - - @JsonProperty("logType") - String getLogType(); - - void setLogType(String logType); - - @JsonProperty("policy") - String getPolicy(); - - void setPolicy(String policy); - - @JsonProperty("access") - String getAccess(); - - void setAccess(String access); - - @JsonProperty("action") - String getAction(); - - void setAction(String action); - - @JsonProperty("agent") - String getAgent(); - - void setAgent(String agent); - - @JsonProperty("agentHost") - String getAgentHost(); - - void setAgentHost(String agentHost); - - @JsonProperty("cliIP") - String getClientIp(); - - void setClientIp(String clientIp); - - @JsonProperty("cliType") - String getClientType(); - - public void setClientType(String clientType); - - @JsonProperty("reqContext") - String getRequestContext(); - - void setRequestContext(String requestContext); - - @JsonProperty("enforcer") - String getEnforcer(); - - void setEnforcer(String enforcer); - - @JsonProperty("evtTime") - Date getEventTime(); - - void setEventTime(Date eventTime); - - @JsonProperty("reason") - String getReason(); - - void setReason(String reason); - - @JsonProperty("proxyUsers") - List getProxyUsers(); - - void setProxyUsers(List proxyUsers); - - @JsonProperty("repo") - String getRepo(); - - void setRepo(String repo); - - @JsonProperty("repoType") - Integer getRepoType(); - - void setRepoType(Integer repoType); - - @JsonProperty("reqData") - String getRequestData(); - - void setRequestData(String requestData); - - @JsonProperty("reqUser") - String getRequestUser(); - - void setRequestUser(String requestUser); - - @JsonProperty("resType") - String getResponseType(); - - void setResponseType(String requestType); - - @JsonProperty("resource") - String getResource(); - - void setResource(String resource); - - @JsonProperty("result") - Integer getResult(); - - void setResult(Integer result); - - @JsonProperty("sess") - String getSession(); - - void setSession(String session); - - @JsonProperty("tags") - List getTags(); - - void setTags(List tags); - - @JsonProperty("tags_str") - String getTagsStr(); - - void setTagsStr(String tagsStr); - - @JsonProperty("text") - String getText(); - - void setText(String text); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java deleted file mode 100644 index a886a963ba7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
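The AuditLogData interface above relies on Jackson's @JsonProperty to expose short, Solr-style field names (reqUser, evtTime, cliIP, ...) instead of the Java getter names. A self-contained sketch of that renaming with a hypothetical POJO (not part of the deleted code):

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonPropertySketch {

  // Hypothetical implementation standing in for an AuditLogData bean.
  public static class AuditEntry {
    private final String requestUser = "admin";

    @JsonProperty("reqUser")   // same renaming pattern as the deleted interface
    public String getRequestUser() { return requestUser; }
  }

  public static void main(String[] args) throws Exception {
    // Prints {"reqUser":"admin"} - the JSON keeps the short field name, not "requestUser".
    System.out.println(new ObjectMapper().writeValueAsString(new AuditEntry()));
  }
}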
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.List; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class AuditLogResponse extends LogSearchResponse { - - @ApiModelProperty - private List logList; - - @Override - public List getLogList() { - return logList; - } - - @Override - public void setLogList(List logList) { - this.logList = logList; - } - - @Override - public int getListSize() { - return logList == null ? 0 : logList.size(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java deleted file mode 100644 index 3f2bd6f6f5b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class BarGraphData implements Serializable { - - @ApiModelProperty - private Collection dataCount = new ArrayList<>(); - @ApiModelProperty - private String name; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Collection getDataCount() { - return dataCount; - } - - public void setDataCount(Collection dateValueCounts) { - this.dataCount = dateValueCounts; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java deleted file mode 100644 index 4d5d166e957..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.ArrayList; -import java.util.Collection; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class BarGraphDataListResponse { - - @ApiModelProperty - protected Collection graphData = new ArrayList<>(); - - public Collection getGraphData() { - return graphData; - } - - public void setGraphData(Collection graphData) { - this.graphData = graphData; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java deleted file mode 100644 index 21bfc5a0637..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.util.Date; -import java.util.Map; - -@JsonIgnoreProperties(ignoreUnknown = true) -public interface CommonLogData extends LogData { - - @JsonProperty("id") - String getId(); - - void setId(String id); - - @JsonProperty("case_id") - String getCaseId(); - - void setCaseId(String caseId); - - @JsonProperty("log_message") - String getLogMessage(); - - void setLogMessage(String logMessage); - - @JsonProperty("bundle_id") - String getBundleId(); - - void setBundleId(String bundleId); - - @JsonProperty("logfile_line_number") - Integer getLogFileLineNumber(); - - void setLogFileLineNumber(Integer logFileLineNumber); - - @JsonProperty("file") - String getFile(); - - void setFile(String file); - - @JsonProperty("type") - String getType(); - - void setType(String type); - - @JsonProperty("seq_num") - Long getSeqNum(); - - void setSeqNum(Long seqNum); - - @JsonProperty("message_md5") - String getMessageMd5(); - - void setMessageMd5(String messageMd5); - - @JsonProperty("cluster") - String getCluster(); - - void setCluster(String cluster); - - @JsonProperty("event_count") - Long getEventCount(); - - void setEventCount(Long eventCount); - - @JsonProperty("event_md5") - String getEventMd5(); - - void setEventMd5(String eventMd5); - - @JsonProperty("event_dur_ms") - Long getEventDurationMs(); - - void setEventDurationMs(Long eventDurationMs); - - @JsonProperty("_ttl_") - String getTtl(); - - void setTtl(String ttl); - - @JsonProperty("_expire_at_") - Date getExpire(); - - void setExpire(Date expire); - - @JsonProperty("_version_") - Long getVersion(); - - void setVersion(Long version); - - @JsonProperty("_router_field_") - Integer getRouterField(); - - void setRouterField(Integer routerField); - - @JsonAnyGetter - Map getAllDynamicFields(); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java deleted file mode 100644 index 6c15f9ca537..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
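CommonLogData above ends with an @JsonAnyGetter method, which is how fields that are not modeled explicitly (for example schemaless Solr dynamic fields) can still appear in the serialized response. A minimal Jackson sketch of that behavior, using a hypothetical class and field name:

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.LinkedHashMap;
import java.util.Map;

public class AnyGetterSketch {

  public static class DynamicLog {
    public String id = "log-1";

    private final Map<String, Object> dynamicFields = new LinkedHashMap<>();

    public DynamicLog() {
      dynamicFields.put("custom_field_s", "value");   // illustrative dynamic field
    }

    // @JsonAnyGetter merges the map entries into the top-level JSON object.
    @JsonAnyGetter
    public Map<String, Object> getAllDynamicFields() { return dynamicFields; }
  }

  public static void main(String[] args) throws Exception {
    // Prints {"id":"log-1","custom_field_s":"value"}
    System.out.println(new ObjectMapper().writeValueAsString(new DynamicLog()));
  }
}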
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonIgnoreProperties(ignoreUnknown = true) -public interface ComponentTypeLogData extends LogData { - - @JsonProperty("type") - String getType(); - - void setType(String type); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java deleted file mode 100644 index fabaad20d63..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class CountData { - - @ApiModelProperty - private String name; - - @ApiModelProperty - private Long count; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Long getCount() { - return count; - } - - public void setCount(Long count) { - this.count = count; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java deleted file mode 100644 index 2543dcc3b17..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.List; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class CountDataListResponse extends SearchResponse { - - @ApiModelProperty - private List vCounts; - - public List getvCounts() { - return vCounts; - } - - public void setvCounts(List vCounts) { - this.vCounts = vCounts; - } - - @Override - public int getListSize() { - if (vCounts != null) - return vCounts.size(); - return 0; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/EventHistoryData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/EventHistoryData.java deleted file mode 100644 index 5edbc62ba4a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/EventHistoryData.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -import io.swagger.annotations.ApiModelProperty; - -import java.util.Date; -import java.util.List; - -public class EventHistoryData { - - @ApiModelProperty - private String id; - - @ApiModelProperty - private String userName; - - @ApiModelProperty - private String filtername; - - @ApiModelProperty - private String values; - - @ApiModelProperty - private List shareNameList; - - @ApiModelProperty - private String rowType; - - public EventHistoryData() { - id = String.valueOf(new Date().getTime()); - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public String getFiltername() { - return filtername; - } - - public void setFiltername(String filtername) { - this.filtername = filtername; - } - - public List getShareNameList() { - return shareNameList; - } - - public void setShareNameList(List shareNameList) { - this.shareNameList = shareNameList; - } - - public String getValues() { - return values; - } - - public void setValues(String values) { - this.values = values; - } - - public String getRowType() { - return rowType; - } - - public void setRowType(String rowType) { - this.rowType = rowType; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/EventHistoryDataListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/EventHistoryDataListResponse.java deleted file mode 100644 index 429005f4c92..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/EventHistoryDataListResponse.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.Collection; - -@ApiModel -public class EventHistoryDataListResponse extends SearchResponse{ - - @ApiModelProperty - private String name; - - @ApiModelProperty - private Collection eventHistoryDataList; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Collection getEventHistoryDataList() { - return eventHistoryDataList; - } - - public void setEventHistoryDataList(Collection eventHistoryDataList) { - this.eventHistoryDataList = eventHistoryDataList; - } - - @Override - public int getListSize() { - return eventHistoryDataList != null ? 
eventHistoryDataList.size() : 0; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java deleted file mode 100644 index e39ec95e40f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.io.Serializable; -import java.util.List; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class GraphData implements Serializable { - - @ApiModelProperty - private String name; - - @ApiModelProperty - private Long count; - - @ApiModelProperty - private List dataList; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Long getCount() { - return count; - } - - public void setCount(Long count) { - this.count = count; - } - - public List getDataList() { - return dataList; - } - - public void setDataList(List dataList) { - this.dataList = dataList; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java deleted file mode 100644 index 4357c289672..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.List; - -@ApiModel -public class GraphDataListResponse { - - @ApiModelProperty - protected List graphData; - - public List getGraphData() { - return graphData; - } - - public void setGraphData(List graphData) { - this.graphData = graphData; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java deleted file mode 100644 index 92c51f27dda..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.ArrayList; -import java.util.List; - -@ApiModel -public class GroupListResponse extends SearchResponse { - - @ApiModelProperty - private List groupList = new ArrayList<>(); - - public List getGroupList() { - return groupList; - } - - public void setGroupList(List groupList) { - this.groupList = groupList; - } - - @Override - public int getListSize() { - if (groupList != null){ - return groupList.size(); - } - return 0; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java deleted file mode 100644 index 8cab1abeee9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonIgnoreProperties(ignoreUnknown = true) -public interface HostLogData extends LogData { - @JsonProperty("host") - String getHost(); - - void setHost(String host); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/HostLogFilesResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/HostLogFilesResponse.java deleted file mode 100644 index 27d3cf71af8..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/HostLogFilesResponse.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class HostLogFilesResponse { - - @ApiModelProperty - protected Map> hostLogFiles = new HashMap<>(); - - public Map> getHostLogFiles() { - return hostLogFiles; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java deleted file mode 100644 index ec6c34c4cf3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
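In HostLogFilesResponse above the generic type parameters were lost in this rendering (the field reads "Map> hostLogFiles"); from the getter name and the surrounding API the intended shape appears to be a map from host name to the log file paths on that host. A sketch under that assumption only (the host name and path below are made up):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HostLogFilesSketch {

  // Assumed shape: host name -> log file paths found on that host.
  private final Map<String, List<String>> hostLogFiles = new HashMap<>();

  public void addLogFile(String host, String path) {
    hostLogFiles.computeIfAbsent(host, h -> new ArrayList<>()).add(path);
  }

  public Map<String, List<String>> getHostLogFiles() { return hostLogFiles; }

  public static void main(String[] args) {
    HostLogFilesSketch resp = new HostLogFilesSketch();
    resp.addLogFile("c6401.example.com", "/var/log/hadoop/hdfs/hadoop-hdfs-namenode.log");
    // {c6401.example.com=[/var/log/hadoop/hdfs/hadoop-hdfs-namenode.log]}
    System.out.println(resp.getHostLogFiles());
  }
}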
- */ -package org.apache.ambari.logsearch.model.response; - -import org.apache.ambari.logsearch.common.Marker; - -@Marker -public interface LogData { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java deleted file mode 100644 index 5a6760646bc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -@ApiModel -public class LogFileData { - - @ApiModelProperty - private String name; - - @ApiModelProperty - private String path; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getPath() { - return path; - } - - public void setPath(String path) { - this.path = path; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java deleted file mode 100644 index 57614c37fdc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.ArrayList; -import java.util.List; - -@ApiModel -public class LogFileDataListResponse extends SearchResponse { - - @ApiModelProperty - private List logFiles = new ArrayList(); - - @Override - public int getListSize() { - if (logFiles == null) { - return 0; - } - return logFiles.size(); - } - - public List getLogFiles() { - return logFiles; - } - - public void setLogFiles(List logFiles) { - this.logFiles = logFiles; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java deleted file mode 100644 index c075fe25587..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import java.util.List; - -public interface LogListResponse { - List getLogList(); - - void setLogList(List logList); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java deleted file mode 100644 index a63415bafb0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -abstract public class LogSearchResponse extends SearchResponse implements LogListResponse { -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java deleted file mode 100644 index 3f320e72c7d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class NameValueData { - - @ApiModelProperty - private String name; - @ApiModelProperty - private String value; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getValue() { - return value; - } - - public void setValue(String value) { - if (value.contains(".") && (value.contains("e") || value.contains("E"))) { - this.value = getExponentialValueReplaced(value); - } else { - this.value = value; - } - } - - private String getExponentialValueReplaced(String value) { - try{ - Double number = Double.parseDouble(value); - return String.format("%.0f", number); - } catch(Exception e){ - return value; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java deleted file mode 100644 index 4cb983f819c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
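NameValueData.setValue() above normalizes values that arrive in scientific notation (as numeric facet or stats results sometimes do) into plain integer strings. A standalone re-implementation of that rule for illustration:

public class ExponentialValueSketch {

  // Re-implements the deleted setValue() normalization: only values containing
  // a '.' and an 'e'/'E' are treated as scientific notation; anything
  // unparsable is kept verbatim.
  static String normalize(String value) {
    if (value.contains(".") && (value.contains("e") || value.contains("E"))) {
      try {
        return String.format("%.0f", Double.parseDouble(value));
      } catch (NumberFormatException e) {
        return value;
      }
    }
    return value;
  }

  public static void main(String[] args) {
    System.out.println(normalize("1.2345E4")); // 12345
    System.out.println(normalize("3.14"));     // 3.14 (no exponent, unchanged)
    System.out.println(normalize("42"));       // 42   (unchanged)
  }
}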
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.ArrayList; -import java.util.List; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class NameValueDataListResponse extends SearchResponse { - @ApiModelProperty - protected List vNameValues = new ArrayList<>(); - - public List getvNameValues() { - return vNameValues; - } - - public void setvNameValues(List vNameValues) { - this.vNameValues = vNameValues; - } - - @Override - public int getListSize() { - if (vNameValues != null) { - return vNameValues.size(); - } - return 0; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java deleted file mode 100644 index b226ef81481..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.Collection; - -@ApiModel -@JsonInclude(value = JsonInclude.Include.NON_NULL) -public class NodeData { - - @ApiModelProperty - private String name; - - @ApiModelProperty - private String type; - - @ApiModelProperty - private String value; - - @ApiModelProperty - private Collection childs; - - @ApiModelProperty - private Collection logLevelCount; - - @ApiModelProperty - @JsonProperty("isParent") - private boolean parent; - - @ApiModelProperty - @JsonProperty("isRoot") - private boolean root; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } - - public boolean isRoot() { - return root; - } - - public void setRoot(boolean root) { - this.root = root; - } - - public Collection getChilds() { - return childs; - } - - public void setChilds(Collection childs) { - this.childs = childs; - } - - public Collection getLogLevelCount() { - return logLevelCount; - } - - public void setLogLevelCount(Collection logLevelCount) { - this.logLevelCount = logLevelCount; - } - - public boolean isParent() { - return parent; - } - - public void setParent(boolean parent) { - this.parent = parent; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java deleted file mode 100644 index 51044b3c406..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
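NodeData above combines @JsonInclude(NON_NULL) with explicit @JsonProperty names for its boolean flags; without the annotations Jackson would serialize isParent() under the key "parent" and would also emit null-valued fields. A small self-contained sketch of both effects (the class and values are hypothetical):

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class NodeJsonSketch {

  @JsonInclude(JsonInclude.Include.NON_NULL)
  public static class Node {
    public String name = "host1";
    public String value = null;               // dropped from the output because of NON_NULL

    @JsonProperty("isParent")                 // keeps the "is" prefix in the JSON key
    public boolean isParent() { return true; }
  }

  public static void main(String[] args) throws Exception {
    // Prints {"name":"host1","isParent":true}
    System.out.println(new ObjectMapper().writeValueAsString(new Node()));
  }
}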
- */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.ArrayList; -import java.util.List; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class NodeListResponse extends SearchResponse { - - @ApiModelProperty - protected List vNodeList = new ArrayList(); - - public List getvNodeList() { - return vNodeList; - } - - public void setvNodeList(List vNodeList) { - this.vNodeList = vNodeList; - } - - @Override - public int getListSize() { - if (vNodeList == null) { - return 0; - } - return vNodeList.size(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/PropertyDescriptionData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/PropertyDescriptionData.java deleted file mode 100644 index 6da8403379d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/PropertyDescriptionData.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -public class PropertyDescriptionData { - private final String name; - - private final String description; - - private final String[] examples; - - private final String defaultValue; - - @JsonIgnore - private final String source; - - public PropertyDescriptionData(String name, String description, String[] examples, String defaultValue, String source) { - this.name = name; - this.description = description; - this.examples = examples; - this.defaultValue = defaultValue; - this.source = source; - } - - public String getName() { - return name; - } - - public String getDescription() { - return description; - } - - public String[] getExamples() { - return examples; - } - - public String getDefaultValue() { - return defaultValue; - } - - public String getSource() { - return source; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java deleted file mode 100644 index dd88d29ea39..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -@JsonIgnoreProperties(ignoreUnknown = true) -public abstract class SearchResponse { - /** - * Start index for the result - */ - private int startIndex; - /** - * Page size used for the result - */ - private int pageSize; - /** - * Total records in the database for the given search conditions - */ - private long totalCount; - /** - * Number of rows returned for the search condition - */ - private int resultSize; - /** - * Sort type. Either desc or asc - */ - private String sortType; - /** - * Comma seperated list of the fields for sorting - */ - private String sortBy; - - private long queryTimeMS = System.currentTimeMillis(); - - public int getStartIndex() { - return startIndex; - } - - public int getPageSize() { - return pageSize; - } - - public long getTotalCount() { - return totalCount; - } - - public int getResultSize() { - return resultSize; - } - - public String getSortType() { - return sortType; - } - - public String getSortBy() { - return sortBy; - } - - public long getQueryTimeMS() { - return queryTimeMS; - } - - public void setStartIndex(int startIndex) { - this.startIndex = startIndex; - } - - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - - public void setTotalCount(long totalCount) { - this.totalCount = totalCount; - } - - public void setResultSize(int resultSize) { - this.resultSize = resultSize; - } - - public void setSortType(String sortType) { - this.sortType = sortType; - } - - public void setSortBy(String sortBy) { - this.sortBy = sortBy; - } - - public void setQueryTimeMS(long queryTimeMS) { - this.queryTimeMS = queryTimeMS; - } - - public abstract int getListSize(); - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java deleted file mode 100644 index 2d2589e8052..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
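SearchResponse above is the shared paging envelope (start index, page size, total hit count, rows actually returned, sort metadata, query time). The following compact stand-in illustrates how a concrete response would typically be filled; it is a simplified sketch, not the deleted implementation.

import java.util.ArrayList;
import java.util.List;

public class PagingSketch {

  // Minimal stand-in for the SearchResponse paging contract.
  static abstract class PagedResponse {
    private int startIndex;
    private int pageSize;
    private long totalCount;   // total matches in the backend
    private int resultSize;    // rows actually returned in this page

    void setPage(int startIndex, int pageSize) {
      this.startIndex = startIndex;
      this.pageSize = pageSize;
    }

    void setCounts(long totalCount, int resultSize) {
      this.totalCount = totalCount;
      this.resultSize = resultSize;
    }

    abstract int getListSize();

    @Override
    public String toString() {
      return "start=" + startIndex + " pageSize=" + pageSize
          + " total=" + totalCount + " returned=" + resultSize;
    }
  }

  static class NameListResponse extends PagedResponse {
    final List<String> names = new ArrayList<>();
    @Override int getListSize() { return names.size(); }
  }

  public static void main(String[] args) {
    NameListResponse resp = new NameListResponse();
    resp.names.add("HDFS");
    resp.names.add("YARN");
    resp.setPage(0, 25);                    // page requested by the client
    resp.setCounts(2, resp.getListSize());  // backend total vs. rows in this page
    System.out.println(resp);               // start=0 pageSize=25 total=2 returned=2
  }
}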
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.util.Date; - -@JsonIgnoreProperties(ignoreUnknown = true) -public interface ServiceLogData extends CommonLogData, ComponentTypeLogData, HostLogData { - - @JsonProperty("level") - String getLevel(); - - void setLevel(String level); - - @JsonProperty("line_number") - Integer getLineNumber(); - - void setLineNumber(Integer lineNumber); - - @JsonProperty("logtime") - Date getLogTime(); - - void setLogTime(Date logTime); - - @JsonProperty("ip") - String getIp(); - - void setIp(String ip); - - @JsonProperty("path") - String getPath(); - - void setPath(String path); - - @JsonProperty("type") - String getType(); - - void setType(String type); - - @JsonProperty("host") - String getHost(); - - void setHost(String host); - - @JsonProperty("group") - String getGroup(); - - void setGroup(String group); - - @JsonProperty("logger_name") - String getLoggerName(); - - void setLoggerName(String loggerName); - - @JsonProperty("method") - String getMethod(); - - void setMethod(String method); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java deleted file mode 100644 index 2e689c18182..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; - -import java.util.List; - -@ApiModel -@JsonIgnoreProperties(ignoreUnknown = true) -public class ServiceLogResponse extends LogSearchResponse { - - @ApiModelProperty - private List logList; - - @Override - public List getLogList() { - return logList; - } - - @Override - public void setLogList(List logList) { - this.logList = logList; - } - - @Override - public int getListSize() { - return logList == null ? 
0 : logList.size(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ShipperConfigDescriptionData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ShipperConfigDescriptionData.java deleted file mode 100644 index 91f7420db15..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/ShipperConfigDescriptionData.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.model.response; - -public class ShipperConfigDescriptionData { - private final String path; - - private final String description; - - private final String[] examples; - - private final String defaultValue; - - public ShipperConfigDescriptionData(String path, String description, String[] examples, String defaultValue) { - this.path = path; - this.description = description; - this.examples = examples; - this.defaultValue = defaultValue; - } - - public String getPath() { - return path; - } - - public String getDescription() { - return description; - } - - public String[] getExamples() { - return examples; - } - - public String getDefaultValue() { - return defaultValue; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/TemplateData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/TemplateData.java deleted file mode 100644 index 05deebda550..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/response/TemplateData.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.model.response; - -public class TemplateData { - - private String data; - - public TemplateData(String data) { - this.data = data; - } - - public String getData() { - return data; - } - - public void setData(String data) { - this.data = data; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java deleted file mode 100644 index 0da27067310..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.rest; - -import javax.annotation.Nullable; -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.BeanParam; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import freemarker.template.TemplateException; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.model.metadata.AuditFieldMetadataResponse; -import org.apache.ambari.logsearch.model.request.impl.body.AuditBarGraphBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.AuditLogBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.AuditServiceLoadBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ClusterBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.TopFieldAuditLogBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.UserExportBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditBarGraphQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditLogQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditServiceLoadQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.TopFieldAuditLogQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.UserExportQueryRequest; -import org.apache.ambari.logsearch.model.response.AuditLogResponse; -import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse; -import org.apache.ambari.logsearch.manager.AuditLogsManager; -import 
org.springframework.context.annotation.Scope; - -import java.util.List; -import java.util.Map; - -import static org.apache.ambari.logsearch.doc.DocConstants.AuditOperationDescriptions.*; - -@Api(value = "audit/logs", description = "Audit log operations") -@Path("audit/logs") -@Named -@Scope("request") -public class AuditLogsResource { - - @Inject - private AuditLogsManager auditLogsManager; - - @GET - @Path("/schema/fields") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD) - public AuditFieldMetadataResponse getSolrFieldListGet() { - return auditLogsManager.getAuditLogSchemaMetadata(); - } - - - @POST - @Path("/schema/fields") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD) - public AuditFieldMetadataResponse getSolrFieldListPost() { - return auditLogsManager.getAuditLogSchemaMetadata(); - } - - @GET - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_LOGS_OD) - public AuditLogResponse getAuditLogsGet(@BeanParam AuditLogQueryRequest auditLogRequest) { - return auditLogsManager.getLogs(auditLogRequest); - } - - @POST - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_LOGS_OD) - public AuditLogResponse getAuditLogsPost(AuditLogBodyRequest auditLogRequest) { - return auditLogsManager.getLogs(auditLogRequest); - } - - @DELETE - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(PURGE_AUDIT_LOGS_OD) - public StatusMessage deleteAuditLogs(AuditLogBodyRequest auditLogRequest) { - return auditLogsManager.deleteLogs(auditLogRequest); - } - - @GET - @Path("/components") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_COMPONENTS_OD) - public Map getAuditComponentsGet(@QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) @Nullable String clusters) { - return auditLogsManager.getAuditComponents(clusters); - } - - @POST - @Path("/components") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_COMPONENTS_OD) - public Map getAuditComponentsPost(@Nullable ClusterBodyRequest clusterBodyRequest) { - return auditLogsManager.getAuditComponents(clusterBodyRequest != null ? 
clusterBodyRequest.getClusters() : null); - } - - @GET - @Path("/bargraph") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD) - public BarGraphDataListResponse getAuditBarGraphDataGet(@BeanParam AuditBarGraphQueryRequest request) { - return auditLogsManager.getAuditBarGraphData(request); - } - - @POST - @Path("/bargraph") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD) - public BarGraphDataListResponse getAuditBarGraphDataPost(AuditBarGraphBodyRequest request) { - return auditLogsManager.getAuditBarGraphData(request); - } - - @GET - @Path("/resources/{top}") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD) - public BarGraphDataListResponse getResourcesGet(@BeanParam TopFieldAuditLogQueryRequest request) { - return auditLogsManager.topResources(request); - } - - @POST - @Path("/resources/{top}") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD) - public BarGraphDataListResponse getResourcesPost(TopFieldAuditLogBodyRequest request, @PathParam(LogSearchConstants.REQUEST_PARAM_TOP) Integer top) { - request.setTop(top); // TODO: set this in the request - return auditLogsManager.topResources(request); - } - - @GET - @Path("/export") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD) - public Response exportUserTableToTextFileGet(@BeanParam UserExportQueryRequest request) throws TemplateException { - return auditLogsManager.export(request); - } - - @POST - @Path("/export") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD) - public Response exportUserTableToTextFilePost(UserExportBodyRequest request) throws TemplateException { - return auditLogsManager.export(request); - } - - @GET - @Path("/serviceload") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_SERVICE_LOAD_OD) - public BarGraphDataListResponse getServiceLoadGet(@BeanParam AuditServiceLoadQueryRequest request) { - return auditLogsManager.getServiceLoad(request); - } - - @POST - @Path("/serviceload") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_SERVICE_LOAD_OD) - public BarGraphDataListResponse getServiceLoadPost(AuditServiceLoadBodyRequest request) { - return auditLogsManager.getServiceLoad(request); - } - - @GET - @Path("/clusters") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_CLUSTERS_OD) - public List getClustersForAuditLogGet() { - return auditLogsManager.getClusters(); - } - - @POST - @Path("/clusters") - @Produces({MediaType.APPLICATION_JSON}) - @Consumes({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AUDIT_CLUSTERS_OD) - public List getClustersForAuditLogPost() { - return auditLogsManager.getClusters(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/EventHistoryResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/EventHistoryResource.java deleted file mode 100644 index 917a55c47a9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/EventHistoryResource.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.rest; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.BeanParam; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; - -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.logsearch.manager.EventHistoryManager; -import org.apache.ambari.logsearch.model.request.impl.query.EventHistoryQueryRequest; -import org.apache.ambari.logsearch.model.response.EventHistoryData; -import org.apache.ambari.logsearch.model.response.EventHistoryDataListResponse; -import org.springframework.context.annotation.Scope; - -import java.util.List; - -import static org.apache.ambari.logsearch.doc.DocConstants.EventHistoryOperationDescriptions.*; - -@Api(value = "history", description = "Event history operations") -@Path("history") -@Named -@Scope("request") -public class EventHistoryResource { - - @Inject - private EventHistoryManager eventHistoryManager; - - @POST - @Produces({"application/json"}) - @ApiOperation(SAVE_EVENT_HISTORY_DATA_OD) - public String saveEvent(EventHistoryData eventHistoryData) { - return eventHistoryManager.saveEvent(eventHistoryData); - } - - @DELETE - @Path("/{id}") - @ApiOperation(DELETE_EVENT_HISTORY_DATA_OD) - public void deleteEvent(@PathParam("id") String id) { - eventHistoryManager.deleteEvent(id); - } - - @GET - @Produces({"application/json"}) - @ApiOperation(GET_EVENT_HISTORY_DATA_OD) - public EventHistoryDataListResponse getEventHistory(@BeanParam EventHistoryQueryRequest request) { - return eventHistoryManager.getEventHistory(request); - } - - @GET - @Path("/names") - @Produces({"application/json"}) - @ApiOperation(GET_ALL_USER_NAMES_OD) - public List getAllUserName() { - return eventHistoryManager.getAllUserName(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/InfoResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/InfoResource.java deleted file mode 100644 index 52ecdca79f2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/InfoResource.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.rest; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; - -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.logsearch.manager.InfoManager; -import org.apache.ambari.logsearch.model.response.PropertyDescriptionData; -import org.apache.ambari.logsearch.model.response.ShipperConfigDescriptionData; -import org.springframework.context.annotation.Scope; - -import java.util.List; -import java.util.Map; - -import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.GET_ALL_PROPERTIES_INFO_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.GET_ALL_SHIPPER_CONFIG_INFO_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.GET_APP_DETAILS_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.GET_FEATURES_LIST; -import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.GET_LOGSEARCH_PROPERTIES_INFO_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.GET_AUTH_DETAILS_OD; - -@Api(value = "info", description = "General configuration information") -@Path("info") -@Named -@Scope("request") -public class InfoResource { - - @Inject - private InfoManager infoManager; - - @GET - @Produces({"application/json"}) - @ApiOperation(GET_APP_DETAILS_OD) - public Map getApplicationInfo() { - return infoManager.getApplicationInfo(); - } - - @GET - @Path("/properties") - @Produces({"application/json"}) - @ApiOperation(GET_ALL_PROPERTIES_INFO_OD) - public Map> getPropertyDescriptions() { - return infoManager.getPropertyDescriptions(); - } - - @GET - @Path("/properties/{propertyFile}") - @Produces({"application/json"}) - @ApiOperation(GET_LOGSEARCH_PROPERTIES_INFO_OD) - public List getPropertyFileDescription(@PathParam("propertyFile") String propertyFile) { - return infoManager.getLogSearchPropertyDescriptions(propertyFile); - } - - @GET - @Path("/features") - @Produces({"application/json"}) - @ApiOperation(GET_FEATURES_LIST) - public Map getFeatures() { - return infoManager.getFeaturesMap(); - } - - @GET - @Path("/features/auth") - @Produces({"application/json"}) - @ApiOperation(GET_AUTH_DETAILS_OD) - public Map getAuthInfo() { - return infoManager.getAuthMap(); - } - - @GET - @Path("/shipperconfig") - @Produces({"application/json"}) - @ApiOperation(GET_ALL_SHIPPER_CONFIG_INFO_OD) - public List getShipperConfigDescription() { - return infoManager.getLogSearchShipperConfigDescription(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java deleted file mode 100644 index 0deffa8ec68..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java +++ /dev/null @@ -1,408 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.rest; - -import javax.annotation.Nullable; -import javax.inject.Inject; -import javax.inject.Named; -import javax.validation.Valid; -import javax.validation.executable.ValidateOnExecution; -import javax.ws.rs.BeanParam; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.model.metadata.FieldMetadata; -import org.apache.ambari.logsearch.model.metadata.ServiceComponentMetadataWrapper; -import org.apache.ambari.logsearch.model.request.impl.body.ClusterBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.HostLogFilesBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceAnyGraphBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceGraphBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogAggregatedInfoBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogComponentHostBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogComponentLevelBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogExportBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogHostComponentBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogLevelCountBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.body.ServiceLogTruncatedBodyRequest; -import org.apache.ambari.logsearch.model.request.impl.query.HostLogFilesQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceAnyGraphQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceGraphQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogAggregatedInfoQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogComponentHostQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogComponentLevelQueryRequest; -import 
org.apache.ambari.logsearch.model.request.impl.query.ServiceLogExportQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogHostComponentQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogLevelCountQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogQueryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogTruncatedQueryRequest; -import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse; -import org.apache.ambari.logsearch.model.response.CountDataListResponse; -import org.apache.ambari.logsearch.model.response.GraphDataListResponse; -import org.apache.ambari.logsearch.model.response.GroupListResponse; -import org.apache.ambari.logsearch.model.response.HostLogFilesResponse; -import org.apache.ambari.logsearch.model.response.NameValueDataListResponse; -import org.apache.ambari.logsearch.model.response.NodeListResponse; -import org.apache.ambari.logsearch.model.response.ServiceLogResponse; -import org.apache.ambari.logsearch.manager.ServiceLogsManager; -import org.springframework.context.annotation.Scope; - -import java.util.List; - -import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.*; - -@Api(value = "service/logs", description = "Service log operations") -@Path("service/logs") -@Named -@Scope("request") -public class ServiceLogsResource { - - @Inject - private ServiceLogsManager serviceLogsManager; - - @GET - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(SEARCH_LOGS_OD) - public ServiceLogResponse searchServiceLogsGet(@BeanParam ServiceLogQueryRequest request) { - return serviceLogsManager.searchLogs(request); - } - - @POST - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(SEARCH_LOGS_OD) - public ServiceLogResponse searchServiceLogsPost(ServiceLogBodyRequest request) { - return serviceLogsManager.searchLogs(request); - } - - @DELETE - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(PURGE_LOGS_OD) - public StatusMessage deleteServiceLogs(ServiceLogBodyRequest request) { - return serviceLogsManager.deleteLogs(request); - } - - @GET - @Path("/hosts") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOSTS_OD) - public GroupListResponse getHostsGet(@QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) @Nullable String clusters) { - return serviceLogsManager.getHosts(clusters); - } - - @POST - @Path("/hosts") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOSTS_OD) - public GroupListResponse getHostsPost(@Nullable ClusterBodyRequest clusterBodyRequest) { - return serviceLogsManager.getHosts(clusterBodyRequest != null ? clusterBodyRequest.getClusters() : null); - } - - @GET - @Path("/components") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_COMPONENTS_OD) - public ServiceComponentMetadataWrapper getComponents(@QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) @Nullable String clusters) { - return serviceLogsManager.getComponentMetadata(clusters); - } - - @POST - @Path("/components") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_COMPONENTS_OD) - public ServiceComponentMetadataWrapper getComponents(@Nullable ClusterBodyRequest clusterBodyRequest) { - return serviceLogsManager.getComponentMetadata(clusterBodyRequest != null ? 
clusterBodyRequest.getClusters() : null); - } - - @GET - @Path("/aggregated") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AGGREGATED_INFO_OD) - public GraphDataListResponse getAggregatedInfoGet(@BeanParam ServiceLogAggregatedInfoQueryRequest request) { - return serviceLogsManager.getAggregatedInfo(request); - } - - @POST - @Path("/aggregated") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AGGREGATED_INFO_OD) - public GraphDataListResponse getAggregatedInfoPost(ServiceLogAggregatedInfoBodyRequest request) { - return serviceLogsManager.getAggregatedInfo(request); - } - - @GET - @Path("/components/count") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_COMPONENTS_COUNT_OD) - public CountDataListResponse getComponentsCountGet(@QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) @Nullable String clusters) { - return serviceLogsManager.getComponentsCount(clusters); - } - - @POST - @Path("/components/count") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_COMPONENTS_COUNT_OD) - public CountDataListResponse getComponentsCountPost(@Nullable ClusterBodyRequest clusterBodyRequest) { - return serviceLogsManager.getComponentsCount(clusterBodyRequest != null ? clusterBodyRequest.getClusters() : null); - } - - @GET - @Path("/hosts/count") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOSTS_COUNT_OD) - public CountDataListResponse getHostsCountGet(@QueryParam(LogSearchConstants.REQUEST_PARAM_CLUSTER_NAMES) @Nullable String clusters) { - return serviceLogsManager.getHostsCount(clusters); - } - - @POST - @Path("/hosts/count") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOSTS_COUNT_OD) - public CountDataListResponse getHostsCountPost(@Nullable ClusterBodyRequest clusterBodyRequest) { - return serviceLogsManager.getHostsCount(clusterBodyRequest != null ? 
clusterBodyRequest.getClusters() : null); - } - - @GET - @Path("/tree") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_TREE_EXTENSION_OD) - public NodeListResponse getTreeExtensionGet(@BeanParam ServiceLogHostComponentQueryRequest request) { - return serviceLogsManager.getTreeExtension(request); - } - - @POST - @Path("/tree") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_TREE_EXTENSION_OD) - public NodeListResponse getTreeExtensionPost(ServiceLogHostComponentBodyRequest request) { - return serviceLogsManager.getTreeExtension(request); - } - - @GET - @Path("/levels/counts") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_LOG_LEVELS_COUNT_OD) - public NameValueDataListResponse getLogsLevelCountGet(@BeanParam ServiceLogLevelCountQueryRequest request) { - return serviceLogsManager.getLogsLevelCount(request); - } - - @POST - @Path("/levels/counts") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_LOG_LEVELS_COUNT_OD) - public NameValueDataListResponse getLogsLevelCountPost(ServiceLogLevelCountBodyRequest request) { - return serviceLogsManager.getLogsLevelCount(request); - } - - @GET - @Path("/histogram") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HISTOGRAM_DATA_OD) - public BarGraphDataListResponse getHistogramDataGet(@BeanParam ServiceGraphQueryRequest request) { - return serviceLogsManager.getHistogramData(request); - } - - @POST - @Path("/histogram") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HISTOGRAM_DATA_OD) - public BarGraphDataListResponse getHistogramDataPost(ServiceGraphBodyRequest request) { - return serviceLogsManager.getHistogramData(request); - } - - - @GET - @Path("/export") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(EXPORT_TO_TEXT_FILE_OD) - public Response exportToTextFileGet(@BeanParam ServiceLogExportQueryRequest request) { - return serviceLogsManager.export(request); - } - - @POST - @Path("/export") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(EXPORT_TO_TEXT_FILE_OD) - public Response exportToTextFilePost(ServiceLogExportBodyRequest request) { - return serviceLogsManager.export(request); - } - - @GET - @Path("/hosts/components") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD) - public NodeListResponse getHostListByComponentGet(@BeanParam ServiceLogComponentHostQueryRequest request) { - return serviceLogsManager.getHostListByComponent(request); - } - - @POST - @Path("/hosts/components") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD) - public NodeListResponse getHostListByComponentPost(ServiceLogComponentHostBodyRequest request) { - return serviceLogsManager.getHostListByComponent(request); - } - - @GET - @Path("/components/levels/counts") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD) - public NodeListResponse getComponentListWithLevelCountsGet(@BeanParam ServiceLogComponentLevelQueryRequest request) { - return serviceLogsManager.getComponentListWithLevelCounts(request); - } - - @POST - @Path("/components/levels/counts") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD) - public NodeListResponse 
getComponentListWithLevelCountsPost(ServiceLogComponentLevelBodyRequest request) { - return serviceLogsManager.getComponentListWithLevelCounts(request); - } - - @GET - @Path("/schema/fields") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD) - public List getServiceLogsSchemaFieldsNameGet() { - return serviceLogsManager.getServiceLogsSchemaFieldsName(); - } - - @POST - @Path("/schema/fields") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD) - public List getServiceLogsSchemaFieldsNamePost() { - return serviceLogsManager.getServiceLogsSchemaFieldsName(); - } - - @GET - @Path("/count/anygraph") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_ANY_GRAPH_COUNT_DATA_OD) - public BarGraphDataListResponse getAnyGraphCountDataGet(@BeanParam ServiceAnyGraphQueryRequest request) { - return serviceLogsManager.getAnyGraphCountData(request); - } - - @POST - @Path("/count/anygraph") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_ANY_GRAPH_COUNT_DATA_OD) - public BarGraphDataListResponse getAnyGraphCountDataPost(ServiceAnyGraphBodyRequest request) { - return serviceLogsManager.getAnyGraphCountData(request); - } - - @GET - @Path("/truncated") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AFTER_BEFORE_LOGS_OD) - public ServiceLogResponse getAfterBeforeLogs(@BeanParam ServiceLogTruncatedQueryRequest request) { - return serviceLogsManager.getAfterBeforeLogs(request); - } - - @POST - @Path("/truncated") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_AFTER_BEFORE_LOGS_OD) - public ServiceLogResponse getAfterBeforeLogs(ServiceLogTruncatedBodyRequest request) { - return serviceLogsManager.getAfterBeforeLogs(request); - } - - @GET - @Path("/request/cancel") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(REQUEST_CANCEL) - public String cancelRequestGet() { - // TODO: create function that cancels an ongoing solr request - return "{\"endpoint status\": \"not supported yet\"}"; - } - - @POST - @Path("/request/cancel") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(REQUEST_CANCEL) - public String cancelRequestPost() { - // TODO: create function that cancels an ongoing solr request - return "{\"endpoint status\": \"not supported yet\"}"; - } - - @GET - @Path("/files") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOST_LOGFILES_OD) - @ValidateOnExecution - public HostLogFilesResponse getHostLogFiles(@Valid @BeanParam HostLogFilesQueryRequest request) { - return serviceLogsManager.getHostLogFileData(request); - } - - @POST - @Path("/files") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_HOST_LOGFILES_OD) - @ValidateOnExecution - public HostLogFilesResponse getHostLogFiles(@Valid @BeanParam HostLogFilesBodyRequest request) { - return serviceLogsManager.getHostLogFileData(request); - } - - @GET - @Path("/clusters") - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_SERVICE_CLUSTERS_OD) - public List getClustersForServiceLogGet() { - return serviceLogsManager.getClusters(); - } - - @POST - @Path("/clusters") - @Consumes({MediaType.APPLICATION_JSON}) - @Produces({MediaType.APPLICATION_JSON}) - @ApiOperation(GET_SERVICE_CLUSTERS_OD) - public List getClustersForServiceLogPost() { - return 
serviceLogsManager.getClusters(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java deleted file mode 100644 index 7d31f0814c1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.rest; - -import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.GET_LOG_LEVEL_FILTER_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.GET_SERVICE_NAMES_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.GET_SHIPPER_CONFIG_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.SET_SHIPPER_CONFIG_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.TEST_SHIPPER_CONFIG_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.UPDATE_LOG_LEVEL_FILTER_OD; - -import java.util.List; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.validation.Valid; -import javax.validation.executable.ValidateOnExecution; -import javax.ws.rs.BeanParam; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Response; - -import org.apache.ambari.logsearch.manager.ShipperConfigManager; -import org.apache.ambari.logsearch.model.common.LSServerInputConfig; -import org.apache.ambari.logsearch.model.common.LSServerLogLevelFilterMap; -import org.apache.ambari.logsearch.model.request.impl.ShipperConfigTestRequest; -import org.springframework.context.annotation.Scope; - -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; - -@Api(value = "shipper", description = "Shipper config operations") -@Path("shipper") -@Named -@Scope("request") -public class ShipperConfigResource { - - @Inject - private ShipperConfigManager shipperConfigManager; - - @GET - @Path("/input/{clusterName}/services") - @Produces({"application/json"}) - @ApiOperation(GET_SERVICE_NAMES_OD) - public List getServices(@PathParam("clusterName") String clusterName) { - return shipperConfigManager.getServices(clusterName); - } - - @GET - @Path("/input/{clusterName}/services/{serviceName}") - @Produces({"application/json"}) - @ApiOperation(GET_SHIPPER_CONFIG_OD) - public LSServerInputConfig 
getShipperConfig(@PathParam("clusterName") String clusterName, @PathParam("serviceName") - String serviceName) { - return shipperConfigManager.getInputConfig(clusterName, serviceName); - } - - @POST - @Path("/input/{clusterName}/services/{serviceName}") - @Produces({"application/json"}) - @ApiOperation(SET_SHIPPER_CONFIG_OD) - @ValidateOnExecution - public Response createShipperConfig(@Valid LSServerInputConfig request, @PathParam("clusterName") String clusterName, - @PathParam("serviceName") String serviceName) { - return shipperConfigManager.createInputConfig(clusterName, serviceName, request); - } - - @PUT - @Path("/input/{clusterName}/services/{serviceName}") - @Produces({"application/json"}) - @ApiOperation(SET_SHIPPER_CONFIG_OD) - @ValidateOnExecution - public Response setShipperConfig(@Valid LSServerInputConfig request, @PathParam("clusterName") String clusterName, - @PathParam("serviceName") String serviceName) { - return shipperConfigManager.setInputConfig(clusterName, serviceName, request); - } - - @POST - @Path("/input/{clusterName}/test") - @Produces({"application/json"}) - @ApiOperation(TEST_SHIPPER_CONFIG_OD) - public Response testShipperConfig(@Valid @BeanParam ShipperConfigTestRequest request, @PathParam("clusterName") String clusterName) { - return shipperConfigManager.testShipperConfig(request.getShipperConfig(), request.getLogId(), request.getTestEntry(), clusterName); - } - - @GET - @Path("/filters/{clusterName}/level") - @Produces({"application/json"}) - @ApiOperation(GET_LOG_LEVEL_FILTER_OD) - public LSServerLogLevelFilterMap getLogLevelFilters(@PathParam("clusterName") String clusterName) { - return shipperConfigManager.getLogLevelFilters(clusterName); - } - - @PUT - @Path("/filters/{clusterName}/level") - @Produces({"application/json"}) - @ApiOperation(UPDATE_LOG_LEVEL_FILTER_OD) - @ValidateOnExecution - public Response setLogLevelFilter(@Valid LSServerLogLevelFilterMap request, @PathParam("clusterName") String clusterName) { - return shipperConfigManager.setLogLevelFilters(clusterName, request); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/StatusResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/StatusResource.java deleted file mode 100644 index 3499bce695d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/StatusResource.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.rest; - -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.springframework.context.annotation.Scope; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import java.util.HashMap; -import java.util.Map; - -import static org.apache.ambari.logsearch.doc.DocConstants.StatusOperationDescriptions.AUDIT_LOGS_STATUS_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.StatusOperationDescriptions.SERVICE_LOGS_STATUS_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.StatusOperationDescriptions.STATUS_OD; -import static org.apache.ambari.logsearch.doc.DocConstants.StatusOperationDescriptions.EVENT_HISTORY_STATUS_OD; - -@Api(value = "status", description = "Status Operations") -@Path("status") -@Named -@Scope("request") -public class StatusResource { - - @Inject - @Named("solrServiceLogsState") - private SolrCollectionState solrServiceLogsState; - - @Inject - @Named("solrAuditLogsState") - private SolrCollectionState solrAuditLogsState; - - @Inject - @Named("solrEventHistoryState") - private SolrCollectionState solrEventHistoryState; - - @GET - @Produces({"application/json"}) - @ApiOperation(STATUS_OD) - public Map getStatus() { - Map response = new HashMap<>(); - response.put("serviceLogs", solrServiceLogsState); - response.put("auditLogs", solrAuditLogsState); - response.put("eventHistory", solrEventHistoryState); - return response; - } - - @GET - @Path("/servicelogs") - @Produces({"application/json"}) - @ApiOperation(SERVICE_LOGS_STATUS_OD) - public SolrCollectionState getServiceLogStatus() { - return solrServiceLogsState; - } - - @GET - @Path("/auditlogs") - @Produces({"application/json"}) - @ApiOperation(AUDIT_LOGS_STATUS_OD) - public SolrCollectionState getSolrAuditLogsStatus() { - return solrAuditLogsState; - } - - @GET - @Path("/history") - @Produces({"application/json"}) - @ApiOperation(EVENT_HISTORY_STATUS_OD) - public SolrCollectionState getSolrEventHistoryStatus() { - return solrEventHistoryState; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/SwaggerResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/SwaggerResource.java deleted file mode 100644 index 36dc5bd946b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/SwaggerResource.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.rest; - -import io.swagger.annotations.ApiOperation; -import org.apache.ambari.logsearch.common.ApiDocStorage; -import org.springframework.context.annotation.Scope; - -import javax.inject.Inject; -import javax.inject.Named; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -@Path("swagger.{type:json|yaml}") -@Named -@Scope("request") -public class SwaggerResource { - - @Inject - private ApiDocStorage apiDocStorage; - - @GET - @Produces({MediaType.APPLICATION_JSON, "application/yaml"}) - @ApiOperation(value = "The swagger definition in either JSON or YAML", hidden = true) - public Response swaggerDefinitionResponse(@PathParam("type") String type) { - Response response = Response.status(404).build(); - if (apiDocStorage.getSwagger() != null) { - if ("yaml".equalsIgnoreCase(type)) { - response = Response.ok().entity(apiDocStorage.getSwaggerYaml()).type("application/yaml").build(); - } else { - response = Response.ok().entity(apiDocStorage.getSwagger()).build(); - } - } - return response; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/error/GeneralExceptionMapper.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/error/GeneralExceptionMapper.java deleted file mode 100644 index 81f13fdfc09..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/error/GeneralExceptionMapper.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.rest.error; - -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import static javax.ws.rs.core.Response.Status.CONFLICT; -import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; -import static javax.ws.rs.core.Response.Status.NOT_FOUND; - -import java.time.DateTimeException; -import java.util.Map; - -import javax.inject.Named; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.manager.AlreadyExistsException; -import org.apache.ambari.logsearch.manager.MalformedInputException; -import org.apache.ambari.logsearch.manager.NotFoundException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.web.bind.MethodArgumentNotValidException; - -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException; -import com.google.common.collect.Maps; - -@Named -@Provider -public class GeneralExceptionMapper implements ExceptionMapper { - private static final Logger LOG = LoggerFactory.getLogger(GeneralExceptionMapper.class); - static final String INTERNAL_SERVER_ERROR_MESSAGE = "Something went wrong, For more details check the logs or configuration."; - - private static final Map, Response.Status> exceptionStatusCodeMap = Maps.newHashMap(); - - static { - exceptionStatusCodeMap.put(MethodArgumentNotValidException.class, BAD_REQUEST); - exceptionStatusCodeMap.put(JsonMappingException.class, BAD_REQUEST); - exceptionStatusCodeMap.put(JsonParseException.class, BAD_REQUEST); - exceptionStatusCodeMap.put(UnrecognizedPropertyException.class, BAD_REQUEST); - exceptionStatusCodeMap.put(MalformedInputException.class, BAD_REQUEST); - exceptionStatusCodeMap.put(AlreadyExistsException.class, CONFLICT); - exceptionStatusCodeMap.put(NotFoundException.class, NOT_FOUND); - exceptionStatusCodeMap.put(DateTimeException.class, BAD_REQUEST); - } - - @Override - public Response toResponse(Exception exception) { - try { - return toResponse(exception, getStatus(exception)); - } - catch (Exception ex) { - LOG.error("Error while generating status message. 
Original Exception was", exception); - throw ex; - } - } - - private Response.Status getStatus(Exception exception) { - for (Map.Entry, Response.Status> entry : exceptionStatusCodeMap.entrySet()) { - if (entry.getKey().isAssignableFrom(exception.getClass())) { - Response.Status status = entry.getValue(); - LOG.info("Exception mapped to: {} with status code: {}", entry.getKey().getCanonicalName(), entry.getValue().getStatusCode()); - return status; - } - } - return INTERNAL_SERVER_ERROR; - } - - static Response toResponse(Exception exception, Response.Status status) { - String errorMessage; - if (status.getStatusCode() < 500) { - errorMessage = exception.getMessage(); - LOG.info("REST Exception occurred: {}", exception.getMessage()); - LOG.debug("REST Exception occurred:", exception); - } - else { - errorMessage = INTERNAL_SERVER_ERROR_MESSAGE; - LOG.error("REST Exception occurred:", exception); - } - - return Response.status(status).entity(StatusMessage.with(status, errorMessage)) - .type(MediaType.APPLICATION_JSON_TYPE).build(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/error/SolrExceptionMapper.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/error/SolrExceptionMapper.java deleted file mode 100644 index 4791131bebd..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/error/SolrExceptionMapper.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.rest.error; - -import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; - -import javax.inject.Named; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import org.apache.solr.common.SolrException; - -@Named -@Provider -public class SolrExceptionMapper implements ExceptionMapper { - @Override - public Response toResponse(SolrException exception) { - Response.Status status = Response.Status.fromStatusCode(exception.code()); - if (status == null) - status = INTERNAL_SERVER_ERROR; - - return GeneralExceptionMapper.toResponse(exception, status); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/service/UserService.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/service/UserService.java deleted file mode 100644 index ba4431de39a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/service/UserService.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.service; - -import org.springframework.security.core.userdetails.UserDetailsService; -import org.springframework.security.core.userdetails.UsernameNotFoundException; -import org.springframework.stereotype.Service; -import org.apache.ambari.logsearch.dao.UserDao; -import org.apache.ambari.logsearch.web.model.User; -import org.apache.log4j.Logger; - -import javax.inject.Inject; - - -@Service -public class UserService implements UserDetailsService { - private static final Logger logger = Logger.getLogger(UserService.class); - - @Inject - private UserDao userDao; - - @Override - public User loadUserByUsername(final String username) throws UsernameNotFoundException { - logger.debug(userDao + " loadUserByUsername " + username); - return userDao.loadUserByUsername(username); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/ResponseDataGenerator.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/ResponseDataGenerator.java deleted file mode 100644 index ad0e6dcdcd5..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/ResponseDataGenerator.java +++ /dev/null @@ -1,524 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.solr; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -import org.apache.ambari.logsearch.common.LabelFallbackHandler; -import org.apache.ambari.logsearch.model.metadata.ComponentMetadata; -import org.apache.ambari.logsearch.model.metadata.ServiceComponentMetadataWrapper; -import org.apache.ambari.logsearch.model.response.BarGraphData; -import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse; -import org.apache.ambari.logsearch.model.response.CountData; -import org.apache.ambari.logsearch.model.response.CountDataListResponse; -import org.apache.ambari.logsearch.model.response.GraphData; -import org.apache.ambari.logsearch.model.response.GraphDataListResponse; -import org.apache.ambari.logsearch.model.response.HostLogFilesResponse; -import org.apache.ambari.logsearch.model.response.NameValueData; -import org.apache.ambari.logsearch.model.response.NameValueDataListResponse; -import org.apache.ambari.logsearch.model.response.NodeData; -import org.apache.ambari.logsearch.model.response.NodeListResponse; -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.client.solrj.response.FacetField; -import org.apache.solr.client.solrj.response.FacetField.Count; -import org.apache.solr.client.solrj.response.PivotField; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.RangeFacet; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.util.NamedList; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class ResponseDataGenerator { - - @Inject - private LabelFallbackHandler labelFallbackHandler; - - public BarGraphDataListResponse generateBarGraphDataResponseWithRanges(QueryResponse response, String typeField, boolean typeUppercase) { - BarGraphDataListResponse dataList = new BarGraphDataListResponse(); - if (response == null) { - return dataList; - } - NamedList> facetPivotResponse = response.getFacetPivot(); - if (response.getFacetPivot() == null) { - return dataList; - } - List pivotFields = facetPivotResponse.get(typeField); - for (int pivotIndex = 0; pivotIndex < pivotFields.size(); pivotIndex++) { - PivotField pivotField = facetPivotResponse.get(typeField).get(pivotIndex); - List nameValues = generateNameValueDataList(pivotField.getFacetRanges()); - BarGraphData barGraphData = new BarGraphData(); - barGraphData.setDataCount(nameValues); - String typeValue = typeUppercase ? 
StringUtils.upperCase(pivotField.getValue().toString()) : pivotField.getValue().toString(); - barGraphData.setName(typeValue); - dataList.getGraphData().add(barGraphData); - } - return dataList; - } - - public BarGraphDataListResponse generateSecondLevelBarGraphDataResponse(QueryResponse response, int val) { - BarGraphDataListResponse barGraphDataListResponse = new BarGraphDataListResponse(); - NamedList> pivotFieldNameList = response.getFacetPivot(); - if (pivotFieldNameList == null) { - return barGraphDataListResponse; - } - List pivotFields = pivotFieldNameList.getVal(val); - List barGraphDataList = new ArrayList<>(); - for (PivotField pivotField : pivotFields) { - BarGraphData barGraphData = new BarGraphData(); - barGraphData.setName(String.valueOf(pivotField.getValue())); - List secondLevelPivotFields = pivotField.getPivot(); - List nameValueDataList = new ArrayList<>(); - for (PivotField sPivotField : secondLevelPivotFields) { - NameValueData nvD = new NameValueData(); - nvD.setName(String.valueOf(sPivotField.getValue())); - nvD.setValue(String.valueOf(sPivotField.getCount())); - nameValueDataList.add(nvD); - } - barGraphData.setDataCount(nameValueDataList); - barGraphDataList.add(barGraphData); - } - barGraphDataListResponse.setGraphData(barGraphDataList); - return barGraphDataListResponse; - } - - public BarGraphDataListResponse generateBarGraphFromFieldFacet(QueryResponse response, String facetField) { - BarGraphDataListResponse dataList = new BarGraphDataListResponse(); - Collection vaDatas = new ArrayList<>(); - dataList.setGraphData(vaDatas); - if (response == null) { - return dataList; - } - FacetField facetFieldObj = response.getFacetField(facetField); - if (facetFieldObj == null) { - return dataList; - } - - List counts = facetFieldObj.getValues(); - if (counts == null) { - return dataList; - } - for (Count cnt : counts) { - List valueList = new ArrayList<>(); - BarGraphData vBarGraphData = new BarGraphData(); - vaDatas.add(vBarGraphData); - NameValueData vNameValue = new NameValueData(); - vNameValue.setName(cnt.getName()); - vBarGraphData.setName(cnt.getName().toUpperCase()); - vNameValue.setValue("" + cnt.getCount()); - valueList.add(vNameValue); - vBarGraphData.setDataCount(valueList); - } - return dataList; - } - - @SuppressWarnings("rawtypes") - public List generateNameValueDataList(List rangeFacet) { - List nameValues = new ArrayList<>(); - if (rangeFacet == null) { - return nameValues; - } - RangeFacet range = rangeFacet.get(0); - - if (range == null) { - return nameValues; - } - List listCount = range.getCounts(); - for (RangeFacet.Count cnt : listCount) { - NameValueData nameValue = new NameValueData(); - nameValue.setName(String.valueOf(cnt.getValue())); - nameValue.setValue(String.valueOf(cnt.getCount())); - nameValues.add(nameValue); - } - return nameValues; - } - - public List generateCount(QueryResponse response) { - List counts = new ArrayList<>(); - List facetFields = null; - FacetField facetField = null; - if (response == null) { - return counts; - } - - facetFields = response.getFacetFields(); - if (facetFields == null) { - return counts; - } - if (!facetFields.isEmpty()) { - facetField = facetFields.get(0); - } - if (facetField != null) { - counts = facetField.getValues(); - } - return counts; - } - - public BarGraphDataListResponse getGraphDataWithDefaults(QueryResponse queryResponse, String field, String[] defaults) { - BarGraphDataListResponse response = new BarGraphDataListResponse(); - BarGraphData barGraphData = new BarGraphData(); - List nameValues 
= generateLevelCountData(queryResponse, defaults, true); - barGraphData.setName(field); - barGraphData.setDataCount(nameValues); - response.setGraphData(Lists.newArrayList(barGraphData)); - return response; - } - - public NameValueDataListResponse getNameValueDataListResponseWithDefaults(QueryResponse response, String[] defaults, boolean emptyResponseDisabled) { - NameValueDataListResponse result = new NameValueDataListResponse(); - result.setvNameValues(generateLevelCountData(response, defaults, emptyResponseDisabled)); - return result; - } - - public NodeListResponse generateServiceNodeTreeFromFacetResponse(QueryResponse queryResponse, - String firstHierarchy, String secondHierarchy, - String firstType, String secondType) { - NodeListResponse response = new NodeListResponse(); - if (queryResponse == null) { - return response; - } - NamedList> namedPivotFieldList = queryResponse.getFacetPivot(); - List firstLevelPivots = namedPivotFieldList.get(firstHierarchy); - List secondLevelPivots = namedPivotFieldList.get(secondHierarchy); - if (!CollectionUtils.isNotEmpty(firstLevelPivots) || !CollectionUtils.isNotEmpty(secondLevelPivots)) { - return response; - } - List nodeDataList = buidTreeData(firstLevelPivots, secondLevelPivots, firstType, secondType); - response.setvNodeList(nodeDataList); - return response; - } - - public NodeListResponse generateOneLevelServiceNodeTree(QueryResponse queryResponse, String componentLevelHirachy) { - NodeListResponse response = new NodeListResponse(); - List datatList = new ArrayList<>(); - List> listPivotField = new ArrayList<>(); - NamedList> namedList = queryResponse.getFacetPivot(); - if (namedList != null) { - listPivotField = namedList.getAll(componentLevelHirachy); - } - List secondHirarchicalPivotFields = null; - if (listPivotField == null || listPivotField.isEmpty()) { - return response; - } else { - secondHirarchicalPivotFields = listPivotField.get(0); - } - for (PivotField singlePivotField : secondHirarchicalPivotFields) { - if (singlePivotField != null) { - NodeData comp = new NodeData(); - comp.setName("" + singlePivotField.getValue()); - List levelList = singlePivotField.getPivot(); - List levelCountList = new ArrayList<>(); - comp.setLogLevelCount(levelCountList); - if (levelList != null) { - for (PivotField levelPivot : levelList) { - NameValueData level = new NameValueData(); - level.setName(("" + levelPivot.getValue()).toUpperCase()); - level.setValue("" + levelPivot.getCount()); - levelCountList.add(level); - } - } - datatList.add(comp); - } - } - response.setvNodeList(datatList); - return response; - } - - private List buidTreeData(List firstHirarchicalPivotFields, - List secondHirarchicalPivotFields, - String firstPriority, String secondPriority) { - List extensionTree = new ArrayList<>(); - if (firstHirarchicalPivotFields != null) { - for (PivotField pivotHost : firstHirarchicalPivotFields) { - if (pivotHost != null) { - NodeData hostNode = new NodeData(); - String name = (pivotHost.getValue() == null ? 
"" : "" + pivotHost.getValue()); - String value = "" + pivotHost.getCount(); - if (StringUtils.isNotBlank(name)) { - hostNode.setName(name); - } - if (StringUtils.isNotBlank(value)) { - hostNode.setValue(value); - } - if (StringUtils.isNotBlank(firstPriority)) { - hostNode.setType(firstPriority); - } - - hostNode.setParent(true); - hostNode.setRoot(true); - PivotField hostPivot = null; - for (PivotField searchHost : secondHirarchicalPivotFields) { - if (StringUtils.isNotBlank(hostNode.getName()) - && hostNode.getName().equals(searchHost.getValue())) { - hostPivot = searchHost; - break; - } - } - List pivotLevelHost = hostPivot == null? null : hostPivot.getPivot(); - if (pivotLevelHost != null) { - Collection logLevelCount = new ArrayList<>(); - for (PivotField pivotLevel : pivotLevelHost) { - if (pivotLevel != null) { - NameValueData vnameValue = new NameValueData(); - String levelName = (pivotLevel.getValue() == null ? "" : "" + pivotLevel.getValue()); - vnameValue.setName(levelName.toUpperCase()); - vnameValue.setValue("" + pivotLevel.getCount()); - logLevelCount.add(vnameValue); - } - } - hostNode.setLogLevelCount(logLevelCount); - } - List pivotComponents = pivotHost.getPivot(); - if (pivotComponents != null) { - Collection componentNodes = new ArrayList<>(); - for (PivotField pivotComp : pivotComponents) { - if (pivotComp != null) { - NodeData compNode = new NodeData(); - String compName = (pivotComp.getValue() == null ? "" : "" + pivotComp.getValue()); - compNode.setName(compName); - if (StringUtils.isNotBlank(secondPriority)) { - compNode.setType(secondPriority); - } - compNode.setValue("" + pivotComp.getCount()); - compNode.setParent(false); - compNode.setRoot(false); - List pivotLevels = pivotComp.getPivot(); - if (pivotLevels != null) { - Collection logLevelCount = new ArrayList<>(); - for (PivotField pivotLevel : pivotLevels) { - if (pivotLevel != null) { - NameValueData vnameValue = new NameValueData(); - String compLevel = pivotLevel.getValue() == null ? 
"" : "" + pivotLevel.getValue(); - vnameValue.setName((compLevel).toUpperCase()); - - vnameValue.setValue("" + pivotLevel.getCount()); - logLevelCount.add(vnameValue); - } - } - compNode.setLogLevelCount(logLevelCount); - } - componentNodes.add(compNode); - } - } - hostNode.setChilds(componentNodes); - } - extensionTree.add(hostNode); - } - } - } - - return extensionTree; - } - - private List generateLevelCountData(QueryResponse queryResponse, String[] defaults, boolean emptyResponseEnabled) { - List nameValues = Lists.newLinkedList(); - Map linkedMap = Maps.newLinkedHashMap(); - List counts = generateCount(queryResponse); - if (!CollectionUtils.isNotEmpty(counts) && emptyResponseEnabled) { - return nameValues; - } - for (String defaultValue : defaults) { - NameValueData nameValue = new NameValueData(); - nameValue.setName(defaultValue); - nameValue.setValue("0"); - linkedMap.put(defaultValue, nameValue); - } - if (CollectionUtils.isNotEmpty(counts)) { - for (Count count : counts) { - if (!linkedMap.containsKey(count.getName())) { - NameValueData nameValue = new NameValueData(); - String name = count.getName().toUpperCase(); - nameValue.setName(name); - nameValue.setValue(String.valueOf(count.getCount())); - linkedMap.put(name, nameValue); - } - } - } - - for (Map.Entry nameValueDataEntry : linkedMap.entrySet()) { - nameValues.add(nameValueDataEntry.getValue()); - } - return nameValues; - } - - public CountDataListResponse generateCountResponseByField(QueryResponse response, String field) { - CountDataListResponse collection = new CountDataListResponse(); - List vCounts = new ArrayList<>(); - if (response == null) { - return collection; - } - FacetField facetFields = response.getFacetField(field); - if (facetFields == null) { - return collection; - } - List fieldList = facetFields.getValues(); - - if (fieldList == null) { - return collection; - } - - for (Count cnt : fieldList) { - if (cnt != null) { - CountData vCount = new CountData(); - vCount.setName(cnt.getName()); - vCount.setCount(cnt.getCount()); - vCounts.add(vCount); - } - } - collection.setvCounts(vCounts); - return collection; - } - - public GraphDataListResponse generateSimpleGraphResponse(QueryResponse response, String hierarchy) { - GraphDataListResponse graphInfo = new GraphDataListResponse(); - if (response == null) { - return graphInfo; - } - List> hirarchicalPivotField = new ArrayList<>(); - List dataList = new ArrayList<>(); - NamedList> namedList = response.getFacetPivot(); - if (namedList != null) { - hirarchicalPivotField = namedList.getAll(hierarchy); - } - if (!hirarchicalPivotField.isEmpty()) { - dataList = buidGraphData(hirarchicalPivotField.get(0)); - } - if (!dataList.isEmpty()) { - graphInfo.setGraphData(dataList); - } - - return graphInfo; - } - - private List buidGraphData(List pivotFields) { - List logList = new ArrayList<>(); - if (pivotFields != null) { - for (PivotField pivotField : pivotFields) { - if (pivotField != null) { - GraphData logLevel = new GraphData(); - logLevel.setName("" + pivotField.getValue()); - logLevel.setCount(Long.valueOf(pivotField.getCount())); - if (pivotField.getPivot() != null) { - logLevel.setDataList(buidGraphData(pivotField.getPivot())); - } - logList.add(logLevel); - } - } - } - return logList; - } - - - public HostLogFilesResponse generateHostLogFilesResponse(QueryResponse queryResponse) { - HostLogFilesResponse response = new HostLogFilesResponse(); - Map> componentLogFiles = response.getHostLogFiles(); - - NamedList> facetPivot = queryResponse.getFacetPivot(); - List 
componentFields = facetPivot.get(COMPONENT + "," + PATH); - for (PivotField componentField : componentFields) { - String component = (String)componentField.getValue(); - LinkedList logFileList = new LinkedList<>(); - componentLogFiles.put(component, logFileList); - - for (PivotField logField : componentField.getPivot()) { - // the log file names are in increasing order of their cardinality, using addFirst reverses the list - logFileList.addFirst((String)logField.getValue()); - } - } - - return response; - } - - public Map generateComponentMetadata(QueryResponse queryResponse, - String facetField, Map componetnLabels) { - Map result = new HashMap<>(); - if (queryResponse == null) { - return result; - } - FacetField facetFields = queryResponse.getFacetField(facetField); - if (facetFields == null) { - return result; - } - List counts = facetFields.getValues(); - if (counts == null) { - return result; - } - for (Count count : counts) { - if (count.getName() != null) { - String label = componetnLabels.get(count.getName()); - String fallbackedLabel = labelFallbackHandler.fallbackIfRequired(count.getName(), label, true, false, true); - result.put(count.getName(), fallbackedLabel); - } - } - return result; - } - - public ServiceComponentMetadataWrapper generateGroupedComponentMetadataResponse(QueryResponse queryResponse, String pivotFields, - Map groupLabels, - Map componentLabels) { - List componentMetadata = new ArrayList<>(); - Map groupsMetadata = new HashMap<>(); - - if (queryResponse == null) { - return new ServiceComponentMetadataWrapper(componentMetadata, groupsMetadata); - } - NamedList> facetPivotResponse = queryResponse.getFacetPivot(); - if (facetPivotResponse == null || facetPivotResponse.size() < 1) { - return new ServiceComponentMetadataWrapper(componentMetadata, groupsMetadata); - } - if (CollectionUtils.isEmpty(facetPivotResponse.get(pivotFields))) { - return new ServiceComponentMetadataWrapper(componentMetadata, groupsMetadata); - } - - for (PivotField pivotField : facetPivotResponse.get(pivotFields)) { - if (pivotField != null && pivotField.getValue() != null) { - String componentName = pivotField.getValue().toString(); - String groupName = null; - if (CollectionUtils.isNotEmpty(pivotField.getPivot())) { - Object groupValue = pivotField.getPivot().get(0).getValue(); - if (groupValue != null) { - groupName = groupValue.toString(); - groupsMetadata.put(groupName, groupLabels.getOrDefault(groupName, null)); - } - } - String label = componentLabels.get(componentName); - String fallbackedLabel = labelFallbackHandler.fallbackIfRequired(componentName, label, true, false, true); - componentMetadata.add((new ComponentMetadata(componentName, fallbackedLabel, groupName))); - - } - } - - return new ServiceComponentMetadataWrapper(componentMetadata, groupsMetadata); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/SolrConstants.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/SolrConstants.java deleted file mode 100644 index a999f930d10..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/SolrConstants.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
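
ResponseDataGenerator, deleted above, only reshapes SolrJ responses; it assumes the caller already issued a facet/pivot query. A rough sketch of the kind of SolrJ request whose QueryResponse.getFacetPivot() output the tree and graph builders above consume; the endpoint, collection name and pivot fields are illustrative assumptions:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class PivotQuerySketch {
      public static void main(String[] args) throws Exception {
        // Assumed endpoint and collection; not taken from the deleted code.
        try (HttpSolrClient solr = new HttpSolrClient.Builder("http://localhost:8886/solr/hadoop_logs").build()) {
          SolrQuery query = new SolrQuery("*:*");
          query.setRows(0);
          query.setFacet(true);
          // e.g. host -> component -> level, matching the host/component/level tree built above
          query.addFacetPivotField("host,type,level");
          QueryResponse response = solr.query(query);
          // getFacetPivot() is the NamedList<List<PivotField>> the generator methods walk
          System.out.println(response.getFacetPivot());
        }
      }
    }
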
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.solr; - -public class SolrConstants { - - private SolrConstants() { - } - - public class CommonLogConstants { - private CommonLogConstants() { - } - - public static final String ID = "id"; - public static final String SEQUENCE_ID = "seq_num"; - public static final String BUNDLE_ID = "bundle_id"; - public static final String CASE_ID = "case_id"; - public static final String CLUSTER = "cluster"; - public static final String LOG_MESSAGE = "log_message"; - public static final String LOGFILE_LINE_NUMBER = "logfile_line_number"; - public static final String EVENT_DURATION_MD5 = "event_dur_m5"; - public static final String FILE = "file"; - public static final String EVENT_COUNT = "event_count"; - public static final String EVENT_MD5 = "event_md5"; - public static final String MESSAGE_MD5= "message_md5"; - public static final String TTL = "_ttl_"; - public static final String EXPIRE_AT = "_expire_at_"; - public static final String VERSION = "_version_"; - public static final String ROUTER_FIELD = "_router_field_"; - public static final String TYPE = "type"; - } - - public class ServiceLogConstants { - - private ServiceLogConstants() { - } - - public static final String BUNDLE_ID = "bundle_id"; - public static final String LOGTIME = "logtime"; - public static final String COMPONENT = "type"; - public static final String LOG_MESSAGE = "log_message"; - public static final String KEY_LOG_MESSAGE = "key_log_message"; - public static final String HOST = "host"; - public static final String GROUP = "group"; - public static final String LEVEL = "level"; - public static final String THREAD_NAME = "thread_name"; - public static final String METHOD = "method"; - public static final String LOGGER_NAME = "logger_name"; - public static final String LINE_NUMBER = "line_number"; - public static final String PATH = "path"; - public static final String IP = "ip"; - public static final String STORED_TOKEN_DYNAMIC_FIELDS = "std_*"; - public static final String KEY_DYNAMIC_FIELDS = "key_*"; - public static final String WS_DYNAMIC_FIELDS = "ws_*"; - public static final String SDI_DYNAMIC_FIELDS = "sdi_*"; - public static final String INT_DYNAMIC_FIELDS = "*_i"; - public static final String LONG_DYNAMIC_FIELDS = "*_l"; - public static final String BOOLEAN_DYNAMIC_FIELDS = "*_b"; - public static final String STRING_DYNAMIC_FIELDS = "*_s"; - } - - public class AuditLogConstants { - private AuditLogConstants() { - } - - public static final String AUDIT_LOG_TYPE = "logType"; - public static final String AUDIT_POLICY = "policy"; - public static final String AUDIT_ACCESS = "access"; - public static final String AUDIT_ACTION = "action"; - public static final String AUDIT_AGENT = "agent"; - public static final String AUDIT_AGENT_HOST = "agentHost"; - public static final String AUDIT_CLIENT_IP = "cliIP"; - public static final String AUDIT_CLIENT_TYPE = "cliType"; - public static final String 
AUDIT_REQEST_CONTEXT = "reqContext"; - public static final String AUDIT_ENFORCER = "enforcer"; - public static final String AUDIT_REASON = "reason"; - public static final String AUDIT_PROXY_USERS = "proxyUsers"; - public static final String AUDIT_REPO_TYPE = "repoType"; - public static final String AUDIT_REQEST_DATA = "reqData"; - public static final String AUDIT_RESPONSE_TYPE = "resType"; - public static final String AUDIT_SESSION = "sess"; - public static final String AUDIT_TEXT = "text"; - public static final String AUDIT_RESULT = "result"; - public static final String AUDIT_COMPONENT = "repo"; - public static final String AUDIT_EVTTIME = "evtTime"; - public static final String AUDIT_REQUEST_USER = "reqUser"; - public static final String AUDIT_RESOURCE = "resource"; - public static final String AUDIT_TAGS = "tags"; - public static final String AUDIT_TAGS_STR = "tags_str"; - } - - public class EventHistoryConstants { - private EventHistoryConstants() { - } - - public static final String ID = "id"; - public static final String USER_NAME = "username"; - public static final String VALUES = "jsons"; - public static final String FILTER_NAME = "filtername"; - public static final String ROW_TYPE = "rowtype"; - public static final String SHARE_NAME_LIST = "share_username_list"; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java deleted file mode 100644 index 9de05db66fd..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
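
SolrConstants, deleted above, centralizes the Solr field names used across the server. A short sketch of how such constants are typically combined into a SolrJ query; the field values mirror the constants shown in the diff, while the query itself and the component value are illustrative:

    import org.apache.solr.client.solrj.SolrQuery;

    public class ConstantsUsageSketch {
      // Mirrors SolrConstants.ServiceLogConstants values from the deleted file.
      static final String LEVEL = "level";
      static final String COMPONENT = "type";
      static final String LOGTIME = "logtime";

      public static void main(String[] args) {
        SolrQuery query = new SolrQuery("*:*");
        query.addFilterQuery(LEVEL + ":ERROR");
        query.addFilterQuery(COMPONENT + ":hdfs_namenode"); // example component value, assumed
        query.setSort(LOGTIME, SolrQuery.ORDER.desc);
        System.out.println(query);
      }
    }
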
- */ -package org.apache.ambari.logsearch.solr.model; - -import org.apache.ambari.logsearch.model.response.AuditLogData; -import org.apache.solr.client.solrj.beans.Field; - -import java.util.Date; -import java.util.List; - -import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.*; - -public class SolrAuditLogData extends SolrCommonLogData implements AuditLogData { - - @Field(AUDIT_LOG_TYPE) - private String logType; - - @Field(AUDIT_POLICY) - private String policy; - - @Field(AUDIT_ACCESS) - private String access; - - @Field(AUDIT_ACTION) - private String action; - - @Field(AUDIT_AGENT) - private String agent; - - @Field(AUDIT_AGENT_HOST) - private String agentHost; - - @Field(AUDIT_CLIENT_IP) - private String clientIp; - - @Field(AUDIT_CLIENT_TYPE) - private String clientType; - - @Field(AUDIT_REQEST_CONTEXT) - private String requestContext; - - @Field(AUDIT_ENFORCER) - private String enforcer; - - @Field(AUDIT_EVTTIME) - private Date eventTime; - - @Field(AUDIT_REASON) - private String reason; - - @Field(AUDIT_PROXY_USERS) - private List proxyUsers; - - @Field(AUDIT_COMPONENT) - private String repo; - - @Field(AUDIT_REPO_TYPE) - private Integer repoType; - - @Field(AUDIT_REQEST_DATA) - private String requestData; - - @Field(AUDIT_REQUEST_USER) - private String requestUser; - - @Field(AUDIT_RESPONSE_TYPE) - private String responseType; - - @Field(AUDIT_RESOURCE) - private String resource; - - @Field(AUDIT_RESULT) - private Integer result; - - @Field(AUDIT_SESSION) - private String session; - - @Field(AUDIT_TAGS) - private List tags; - - @Field(AUDIT_TAGS_STR) - private String tagsStr; - - @Field(AUDIT_TEXT) - private String text; - - @Override - public String getText() { - return text; - } - - @Override - public void setText(String text) { - this.text = text; - } - - @Override - public String getTagsStr() { - return tagsStr; - } - - @Override - public void setTagsStr(String tagsStr) { - this.tagsStr = tagsStr; - } - - @Override - public List getTags() { - return tags; - } - - @Override - public void setTags(List tags) { - this.tags = tags; - } - - @Override - public String getSession() { - return session; - } - - @Override - public void setSession(String session) { - this.session = session; - } - - @Override - public Integer getResult() { - return result; - } - - @Override - public void setResult(Integer result) { - this.result = result; - } - - @Override - public String getResource() { - return resource; - } - - @Override - public void setResource(String resource) { - this.resource = resource; - } - - @Override - public String getResponseType() { - return responseType; - } - - public void setResponseType(String responseType) { - this.responseType = responseType; - } - - @Override - public String getRequestUser() { - return requestUser; - } - - @Override - public void setRequestUser(String requestUser) { - this.requestUser = requestUser; - } - - @Override - public String getRequestData() { - return requestData; - } - - @Override - public void setRequestData(String requestData) { - this.requestData = requestData; - } - - @Override - public Integer getRepoType() { - return repoType; - } - - @Override - public void setRepoType(Integer repoType) { - this.repoType = repoType; - } - - @Override - public String getRepo() { - return repo; - } - - @Override - public void setRepo(String repo) { - this.repo = repo; - } - - @Override - public List getProxyUsers() { - return proxyUsers; - } - - @Override - public void setProxyUsers(List proxyUsers) { - this.proxyUsers = 
proxyUsers; - } - - @Override - public String getReason() { - return reason; - } - - @Override - public void setReason(String reason) { - this.reason = reason; - } - - @Override - public Date getEventTime() { - return eventTime; - } - - @Override - public void setEventTime(Date eventTime) { - this.eventTime = eventTime; - } - - @Override - public String getEnforcer() { - return enforcer; - } - - @Override - public void setEnforcer(String enforcer) { - this.enforcer = enforcer; - } - - @Override - public String getRequestContext() { - return requestContext; - } - - @Override - public void setRequestContext(String requestContext) { - this.requestContext = requestContext; - } - - @Override - public String getClientType() { - return clientType; - } - - @Override - public void setClientType(String clientType) { - this.clientType = clientType; - } - - @Override - public String getClientIp() { - return clientIp; - } - - @Override - public void setClientIp(String clientIp) { - this.clientIp = clientIp; - } - - @Override - public String getAgent() { - return agent; - } - - @Override - public void setAgent(String agent) { - this.agent = agent; - } - - @Override - public String getAgentHost() { - return agentHost; - } - - @Override - public void setAgentHost(String agentHost) { - this.agentHost = agentHost; - } - - @Override - public String getAction() { - return action; - } - - @Override - public void setAction(String action) { - this.action = action; - } - - @Override - public String getAccess() { - return access; - } - - @Override - public void setAccess(String access) { - this.access = access; - } - - @Override - public String getPolicy() { - return policy; - } - - @Override - public void setPolicy(String policy) { - this.policy = policy; - } - - @Override - public String getLogType() { - return logType; - } - - @Override - public void setLogType(String logType) { - this.logType = logType; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java deleted file mode 100644 index 1cdc572c94e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
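
SolrAuditLogData above (and the related Solr*LogData classes that follow) are plain SolrJ beans: each @Field annotation names the Solr field the document binder copies into that member. A hedged sketch of the usual binding path; the endpoint, collection and query are assumptions, only the bean/annotation mechanics are the point:

    import java.util.List;

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.beans.Field;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class BeanBindingSketch {
      // Tiny stand-in bean; the real SolrAuditLogData maps many more fields.
      public static class AuditRow {
        @Field("reqUser") public String requestUser;
        @Field("repo")    public String repo;
        @Field("evtTime") public java.util.Date eventTime;
      }

      public static void main(String[] args) throws Exception {
        try (HttpSolrClient solr = new HttpSolrClient.Builder("http://localhost:8886/solr/audit_logs").build()) {
          QueryResponse response = solr.query(new SolrQuery("*:*"));
          // getBeans() drives DocumentObjectBinder over the @Field annotations
          List<AuditRow> rows = response.getBeans(AuditRow.class);
          rows.forEach(r -> System.out.println(r.requestUser + " @ " + r.repo));
        }
      }
    }
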
- */ -package org.apache.ambari.logsearch.solr.model; - -import org.apache.ambari.logsearch.model.response.CommonLogData; -import org.apache.solr.client.solrj.beans.Field; - -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.*; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.INT_DYNAMIC_FIELDS; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LONG_DYNAMIC_FIELDS; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.BOOLEAN_DYNAMIC_FIELDS; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.STRING_DYNAMIC_FIELDS; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.KEY_DYNAMIC_FIELDS; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.STORED_TOKEN_DYNAMIC_FIELDS; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.WS_DYNAMIC_FIELDS; - -public class SolrCommonLogData implements CommonLogData { - - @Field(ID) - private String id; - - @Field(BUNDLE_ID) - private String bundleId; - - @Field(CASE_ID) - private String caseId; - - @Field(CLUSTER) - private String cluster; - - @Field(SEQUENCE_ID) - private Long seqNum; - - @Field(LOG_MESSAGE) - private String logMessage; - - @Field(LOGFILE_LINE_NUMBER) - private Integer logFileLineNumber; - - @Field(EVENT_DURATION_MD5) - private Long eventDurationMs; - - @Field(FILE) - private String file; - - @Field(TYPE) - private String type; - - @Field(EVENT_COUNT) - private Long eventCount; - - @Field(EVENT_MD5) - private String eventMd5; - - @Field(MESSAGE_MD5) - private String messageMd5; - - @Field(TTL) - private String ttl; - - @Field(EXPIRE_AT) - private Date expire; - - @Field(VERSION) - private Long version; - - @Field(ROUTER_FIELD) - private Integer routerField; - - @Field(STORED_TOKEN_DYNAMIC_FIELDS) - private Map stdDynamicFields; - - @Field(KEY_DYNAMIC_FIELDS) - private Map keyDynamicFields; - - @Field(WS_DYNAMIC_FIELDS) - private Map wsDynamicFields; - - @Field(INT_DYNAMIC_FIELDS) - private Map intDynamicFields; - - @Field(LONG_DYNAMIC_FIELDS) - private Map longDynamicFields; - - @Field(STRING_DYNAMIC_FIELDS) - private Map stringDynamicFields; - - @Field(BOOLEAN_DYNAMIC_FIELDS) - private Map booleanDynamicFields; - - @Override - public String getId() { - return this.id; - } - - @Override - public void setId(String id) { - this.id = id; - } - - @Override - public String getCaseId() { - return this.caseId; - } - - @Override - public void setCaseId(String caseId) { - this.caseId = caseId; - } - - @Override - public String getLogMessage() { - return this.logMessage; - } - - @Override - public String getBundleId() { - return bundleId; - } - - @Override - public void setBundleId(String bundleId) { - this.bundleId = bundleId; - } - - @Override - public Integer getLogFileLineNumber() { - return logFileLineNumber; - } - - @Override - public void setLogFileLineNumber(Integer logFileLineNumber) { - this.logFileLineNumber = logFileLineNumber; - } - - @Override - public void setLogMessage(String logMessage) { - this.logMessage = logMessage; - } - - @Override - public Long getEventDurationMs() { - return eventDurationMs; - } - - @Override - public void setEventDurationMs(Long eventDurationMs) { - this.eventDurationMs = eventDurationMs; - } - - @Override - public String getFile() { - return file; - } - - @Override - public void setFile(String 
file) { - this.file = file; - } - - @Override - public Long getSeqNum() { - return seqNum; - } - - @Override - public void setSeqNum(Long seqNum) { - this.seqNum = seqNum; - } - - @Override - public String getMessageMd5() { - return messageMd5; - } - - @Override - public void setMessageMd5(String messageMd5) { - this.messageMd5 = messageMd5; - } - - @Override - public String getEventMd5() { - return eventMd5; - } - - @Override - public void setEventMd5(String eventMd5) { - this.eventMd5 = eventMd5; - } - - @Override - public String getCluster() { - return cluster; - } - - @Override - public void setCluster(String cluster) { - this.cluster = cluster; - } - - @Override - public Long getEventCount() { - return eventCount; - } - - @Override - public void setEventCount(Long eventCount) { - this.eventCount = eventCount; - } - - @Override - public String getTtl() { - return this.ttl; - } - - @Override - public void setTtl(String ttl) { - this.ttl = ttl; - } - - @Override - public Date getExpire() { - return expire; - } - - @Override - public void setExpire(Date expire) { - this.expire = expire; - } - - @Override - public Long getVersion() { - return version; - } - - @Override - public void setVersion(Long version) { - this.version = version; - } - - @Override - public Integer getRouterField() { - return this.routerField; - } - - @Override - public void setRouterField(Integer routerField) { - this.routerField = routerField; - } - - @Override - public String getType() { - return type; - } - - @Override - public void setType(String type) { - this.type = type; - } - - @Override - public Map getAllDynamicFields() { - Map allDynamicFields = new HashMap<>(); - - if (stdDynamicFields != null) { - allDynamicFields.putAll(stdDynamicFields); - } - if (keyDynamicFields != null) { - allDynamicFields.putAll(keyDynamicFields); - } - if (wsDynamicFields != null) { - allDynamicFields.putAll(wsDynamicFields); - } - - if (intDynamicFields != null) { - allDynamicFields.putAll(intDynamicFields); - } - - if (longDynamicFields != null) { - allDynamicFields.putAll(longDynamicFields); - } - - if (stringDynamicFields != null) { - allDynamicFields.putAll(stringDynamicFields); - } - - if (booleanDynamicFields != null) { - allDynamicFields.putAll(booleanDynamicFields); - } - - return allDynamicFields; - } - - public void setStdDynamicFields(Map stdDynamicFields) { - this.stdDynamicFields = stdDynamicFields; - } - - public void setKeyDynamicFields(Map keyDynamicFields) { - this.keyDynamicFields = keyDynamicFields; - } - - public void setWsDynamicFields(Map wsDynamicFields) { - this.wsDynamicFields = wsDynamicFields; - } - - public void setIntDynamicFields(Map intDynamicFields) { - this.intDynamicFields = intDynamicFields; - } - - public void setLongDynamicFields(Map longDynamicFields) { - this.longDynamicFields = longDynamicFields; - } - - public void setStringDynamicFields(Map stringDynamicFields) { - this.stringDynamicFields = stringDynamicFields; - } - - public void setBooleanDynamicFields(Map booleanDynamicFields) { - this.booleanDynamicFields = booleanDynamicFields; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java deleted file mode 100644 index 1a637002fe3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java +++ 
/dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.solr.model; - -import org.apache.ambari.logsearch.model.response.ComponentTypeLogData; -import org.apache.solr.client.solrj.beans.Field; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; - -public class SolrComponentTypeLogData implements ComponentTypeLogData { - - @Field(COMPONENT) - private String type; - - @Override - public String getType() { - return this.type; - } - - @Override - public void setType(String type) { - this.type = type; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java deleted file mode 100644 index 8391815e53a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.solr.model; - -import org.apache.ambari.logsearch.model.response.HostLogData; -import org.apache.solr.client.solrj.beans.Field; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; - -public class SolrHostLogData implements HostLogData { - - @Field(HOST) - private String host; - - @Override - public String getHost() { - return host; - } - - @Override - public void setHost(String host) { - this.host = host; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java deleted file mode 100644 index 59e6849e755..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.solr.model; - -import org.apache.ambari.logsearch.model.response.ServiceLogData; -import org.apache.solr.client.solrj.beans.Field; - -import java.util.Date; -import java.util.Map; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.*; - -public class SolrServiceLogData extends SolrCommonLogData implements ServiceLogData { - - @Field(LEVEL) - private String level; - - @Field(LINE_NUMBER) - private Integer lineNumber; - - @Field(LOGTIME) - private Date logTime; - - @Field(COMPONENT) - private String type; - - @Field(IP) - private String ip; - - @Field(PATH) - private String path; - - @Field(HOST) - private String host; - - @Field(GROUP) - private String group; - - @Field(LOGGER_NAME) - private String loggerName; - - @Field(METHOD) - private String method; - - @Field(SDI_DYNAMIC_FIELDS) - private Map sdiDynamicFields; - - @Override - public String getPath() { - return path; - } - - @Override - public void setPath(String path) { - this.path = path; - } - - @Override - public String getIp() { - return ip; - } - - @Override - public void setIp(String ip) { - this.ip = ip; - } - - @Override - public String getType() { - return type; - } - - @Override - public void setType(String type) { - this.type = type; - } - - @Override - public String getHost() { - return host; - } - - @Override - public void setHost(String host) { - this.host = host; - } - - @Override - public String getGroup() { - return group; - } - - @Override - public void setGroup(String group) { - this.group = group; - } - - @Override - public Date getLogTime() { - return logTime; - } - - @Override - public void setLogTime(Date logTime) { - this.logTime = logTime; - } - - @Override - public Integer getLineNumber() { - 
return lineNumber; - } - - @Override - public void setLineNumber(Integer lineNumber) { - this.lineNumber = lineNumber; - } - - @Override - public String getLevel() { - return level; - } - - @Override - public void setLevel(String level) { - this.level = level; - } - - @Override - public String getLoggerName() { - return loggerName; - } - - @Override - public void setLoggerName(String loggerName) { - this.loggerName = loggerName; - } - - @Override - public String getMethod() { - return method; - } - - @Override - public void setMethod(String method) { - this.method = method; - } - - public void setSdiDynamicFields(Map sdiDynamicFields) { - this.sdiDynamicFields = sdiDynamicFields; - } - - @Override - public Map getAllDynamicFields() { - Map dynamicFieldsMap = super.getAllDynamicFields(); - if (sdiDynamicFields != null) { - dynamicFieldsMap.putAll(sdiDynamicFields); - } - return dynamicFieldsMap; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java deleted file mode 100644 index 1cfe469f114..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.util; - -import java.security.SecureRandom; - -import org.springframework.security.authentication.encoding.Md5PasswordEncoder; - -public class CommonUtil { - private CommonUtil() { - throw new UnsupportedOperationException(); - } - - private static SecureRandom secureRandom = new SecureRandom(); - private static int counter = 0; - - public static String genGUI() { - return System.currentTimeMillis() + "_" + secureRandom.nextInt(1000) + "_" + counter++; - } - - private static final Md5PasswordEncoder md5Encoder = new Md5PasswordEncoder(); - public static String encryptPassword(String username, String password) { - return md5Encoder.encodePassword(password, username); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java deleted file mode 100644 index 4a0a01b49cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
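
CommonUtil.encryptPassword above relies on Spring Security's long-deprecated Md5PasswordEncoder, which, as far as I recall, hashes the password merged with the salt as password{salt}. A plain-JDK sketch of that shape, offered only as an assumption about the encoder's format, not as a drop-in replacement:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    public class Md5Sketch {
      // Assumed to mirror Md5PasswordEncoder.encodePassword(password, username):
      // MD5 over "password{username}", hex-encoded.
      static String encrypt(String username, String password) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] digest = md5.digest((password + "{" + username + "}").getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
          hex.append(String.format("%02x", b));
        }
        return hex.toString();
      }

      public static void main(String[] args) throws Exception {
        System.out.println(encrypt("admin", "secret"));
      }
    }
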
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.util; - -import java.text.DateFormat; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; -import java.util.Locale; -import java.util.TimeZone; -import java.util.concurrent.TimeUnit; - -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.time.DateUtils; -import org.apache.log4j.Logger; - -public class DateUtil { - - private DateUtil() { - throw new UnsupportedOperationException(); - } - - public static String convertDateWithMillisecondsToSolrDate(Date date) { - if (date == null) { - return ""; - } - SimpleDateFormat formatter = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH); - TimeZone timeZone = TimeZone.getTimeZone("GMT"); - formatter.setTimeZone(timeZone); - - return formatter.format(date); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/DownloadUtil.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/DownloadUtil.java deleted file mode 100644 index 85c14e0e6a1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/DownloadUtil.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
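
DateUtil.convertDateWithMillisecondsToSolrDate above formats dates in GMT using a pattern constant (LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z) that is defined outside this diff. A hedged java.time sketch of producing Solr's canonical UTC timestamp; the exact pattern string here is an assumption standing in for that constant:

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;
    import java.util.Date;

    public class SolrDateSketch {
      // Assumed Solr-style pattern; the real value lives in LogSearchConstants.
      private static final DateTimeFormatter SOLR_FORMAT =
          DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZone(ZoneOffset.UTC);

      static String toSolrDate(Date date) {
        return date == null ? "" : SOLR_FORMAT.format(date.toInstant());
      }

      public static void main(String[] args) {
        System.out.println(toSolrDate(new Date()));
        System.out.println(toSolrDate(Date.from(Instant.EPOCH))); // 1970-01-01T00:00:00.000Z
      }
    }
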
- */ -package org.apache.ambari.logsearch.util; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import com.google.common.base.Splitter; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.ambari.logsearch.model.request.impl.ServiceLogExportRequest; -import org.apache.ambari.logsearch.model.response.BarGraphData; -import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse; -import org.apache.ambari.logsearch.model.response.NameValueData; -import org.apache.ambari.logsearch.model.response.TemplateData; -import org.apache.commons.lang.StringUtils; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; - -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LINE_NUMBER; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOG_MESSAGE; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGGER_NAME; -import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.THREAD_NAME; -import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.FILE; - - -public class DownloadUtil { - - private DownloadUtil() { - throw new UnsupportedOperationException(); - } - - public static void fillModelsForLogFile(SolrDocumentList docList, Map models, ServiceLogExportRequest request, - String format, String from, String to) { - long numLogs = docList.getNumFound(); - List hosts = new ArrayList<>(); - List components = new ArrayList<>(); - List levels = new ArrayList<>(); - List logData = new ArrayList<>(); - for (SolrDocument doc : docList) { - if (doc != null) { - String hostname = (String) doc.getFieldValue(HOST); - String comp = (String) doc.getFieldValue(COMPONENT); - String level = (String) doc.getFieldValue(LEVEL); - - if (!hosts.contains(hostname)) { - hosts.add(hostname); - } - - if (!components.contains(comp)) { - components.add(comp); - } - - if (!levels.contains(level)) { - levels.add(level); - } - - StringBuffer textToWrite = new StringBuffer(); - - if (doc.getFieldValue(LOGTIME) != null) { - textToWrite.append(doc.getFieldValue(LOGTIME).toString() + " "); - } - if (doc.getFieldValue(LEVEL) != null) { - textToWrite.append(doc.getFieldValue(LEVEL).toString()).append(" "); - } - if (doc.getFieldValue(THREAD_NAME) != null) { - textToWrite.append(doc.getFieldValue(THREAD_NAME).toString().trim()).append(" "); - } - if (doc.getFieldValue(LOGGER_NAME) != null) { - textToWrite.append(doc.getFieldValue(LOGGER_NAME).toString().trim()).append(" "); - } - if (doc.getFieldValue(FILE) != null && doc.getFieldValue(LINE_NUMBER) != null) { - textToWrite - .append(doc.getFieldValue(FILE).toString()) - .append(":") - .append(doc.getFieldValue(LINE_NUMBER).toString()) - .append(" "); - } - if (doc.getFieldValue(LOG_MESSAGE) != null) { - textToWrite.append("- ") - .append(doc.getFieldValue(LOG_MESSAGE).toString()); - } - logData.add(new TemplateData((textToWrite.toString()))); - } - } - models.put("numberOfLogs", numLogs); - models.put("logs", logData); - models.put("hosts", "[ " + 
StringUtils.join(hosts, " ; ") + " ]"); - models.put("components", "[ " + StringUtils.join(components, " ; ") + " ]"); - models.put("format", format); - models.put("from", from); - models.put("levels", StringUtils.join(levels, ", ")); - models.put("to", to); - String includeString = request.getIncludeMessage(); - if (StringUtils.isBlank(includeString)) { - includeString = "\"\""; - } else { - List include = Splitter.on(request.getIncludeMessage()).splitToList(LogSearchConstants.I_E_SEPRATOR); - includeString = "\"" + StringUtils.join(include, "\", \"") + "\""; - } - models.put("iString", includeString); - - String excludeString = request.getExcludeMessage(); - if (StringUtils.isBlank(excludeString)) { - excludeString = "\"\""; - } else { - List exclude = Splitter.on(request.getExcludeMessage()).splitToList(LogSearchConstants.I_E_SEPRATOR); - excludeString = "\"" + StringUtils.join(exclude, "\", \"") + "\""; - } - models.put("eString", excludeString); - } - - public static void fillUserResourcesModel(Map models, BarGraphDataListResponse vBarUserDataList, BarGraphDataListResponse vBarResourceDataList) { - List usersDataList = new ArrayList<>(); - List resourceDataList = new ArrayList<>(); - Collection tableUserData = vBarUserDataList.getGraphData(); - for (BarGraphData graphData : tableUserData) { - String userName = graphData.getName().length() > 45 ? graphData.getName().substring(0, 45) : graphData.getName(); - Collection vnameValueList = graphData.getDataCount(); - usersDataList.add(new TemplateData(appendNameValueData(addBlank(userName), vnameValueList))); - } - Collection tableResourceData = vBarResourceDataList.getGraphData(); - for (BarGraphData graphData : tableResourceData) { - String resourceName = graphData.getName().length() > 45 ? graphData.getName().substring(0, 45) : graphData.getName(); - Collection vnameValueList = graphData.getDataCount(); - resourceDataList.add(new TemplateData(appendNameValueData(addBlank(resourceName), vnameValueList))); - } - models.put("users", usersDataList); - models.put("resources", resourceDataList); - models.put("usersSummary", vBarUserDataList.getGraphData().size()); - models.put("resourcesSummary", vBarResourceDataList.getGraphData().size()); - } - - private static String appendNameValueData(String data, Collection vnameValueList) { - int count = 0; - String blank = ""; - for (NameValueData vNameValue : vnameValueList) { - data += blank + vNameValue.getName() + " " + vNameValue.getValue(); - if (count == 0) - blank = addBlank(blank); - count++; - } - return data; - } - - private static String addBlank(String field) { - int blanks = 50; - int strSize = field.length(); - String fieldWithBlank = field; - for (int i = 0; i < blanks - strSize; i++) { - fieldWithBlank += " "; - } - return fieldWithBlank; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java deleted file mode 100644 index 5d4efbc2f14..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.util; - -import java.io.File; -import java.net.URL; - -import org.apache.log4j.Logger; -import org.apache.tools.ant.Project; -import org.apache.tools.ant.taskdefs.Chmod; -import org.apache.tools.ant.types.FileSet; - -public class FileUtil { - private static final Logger logger = Logger.getLogger(FileUtil.class); - - private FileUtil() { - throw new UnsupportedOperationException(); - } - - public static File getFileFromClasspath(String filename) { - URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(filename); - logger.debug("File Complete URI :" + fileCompleteUrl); - File file = null; - try { - file = new File(fileCompleteUrl.toURI()); - } catch (Exception exception) { - logger.debug(exception.getMessage(), exception.getCause()); - } - return file; - } - - public static void createDirectory(String dirPath) { - File dir = new File(dirPath); - if (!dir.exists()) { - logger.info("Directory " + dirPath + " does not exist. Creating ..."); - boolean mkDirSuccess = dir.mkdirs(); - if (!mkDirSuccess) { - String errorMessage = String.format("Could not create directory %s", dirPath); - logger.error(errorMessage); - throw new RuntimeException(errorMessage); - } - } - } - - public static void setPermissionOnDirectory(String dirPath, String permission) { - Chmod chmod = new Chmod(); - chmod.setProject(new Project()); - FileSet fileSet = new FileSet(); - fileSet.setDir(new File(dirPath)); - fileSet.setIncludes("**"); - chmod.addFileset(fileSet); - chmod.setPerm(permission); - chmod.execute(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java deleted file mode 100644 index 5ea6dd28523..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
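FileUtil.setPermissionOnDirectory above drives Ant's Chmod task programmatically over a FileSet. Purely as a sketch of the same idea (not the project's code), the permission change can also be expressed with java.nio, assuming a POSIX permission string such as "rwxr-x---":

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;
import java.util.stream.Stream;

public class DirPermissionSketch {
  // Recursively apply the given POSIX permission string (e.g. "rwxr-x---") to a directory tree.
  static void setPermissions(String dirPath, String permission) throws IOException {
    Set<PosixFilePermission> perms = PosixFilePermissions.fromString(permission);
    try (Stream<Path> paths = Files.walk(Paths.get(dirPath))) {
      for (Path p : (Iterable<Path>) paths::iterator) {
        Files.setPosixFilePermissions(p, perms);
      }
    }
  }
}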
- */ - -package org.apache.ambari.logsearch.util; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; - -import org.apache.ambari.logsearch.manager.MalformedInputException; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.codehaus.jettison.json.JSONArray; -import org.codehaus.jettison.json.JSONObject; - -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -public class JSONUtil { - private static final Logger logger = Logger.getLogger(JSONUtil.class); - - private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - private static final Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create(); - - private JSONUtil() { - throw new UnsupportedOperationException(); - } - - @SuppressWarnings("unchecked") - public static HashMap jsonToMapObject(String jsonStr) { - if (StringUtils.isBlank(jsonStr)) { - logger.info("jsonString is empty, cannot convert to map"); - return null; - } - ObjectMapper mapper = new ObjectMapper(); - try { - Object tempObject = mapper.readValue(jsonStr, new TypeReference>() {}); - return (HashMap) tempObject; - } catch (JsonMappingException | JsonParseException e) { - throw new MalformedInputException("Invalid json input data", e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public static HashMap readJsonFromFile(File jsonFile) { - ObjectMapper mapper = new ObjectMapper(); - try { - return mapper.readValue(jsonFile, new TypeReference>() {}); - } catch (IOException e) { - logger.error(e, e.getCause()); - } - return new HashMap<>(); - } - - public static String toJson(Object o) { - ObjectMapper om = new ObjectMapper(); - try { - return om.writeValueAsString(o); - } catch (JsonProcessingException e) { - throw new RuntimeException("Error while serializing object to json string", e); - } - } - - /** - * WRITE JOSN IN FILE ( Delete existing file and create new file) - */ - public static synchronized void writeJSONInFile(String jsonStr, File outputFile, boolean beautify) { - FileWriter fileWriter = null; - if (outputFile == null) { - logger.error("user_pass json file can't be null."); - return; - } - try { - boolean writePermission = false; - if (outputFile.exists() && outputFile.canWrite()) { - writePermission = true; - } - if (writePermission) { - fileWriter = new FileWriter(outputFile); - if (beautify) { - ObjectMapper mapper = new ObjectMapper(); - Object json = mapper.readValue(jsonStr, Object.class); - jsonStr = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(json); - } - fileWriter.write(jsonStr); - } else { - logger.error("Applcation does not have permission to update file to write enc_password. 
file="+ outputFile.getAbsolutePath()); - } - } catch (IOException e) { - logger.error("Error writing to password file.", e.getCause()); - } finally { - if (fileWriter != null) { - try { - fileWriter.flush(); - fileWriter.close(); - } catch (Exception exception) { - logger.error(exception); - } - } - } - } - - /** - * GET VALUES FROM JSON BY GIVING KEY RECURSIVELY - */ - @SuppressWarnings("rawtypes") - public static String getValuesOfKey(String jsonStr, String keyName, List values) { - if (values == null) { - return null; - } - Object jsonObj = null; - try { - jsonObj = new JSONObject(jsonStr); - } catch (Exception e) { - // ignore - } - if (jsonObj == null) { - try { - JSONArray jsonArray = new JSONArray(jsonStr); - for (int i = 0; i < jsonArray.length(); i++) { - String str = getValuesOfKey(jsonArray.getString(i), keyName, values); - if (str != null) { - return str; - } - } - - } catch (Exception e) { - // ignore - } - } - if (jsonObj == null) { - return null; - } - - Iterator iterator = ((JSONObject) jsonObj).keys(); - if (iterator == null) { - return null; - } - while (iterator.hasNext()) { - String key = (String) iterator.next(); - - if (key != null && key.equals(keyName)) { - - try { - String val = ((JSONObject) jsonObj).getString(key); - values.add(val); - } catch (Exception e) { - // ignore - } - - } else if ((((JSONObject) jsonObj).optJSONArray(key) != null) || (((JSONObject) jsonObj).optJSONObject(key) != null)) { - - String str = null; - try { - str = getValuesOfKey("" + ((JSONObject) jsonObj).getString(key), keyName, values); - } catch (Exception e) { - // ignore - } - if (str != null) { - return str; - } - - } - - } - return null; - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java deleted file mode 100644 index 95f3cdf0ef1..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java +++ /dev/null @@ -1,258 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
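JSONUtil.getValuesOfKey above walks the JSON tree by hand with Jettison objects. For comparison only, Jackson (already used by this class for parsing) can do the recursive lookup directly; the input document and key below are made up for illustration:

import java.util.List;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonKeySearchSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical input document.
    String json = "{\"a\":{\"name\":\"x\"},\"b\":[{\"name\":\"y\"},{\"other\":1}]}";
    JsonNode root = new ObjectMapper().readTree(json);
    // Collects every value stored under a field called "name", at any depth.
    List<String> names = root.findValuesAsText("name");
    System.out.println(names); // [x, y]
  }
}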
- */ - -package org.apache.ambari.logsearch.util; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import com.google.common.base.Splitter; -import org.apache.ambari.logsearch.common.LogSearchConstants; -import org.apache.lucene.analysis.core.KeywordTokenizerFactory; -import org.apache.lucene.analysis.path.PathHierarchyTokenizerFactory; -import org.apache.lucene.analysis.standard.StandardTokenizerFactory; -import org.apache.lucene.analysis.util.TokenizerFactory; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.solr.schema.DoublePointField; -import org.apache.solr.schema.FloatPointField; -import org.apache.solr.schema.IntPointField; -import org.apache.solr.schema.LongPointField; - -public class SolrUtil { - private SolrUtil() { - throw new UnsupportedOperationException(); - } - - /** - * Copied from Solr ClientUtils.escapeQueryChars and removed escaping * - */ - public static String escapeQueryChars(String s) { - StringBuilder sb = new StringBuilder(); - if (s != null) { - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - int ic = (int) c; - if (ic == 10) { - sb.append('\\'); - sb.append((char) 13); - } - // Note: Remove || c == '*' - // These characters are part of the query syntax and must be escaped - if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '(' - || c == ')' || c == ':' || c == '^' || c == '[' || c == ']' - || c == '\"' || c == '{' || c == '}' || c == '~' || c == '?' - || c == '|' || c == '&' || c == ';' || c == '/' - || Character.isWhitespace(c)) { - sb.append('\\'); - } - sb.append(c); - } - } - return sb.toString(); - } - - public static String escapeForStandardTokenizer(String search) { - if (search == null) { - return null; - } - String newSearch = escapeQueryChars(search.trim()); - if (StringUtils.containsWhitespace(newSearch)) { - newSearch = "\"" + newSearch + "\""; - } - - return newSearch; - } - - private static String makeSolrSearchStringWithoutAsterisk(String search) { - String newString = search.trim(); - String newSearch = newString.replaceAll("(?=[]\\[+&|!(){}^\"~=/$@%?:.\\\\])", "\\\\"); - newSearch = newSearch.replace("\n", "*"); - newSearch = newSearch.replace("\t", "*"); - newSearch = newSearch.replace("\r", "*"); - newSearch = newSearch.replace(" ", "\\ "); - newSearch = newSearch.replace("**", "*"); - newSearch = newSearch.replace("***", "*"); - return newSearch; - } - - public static String makeSearcableString(String search) { - if (StringUtils.isBlank(search)) { - return ""; - } - String newSearch = search.replaceAll("[\\t\\n\\r]", " "); - newSearch = newSearch.replaceAll("(?=[]\\[+&|!(){}^~=$/@%?:.\\\\-])", "\\\\"); - - return newSearch.replace(" ", "\\ "); - } - - public static void removeDoubleOrTripleEscapeFromFilters(SolrQuery solrQuery) { - String[] filterQueries = solrQuery.getFilterQueries(); - List newArray = new ArrayList<>(); - if (filterQueries != null && filterQueries.length > 0) { - for (String filterQuery : filterQueries) { - newArray.add(filterQuery.replaceAll("\\\\\\\\\\\\|\\\\\\\\", "\\\\")); - } - } - solrQuery.setFilterQueries(newArray.toArray(new String[0])); - } - - - private static boolean isSolrFieldNumber(Map fieldTypeInfoMap) { - if (MapUtils.isEmpty(fieldTypeInfoMap)) { - return false; - } - String fieldTypeClassName = (String) fieldTypeInfoMap.get("class"); - return fieldTypeClassName.equalsIgnoreCase(IntPointField.class.getSimpleName()) 
|| - fieldTypeClassName.equalsIgnoreCase(DoublePointField.class.getSimpleName()) || - fieldTypeClassName.equalsIgnoreCase(FloatPointField.class.getSimpleName()) || - fieldTypeClassName.equalsIgnoreCase(LongPointField.class.getSimpleName()); - } - - public static String putWildCardByType(String str, String fieldType, String fieldTypeMetaData) { - Map fieldTypeInfoMap = getFieldTypeInfoMap(fieldTypeMetaData); - if (StringUtils.isNotBlank(fieldType)) { - if (isSolrFieldNumber(fieldTypeInfoMap)) { - String value = putEscapeCharacterForNumber(str, fieldTypeInfoMap); - if (StringUtils.isNotBlank(value)) { - return value; - } else { - return null; - } - } else if (checkTokenizer(StandardTokenizerFactory.class, fieldTypeInfoMap)) { - return escapeForStandardTokenizer(str); - } else if (checkTokenizer(KeywordTokenizerFactory.class, fieldTypeInfoMap) || "string".equalsIgnoreCase(fieldType)) { - return makeSolrSearchStringWithoutAsterisk(str); - } else if (checkTokenizer(PathHierarchyTokenizerFactory.class, fieldTypeInfoMap)) { - return str; - } else { - return escapeQueryChars(str); - } - } - return str; - } - - private static String putEscapeCharacterForNumber(String str, Map fieldTypeInfoMap) { - if (StringUtils.isNotEmpty(str)) { - str = str.replace("*", ""); - } - String escapeCharSting = parseInputValueAsPerFieldType(str, fieldTypeInfoMap); - if (escapeCharSting == null || escapeCharSting.isEmpty()) { - return null; - } - escapeCharSting = escapeCharSting.replace("-", "\\-"); - return escapeCharSting; - } - - private static String parseInputValueAsPerFieldType(String str, Map fieldTypeInfoMap) { - try { - String className = (String) fieldTypeInfoMap.get("class"); - if (className.equalsIgnoreCase(DoublePointField.class.getSimpleName())) { - return "" + Double.parseDouble(str); - } else if (className.equalsIgnoreCase(FloatPointField.class.getSimpleName())) { - return "" + Float.parseFloat(str); - } else if (className.equalsIgnoreCase(LongPointField.class.getSimpleName())) { - return "" + Long.parseLong(str); - } else { - return "" + Integer.parseInt(str); - } - } catch (Exception e) { - return null; - } - } - - public static SolrQuery addListFilterToSolrQuery(SolrQuery solrQuery, String fieldName, String fieldValue) { - if (org.apache.commons.lang.StringUtils.isNotEmpty(fieldValue)) { - List clusters = Splitter.on(",").splitToList(fieldValue); - if (clusters.size() > 1) { - solrQuery.addFilterQuery(String.format("%s:(%s)", fieldName, org.apache.commons.lang.StringUtils.join(clusters, " OR "))); - } else { - solrQuery.addFilterQuery(String.format("%s:%s", fieldName, clusters.get(0))); - } - } - return solrQuery; - } - - private static Map getFieldTypeInfoMap(String fieldTypeMetaData) { - HashMap fieldTypeMap = JSONUtil.jsonToMapObject(fieldTypeMetaData); - if (fieldTypeMap == null) { - return new HashMap<>(); - } - String classname = (String) fieldTypeMap.get("class"); - if (StringUtils.isNotBlank(classname)) { - classname = classname.replace("solr.", ""); - fieldTypeMap.put("class", classname); - } - return fieldTypeMap; - } - - //============================================================================================================= - - public static void setFacetField(SolrQuery solrQuery, String facetField) { - solrQuery.setFacet(true); - setRowCount(solrQuery, 0); - solrQuery.set(LogSearchConstants.FACET_FIELD, facetField); - setFacetLimit(solrQuery, -1); - } - - public static void setFacetSort(SolrQuery solrQuery, String sortType) { - solrQuery.setFacet(true); - 
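To make the intent of addListFilterToSolrQuery and the facet helpers above concrete, here is a small SolrJ usage sketch; the field names are invented, and the literal parameter names stand in for the LogSearchConstants referenced in the code:

import org.apache.solr.client.solrj.SolrQuery;

public class SolrQuerySketch {
  public static void main(String[] args) {
    SolrQuery q = new SolrQuery("*:*");
    // What addListFilterToSolrQuery(q, "cluster", "cl1,cl2") boils down to for a multi-valued input.
    q.addFilterQuery("cluster:(cl1 OR cl2)");
    // What setFacetField(q, "level") boils down to: facet on one field, return no rows, no facet limit.
    q.setFacet(true);
    q.setRows(0);
    q.set("facet.field", "level");
    q.set("facet.limit", -1);
    System.out.println(q); // prints the assembled, URL-encoded query parameters
  }
}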
solrQuery.setFacetSort(sortType); - } - - public static void setFacetPivot(SolrQuery solrQuery, int mincount, String... hirarchy) { - solrQuery.setFacet(true); - setRowCount(solrQuery, 0); - solrQuery.set(LogSearchConstants.FACET_PIVOT, hirarchy); - solrQuery.set(LogSearchConstants.FACET_PIVOT_MINCOUNT, mincount); - setFacetLimit(solrQuery, -1); - } - - private static void setFacetLimit(SolrQuery solrQuery, int limit) { - solrQuery.set("facet.limit", limit); - } - - public static void setRowCount(SolrQuery solrQuery, int rows) { - if (rows > 0) { - solrQuery.setRows(rows); - } else { - solrQuery.setRows(0); - solrQuery.remove(LogSearchConstants.SORT); - } - } - - @SuppressWarnings("unchecked") - private static boolean checkTokenizer(Class tokenizerFactoryClass, Map fieldTypeInfoMap) { - HashMap analyzer = (HashMap) fieldTypeInfoMap.get("analyzer"); - HashMap tokenizerMap = (HashMap)MapUtils.getObject(analyzer, "tokenizer"); - if (tokenizerMap != null) { - String tokenizerClass = (String) tokenizerMap.get("class"); - if (StringUtils.isNotEmpty(tokenizerClass)) { - tokenizerClass = tokenizerClass.replace("solr.", ""); - return tokenizerClass.equalsIgnoreCase(tokenizerFactoryClass.getSimpleName()); - } - } - - return false; - } - -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java deleted file mode 100644 index fdec8d328cf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.authenticate; - -import java.io.IOException; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.log4j.Logger; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.web.authentication.ExceptionMappingAuthenticationFailureHandler; - -public class LogsearchAuthFailureHandler extends ExceptionMappingAuthenticationFailureHandler { - private static final Logger logger = Logger.getLogger(LogsearchAuthFailureHandler.class); - - public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response, AuthenticationException exception) - throws IOException, ServletException { - logger.debug(" AuthFailureHandler + onAuthenticationFailure"); - // TODO UI side handle status and redirect to login page with proper - response.setContentType("application/json"); - response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); - response.getOutputStream().println("{ \"error\": \"" + "login failed !!" + "\" }"); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthSuccessHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthSuccessHandler.java deleted file mode 100644 index 756cb2e2d79..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthSuccessHandler.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.authenticate; - -import java.io.IOException; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.springframework.security.core.Authentication; -import org.springframework.security.web.authentication.AuthenticationSuccessHandler; - -public class LogsearchAuthSuccessHandler implements AuthenticationSuccessHandler { - @Override - public void onAuthenticationSuccess(HttpServletRequest request, - HttpServletResponse response, Authentication authentication) - throws ServletException, IOException { - response.setStatus(HttpServletResponse.SC_OK); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java deleted file mode 100644 index 2439828e55a..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.authenticate; - -import java.io.IOException; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.log4j.Logger; -import org.springframework.security.core.Authentication; -import org.springframework.security.web.authentication.logout.LogoutSuccessHandler; -import org.springframework.security.web.authentication.logout.SimpleUrlLogoutSuccessHandler; - -public class LogsearchLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler implements LogoutSuccessHandler { - private static final Logger logger = Logger.getLogger(LogsearchLogoutSuccessHandler.class); - - @Override - public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication) - throws IOException, ServletException { - logger.debug("LogsearchLogoutSuccessHandler ::: onLogoutSuccess"); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/ConfigStateProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/ConfigStateProvider.java deleted file mode 100644 index fff431fabc5..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/ConfigStateProvider.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.web.filters; - -import static javax.ws.rs.core.Response.Status.CONFLICT; -import static javax.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.conf.global.LogSearchConfigState; - -/** - * Filter to decide whether the server is ready to serve requests which require Log Search configuration available. 
- */ -public class ConfigStateProvider implements StatusProvider { - - private static final String CONFIG_NOT_AVAILABLE = "Configuration is not available"; - private static final String CONFIG_API_DISABLED = "Configuration API is disabled"; - - private final LogSearchConfigState logSearchConfigState; - private final boolean enabled; - - public ConfigStateProvider(LogSearchConfigState logSearchConfigState, boolean enabled) { - this.logSearchConfigState = logSearchConfigState; - this.enabled = enabled; - } - - @Override - public StatusMessage getStatusMessage(String requestUri) { - if (!enabled) { - return StatusMessage.with(CONFLICT, CONFIG_API_DISABLED); - } - if (!logSearchConfigState.isLogSearchConfigAvailable()) { - return StatusMessage.with(SERVICE_UNAVAILABLE, CONFIG_NOT_AVAILABLE); - } - - return null; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/GlobalStateProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/GlobalStateProvider.java deleted file mode 100644 index ef9c343a528..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/GlobalStateProvider.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.filters; - -import static javax.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; - -public class GlobalStateProvider implements StatusProvider { - - private static final String ZNODE_NOT_READY_MSG = "ZNode is not available for %s. (connection string: %s, endpoint: %s)"; - private static final String ZK_CONFIG_NOT_READY_MSG = "Collection configuration has not uploaded yet for %s. (configuration name: %s, collection name: %s, endpoint: %s)"; - private static final String SOLR_COLLECTION_NOT_READY_MSG = "Solr has not accessible yet for %s collection. 
(endpoint: %s)"; - - private final SolrCollectionState solrCollectionState; - private final SolrPropsConfig solrPropsConfig; - - public GlobalStateProvider(SolrCollectionState solrCollectionState, SolrPropsConfig solrPropsConfig) { - this.solrCollectionState = solrCollectionState; - this.solrPropsConfig = solrPropsConfig; - } - - @Override - public StatusMessage getStatusMessage(String requestUri) { - if (!solrCollectionState.isZnodeReady()) { - return StatusMessage.with(SERVICE_UNAVAILABLE, String.format(ZNODE_NOT_READY_MSG, - solrPropsConfig.getCollection(), solrPropsConfig.getZkConnectString(), requestUri)); - } else if (!solrCollectionState.isConfigurationUploaded()) { - return StatusMessage.with(SERVICE_UNAVAILABLE, String.format(ZK_CONFIG_NOT_READY_MSG, solrPropsConfig.getCollection(), - solrPropsConfig.getConfigName(), solrPropsConfig.getCollection(), requestUri)); - } else if (!solrCollectionState.isSolrCollectionReady()) { - return StatusMessage.with(SERVICE_UNAVAILABLE, String.format(SOLR_COLLECTION_NOT_READY_MSG, - solrPropsConfig.getCollection(), requestUri)); - } - return null; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java deleted file mode 100644 index a328ace1be8..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.filters; - -import java.io.IOException; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint; - -public class LogsearchAuthenticationEntryPoint extends LoginUrlAuthenticationEntryPoint { - private static final Logger logger = LoggerFactory.getLogger(LogsearchAuthenticationEntryPoint.class); - private final AuthPropsConfig authPropsConfig; - - public LogsearchAuthenticationEntryPoint(String loginFormUrl, AuthPropsConfig authPropsConfig) { - super(loginFormUrl); - this.authPropsConfig = authPropsConfig; - } - - @Override - public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authException) - throws IOException, ServletException { - if (!authPropsConfig.isAuthJwtEnabled()) { // TODO: find better solution if JWT enabled, as it always causes an basic auth failure before JWT auth - logger.debug("Got 401 from request: {}", request.getRequestURI()); - response.sendError(HttpServletResponse.SC_UNAUTHORIZED); - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java deleted file mode 100644 index f5e7bcaeafd..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.filters; - -import org.apache.ambari.logsearch.conf.LogSearchHttpHeaderConfig; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletResponse; -import java.io.IOException; - -public class LogsearchCorsFilter implements Filter { - - private LogSearchHttpHeaderConfig logSearchHttpHeaderConfig; - - public LogsearchCorsFilter(LogSearchHttpHeaderConfig logSearchHttpHeaderConfig) { - this.logSearchHttpHeaderConfig = logSearchHttpHeaderConfig; - } - - @Override - public void init(FilterConfig filterConfig) throws ServletException { - } - - @Override - public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) - throws IOException, ServletException { - HttpServletResponse response = (HttpServletResponse) servletResponse; - response.setHeader("Access-Control-Allow-Origin", logSearchHttpHeaderConfig.getAccessControlAllowOrigin()); - response.setHeader("Access-Control-Allow-Headers", logSearchHttpHeaderConfig.getAccessControlAllowHeaders()); - response.setHeader("Access-Control-Allow-Credentials", logSearchHttpHeaderConfig.getAccessControlAllowCredentials()); - response.setHeader("Access-Control-Allow-Methods", logSearchHttpHeaderConfig.getAccessControlAllowMethods()); - filterChain.doFilter(servletRequest, servletResponse); - } - - @Override - public void destroy() { - - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchFilter.java deleted file mode 100644 index 98e02b3f866..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchFilter.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.filters; - -import static org.apache.ambari.logsearch.util.JSONUtil.toJson; - -import java.io.IOException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.web.util.matcher.RequestMatcher; - -public class LogsearchFilter implements Filter { - - private static final Logger LOG = LoggerFactory.getLogger(LogsearchFilter.class); - - private final RequestMatcher requestMatcher; - private final StatusProvider statusProvider; - - public LogsearchFilter(RequestMatcher requestMatcher, StatusProvider statusProvider) { - this(requestMatcher, statusProvider, true); - } - - public LogsearchFilter(RequestMatcher requestMatcher, StatusProvider statusProvider, boolean enabled) { - this.requestMatcher = requestMatcher; - this.statusProvider = statusProvider; - } - - @Override - public void init(FilterConfig filterConfig) throws ServletException { - } - - @Override - public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException { - HttpServletRequest request = (HttpServletRequest) servletRequest; - if (requestMatcher.matches(request)) { - StatusMessage errorResponse = statusProvider.getStatusMessage(request.getRequestURI()); - if (errorResponse != null) { - LOG.info("{} request is filtered out: {}", request.getRequestURL(), errorResponse.getMessage()); - HttpServletResponse resp = (HttpServletResponse) servletResponse; - resp.setStatus(errorResponse.getStatusCode()); - resp.setContentType("application/json"); - resp.getWriter().print(toJson(errorResponse)); - return; - } - } - filterChain.doFilter(servletRequest, servletResponse); - } - - @Override - public void destroy() { - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchJWTFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchJWTFilter.java deleted file mode 100644 index c75d9c129eb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchJWTFilter.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
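LogsearchFilter above simply short-circuits matched requests with the JSON StatusMessage returned by its StatusProvider. A possible wiring, shown only as a sketch (the URL pattern and the way the filter is registered are assumptions, not taken from this diff):

import org.apache.ambari.logsearch.web.filters.GlobalStateProvider;
import org.apache.ambari.logsearch.web.filters.LogsearchFilter;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
import org.springframework.security.web.util.matcher.RequestMatcher;

public class LogsearchFilterWiringSketch {
  // Assumed endpoint pattern; the real patterns are defined in the security configuration.
  static LogsearchFilter serviceLogsGate(GlobalStateProvider stateProvider) {
    RequestMatcher matcher = new AntPathRequestMatcher("/api/v1/service/logs/**");
    // While the Solr collection is not ready, matching requests get a 503 JSON body instead of reaching Solr.
    return new LogsearchFilter(matcher, stateProvider);
  }
}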
- */ -package org.apache.ambari.logsearch.web.filters; - -import org.apache.ambari.logsearch.auth.filter.AbstractJWTFilter; -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.dao.RoleDao; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.web.util.matcher.NegatedRequestMatcher; -import org.springframework.security.web.util.matcher.RequestMatcher; - -import java.util.Collection; -import java.util.List; - -public class LogsearchJWTFilter extends AbstractJWTFilter { - - private AuthPropsConfig authPropsConfig; - private RoleDao roleDao; - - public LogsearchJWTFilter(RequestMatcher requestMatcher, AuthPropsConfig authPropsConfig, RoleDao roleDao) { - super(new NegatedRequestMatcher(requestMatcher)); - this.authPropsConfig = authPropsConfig; - this.roleDao = roleDao; - } - - @Override - protected String getPublicKey() { - return authPropsConfig.getPublicKey(); - } - - @Override - protected String getProvidedUrl() { - return authPropsConfig.getProvidedUrl(); - } - - @Override - protected boolean isAuthJwtEnabled() { - return authPropsConfig.isAuthJwtEnabled(); - } - - @Override - protected String getCookieName() { - return authPropsConfig.getCookieName(); - } - - @Override - protected String getOriginalUrlQueryParam() { - return authPropsConfig.getOriginalUrlQueryParam(); - } - - @Override - protected List getAudiences() { - return authPropsConfig.getAudiences(); - } - - @Override - protected List getUserAgentList() { - return authPropsConfig.getUserAgentList(); - } - - @Override - protected Collection getAuthorities(String username) { - if (authPropsConfig.isFileAuthorization()) { - return roleDao.getRolesForUser(username); - } - return RoleDao.createDefaultAuthorities(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java deleted file mode 100644 index 709678085dc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ambari.logsearch.web.filters; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletContext; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.ambari.logsearch.conf.LogSearchSpnegoConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.authentication.AbstractAuthenticationToken; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.authority.SimpleGrantedAuthority; -import org.springframework.security.core.context.SecurityContextHolder; -import org.springframework.security.core.context.SecurityContextImpl; -import org.springframework.security.core.userdetails.User; -import org.springframework.security.core.userdetails.UserDetails; -import org.apache.commons.collections.iterators.IteratorEnumeration; -import org.apache.commons.lang.StringEscapeUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; -import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; -import org.apache.hadoop.security.authentication.util.KerberosName; -import org.springframework.security.web.authentication.WebAuthenticationDetails; -import org.springframework.security.web.util.matcher.NegatedRequestMatcher; -import org.springframework.security.web.util.matcher.RequestMatcher; - - -public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter { - private static final Logger logger = LoggerFactory.getLogger(LogsearchKRBAuthenticationFilter.class); - - @Inject - private LogSearchSpnegoConfig logSearchSpnegoConfig; - - private static final String NAME_RULES_PARAM = "kerberos.name.rules"; - private static final String TOKEN_VALID_PARAM = "token.validity"; - private static final String COOKIE_DOMAIN_PARAM = "cookie.domain"; - private static final String COOKIE_PATH_PARAM = "cookie.path"; - private static final String PRINCIPAL_PARAM = "kerberos.principal"; - private static final String KEYTAB_PARAM = "kerberos.keytab"; - private static final String AUTH_TYPE = "type"; - private static final String AUTH_COOKIE_NAME = "hadoop.auth"; - private static final String DEFAULT_USER_ROLE = "ROLE_USER"; - - private static final NoServletContext NO_SERVLET_CONTEXT = new NoServletContext(); - private static final Pattern usernamePattern = Pattern.compile("(?<=u=)(.*?)(?=&)|(?<=u=)(.*)"); - - private String authType = PseudoAuthenticationHandler.TYPE; - private static boolean spnegoEnable = false; - - private RequestMatcher requestMatcher; - - public LogsearchKRBAuthenticationFilter(RequestMatcher 
requestMatcher) { - this.requestMatcher = new NegatedRequestMatcher(requestMatcher); - } - @PostConstruct - public void postConstruct() { - try { - isSpnegoEnable(); - init(null); - } catch (ServletException e) { - logger.error("Error while initializing Filter : " + e.getMessage()); - } - } - - @Override - public void init(FilterConfig conf) throws ServletException { - final FilterConfig globalConf = conf; - String hostName = logSearchSpnegoConfig.getHostName(); - final Map params = new HashMap(); - if (spnegoEnable) { - authType = KerberosAuthenticationHandler.TYPE; - } - params.put(AUTH_TYPE,authType); - params.put(NAME_RULES_PARAM, logSearchSpnegoConfig.getNameRules()); - params.put(TOKEN_VALID_PARAM, logSearchSpnegoConfig.getTokenValid()); - params.put(COOKIE_DOMAIN_PARAM, logSearchSpnegoConfig.getCookieDomain()); - params.put(COOKIE_PATH_PARAM, logSearchSpnegoConfig.getCookiePath()); - params.put(PRINCIPAL_PARAM, logSearchSpnegoConfig.getPrincipal()); - params.put(KEYTAB_PARAM, logSearchSpnegoConfig.getKeyTab()); - FilterConfig myConf = new FilterConfig() { - @Override - public ServletContext getServletContext() { - if (globalConf != null) { - return globalConf.getServletContext(); - } else { - return NO_SERVLET_CONTEXT; - } - } - - @SuppressWarnings("unchecked") - @Override - public Enumeration getInitParameterNames() { - return new IteratorEnumeration(params.keySet().iterator()); - } - - @Override - public String getInitParameter(String param) { - return params.get(param); - } - - @Override - public String getFilterName() { - return "KerberosFilter"; - } - }; - super.init(myConf); - } - - @Override - protected void doFilter(FilterChain filterChain, HttpServletRequest request, - HttpServletResponse response) throws IOException, ServletException { - logger.debug("LogsearchKRBAuthenticationFilter private filter"); - String userName = getUsernameFromResponse(response); - if (StringUtils.isNotEmpty(userName)) { - Authentication existingAuth = SecurityContextHolder.getContext() - .getAuthentication(); - if (existingAuth == null || !existingAuth.isAuthenticated()) { - // --------------------------- To Create Logsearch Session-------------------------------------- - // if we get the userName from the token then log into Logsearch using the same user - final List grantedAuths = new ArrayList<>(); - grantedAuths.add(new SimpleGrantedAuthority(DEFAULT_USER_ROLE)); - final UserDetails principal = new User(userName, "", grantedAuths); - final Authentication finalAuthentication = new UsernamePasswordAuthenticationToken( - principal, "", grantedAuths); - WebAuthenticationDetails webDetails = new WebAuthenticationDetails( - request); - ((AbstractAuthenticationToken) finalAuthentication) - .setDetails(webDetails); - Authentication authentication = this - .authenticate(finalAuthentication); - authentication = getGrantedAuthority(authentication); - SecurityContextHolder.getContext().setAuthentication(authentication); - request.getSession(true).setAttribute("SPRING_SECURITY_CONTEXT", - SecurityContextHolder.getContext()); - request.setAttribute("spnegoEnabled", true); - logger.info("Logged into Logsearch as = " + userName); - filterChain.doFilter(request, response); - } else { - try { - super.doFilter(filterChain, request, response); - } catch (Exception e) { - logger.error("Error LogsearchKRBAuthenticationFilter : " + e.getMessage()); - } - } - } else { - filterChain.doFilter(request, response); - } - } - - @Override - public void doFilter(ServletRequest request, ServletResponse response, - 
FilterChain filterChain) throws IOException, ServletException { - HttpServletRequest httpRequest = (HttpServletRequest) request; - if (requestMatcher.matches(httpRequest)) { - logger.debug("LogsearchKRBAuthenticationFilter public filter path >>>>" + httpRequest.getPathInfo()); - SecurityContextImpl securityContextImpl = (SecurityContextImpl) httpRequest.getSession(true).getAttribute("SPRING_SECURITY_CONTEXT"); - Authentication existingAuth = null; - if (securityContextImpl != null) { - existingAuth = securityContextImpl.getAuthentication(); - } - if (!isLoginRequest(httpRequest) && spnegoEnable - && (existingAuth == null || !existingAuth.isAuthenticated())) { - KerberosName.setRules(logSearchSpnegoConfig.getNameRules()); - String userName = getUsernameFromRequest(httpRequest); - if ((existingAuth == null || !existingAuth.isAuthenticated()) - && (StringUtils.isNotEmpty(userName))) { - // --------------------------- To Create Logsearch Session-------------------------------------- - // if we get the userName from the token then log into logsearch using the same user - final List grantedAuths = new ArrayList<>(); - grantedAuths.add(new SimpleGrantedAuthority(DEFAULT_USER_ROLE)); - final UserDetails principal = new User(userName, "", grantedAuths); - final Authentication finalAuthentication = new UsernamePasswordAuthenticationToken( - principal, "", grantedAuths); - WebAuthenticationDetails webDetails = new WebAuthenticationDetails( - httpRequest); - ((AbstractAuthenticationToken) finalAuthentication) - .setDetails(webDetails); - Authentication authentication = this - .authenticate(finalAuthentication); - authentication = getGrantedAuthority(authentication); - SecurityContextHolder.getContext().setAuthentication(authentication); - request.setAttribute("spnegoEnabled", true); - logger.info("Logged into Logsearch as = " + userName); - } else { - try { - super.doFilter(request, response, filterChain); - } catch (Exception e) { - logger.error("Error LogsearchKRBAuthenticationFilter : " + e.getMessage()); - } - } - } else { - filterChain.doFilter(request, response); - } - } else { - filterChain.doFilter(request, response); - } - } - - private void isSpnegoEnable() { - spnegoEnable = logSearchSpnegoConfig.isKerberosEnabled(); - if (spnegoEnable) { - spnegoEnable = false; - String keytab = logSearchSpnegoConfig.getKeyTab(); - String principal = logSearchSpnegoConfig.getPrincipal(); - String hostname = logSearchSpnegoConfig.getHostName(); - if (StringUtils.isNotEmpty(keytab) && StringUtils.isNotEmpty(principal) - && StringUtils.isNotEmpty(hostname)) { - spnegoEnable = true; - } - } - } - - private Authentication getGrantedAuthority(Authentication authentication) { - UsernamePasswordAuthenticationToken result = null; - if (authentication != null && authentication.isAuthenticated()) { - final List grantedAuths = getAuthorities(); - final UserDetails userDetails = new User(authentication.getName() - .toString(), authentication.getCredentials().toString(), grantedAuths); - result = new UsernamePasswordAuthenticationToken(userDetails, - authentication.getCredentials(), grantedAuths); - result.setDetails(authentication.getDetails()); - return result; - } - return authentication; - } - - private List getAuthorities() { - final List grantedAuths = new ArrayList<>(); - grantedAuths.add(new SimpleGrantedAuthority(DEFAULT_USER_ROLE)); - return grantedAuths; - } - - private Authentication authenticate(Authentication authentication) - throws AuthenticationException { - String username = authentication.getName(); - 
String password = (String) authentication.getCredentials(); - username = StringEscapeUtils.unescapeHtml(username); - if (StringUtils.isEmpty(username)) { - throw new BadCredentialsException("Username can't be null or empty."); - } - org.apache.ambari.logsearch.web.model.User user = new org.apache.ambari.logsearch.web.model.User(); - user.setUsername(username); - authentication = new UsernamePasswordAuthenticationToken(username, - password, getAuthorities()); - return authentication; - } - - private String getUsernameFromRequest(HttpServletRequest httpRequest) { - String userName = null; - Cookie[] cookie = httpRequest.getCookies(); - if (cookie != null) { - for (Cookie c : cookie) { - if (c.getName().equalsIgnoreCase(AUTH_COOKIE_NAME)) { - String cookieStr = c.getName() + "=" + c.getValue(); - Matcher m = usernamePattern.matcher(cookieStr); - if (m.find()) { - userName = m.group(1); - } - } - } - } - logger.debug("kerberos username from request >>>>>>>>" + userName); - return userName; - } - - private String getUsernameFromResponse(HttpServletResponse response) { - String userName = null; - boolean checkCookie = response.containsHeader("Set-Cookie"); - if (checkCookie) { - Collection cookiesCollection = response.getHeaders("Set-Cookie"); - if (cookiesCollection != null) { - Iterator iterator = cookiesCollection.iterator(); - while (iterator.hasNext()) { - String cookie = iterator.next(); - if (StringUtils.isNotEmpty(cookie)) { - if (cookie.toLowerCase().startsWith(AUTH_COOKIE_NAME.toLowerCase())) { - Matcher m = usernamePattern.matcher(cookie); - if (m.find()) { - userName = m.group(1); - } - } - } - if (StringUtils.isNotEmpty(userName)) { - break; - } - } - } - } - logger.debug("kerberos username from response >>>>>>>>" + userName); - return userName; - } - - - - private boolean isLoginRequest(HttpServletRequest httpServletRequest) { - boolean isLoginRequest = false; - if ("POST".equalsIgnoreCase(httpServletRequest.getMethod())) { - String url = httpServletRequest.getRequestURI().toString(); - if ("/login".equalsIgnoreCase(url)) { - isLoginRequest = true; - } - } - return isLoginRequest; - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKrbFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKrbFilter.java deleted file mode 100644 index 3677d112345..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKrbFilter.java +++ /dev/null @@ -1,577 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
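A condensed sketch of the SPNEGO branch of LogsearchKRBAuthenticationFilter above: once a Kerberos-derived user name is recovered from the request, the filter builds a Spring Security token for it and installs it in the security context. The snippet below only re-uses calls already present in the deleted code; userName, httpRequest and DEFAULT_USER_ROLE stand for the values the filter itself provides, so this is an illustration rather than a drop-in replacement.

    // Build an authenticated principal for the user name taken from the hadoop.auth cookie
    List<GrantedAuthority> grantedAuths = new ArrayList<>();
    grantedAuths.add(new SimpleGrantedAuthority(DEFAULT_USER_ROLE));
    UserDetails principal = new User(userName, "", grantedAuths);
    AbstractAuthenticationToken authToken = new UsernamePasswordAuthenticationToken(principal, "", grantedAuths);
    authToken.setDetails(new WebAuthenticationDetails(httpRequest));
    // Make the request look like a normal logged-in Log Search session
    SecurityContextHolder.getContext().setAuthentication(authToken);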
- */ -package org.apache.ambari.logsearch.web.filters; - -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.security.authentication.client.AuthenticatedURL; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.hadoop.security.authentication.client.KerberosAuthenticator; -import org.apache.hadoop.security.authentication.server.AuthenticationHandler; -import org.apache.hadoop.security.authentication.server.AuthenticationToken; -import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; -import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; -import org.apache.hadoop.security.authentication.util.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletContext; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletRequestWrapper; -import javax.servlet.http.HttpServletResponse; - -import java.io.IOException; -import java.security.Principal; -import java.text.SimpleDateFormat; -import java.util.*; - -import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_SESSION_ID; - -public class LogsearchKrbFilter implements Filter { - - private static Logger logger = LoggerFactory.getLogger(LogsearchKrbFilter.class); - - /** - * Constant for the property that specifies the configuration prefix. - */ - public static final String CONFIG_PREFIX = "config.prefix"; - - /** - * Constant for the property that specifies the authentication handler to use. - */ - public static final String AUTH_TYPE = "type"; - - /** - * Constant for the property that specifies the secret to use for signing the HTTP Cookies. - */ - public static final String SIGNATURE_SECRET = "signature.secret"; - - public static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file"; - - /** - * Constant for the configuration property that indicates the validity of the generated token. - */ - public static final String AUTH_TOKEN_VALIDITY = "token.validity"; - - /** - * Constant for the configuration property that indicates the domain to use in the HTTP cookie. - */ - public static final String COOKIE_DOMAIN = "cookie.domain"; - - /** - * Constant for the configuration property that indicates the path to use in the HTTP cookie. - */ - public static final String COOKIE_PATH = "cookie.path"; - - /** - * Constant for the configuration property that indicates the name of the - * SignerSecretProvider class to use. - * Possible values are: "string", "random", "zookeeper", or a classname. - * If not specified, the "string" implementation will be used with - * SIGNATURE_SECRET; and if that's not specified, the "random" implementation - * will be used. - */ - public static final String SIGNER_SECRET_PROVIDER = - "signer.secret.provider"; - - /** - * Constant for the ServletContext attribute that can be used for providing a - * custom implementation of the SignerSecretProvider. Note that the class - * should already be initialized. If not specified, SIGNER_SECRET_PROVIDER - * will be used. 
- */ - public static final String SIGNER_SECRET_PROVIDER_ATTRIBUTE = - "signer.secret.provider.object"; - - private Properties config; - private Signer signer; - private SignerSecretProvider secretProvider; - private AuthenticationHandler authHandler; - private long validity; - private String cookieDomain; - private String cookiePath; - - /** - *

Initializes the authentication filter and signer secret provider.

- * It instantiates and initializes the specified {@link - * AuthenticationHandler}. - * - * @param filterConfig filter configuration. - * - * @throws ServletException thrown if the filter or the authentication handler could not be initialized properly. - */ - @Override - public void init(FilterConfig filterConfig) throws ServletException { - String configPrefix = filterConfig.getInitParameter(CONFIG_PREFIX); - configPrefix = (configPrefix != null) ? configPrefix + "." : ""; - config = getConfiguration(configPrefix, filterConfig); - String authHandlerName = config.getProperty(AUTH_TYPE, null); - String authHandlerClassName; - if (authHandlerName == null) { - throw new ServletException("Authentication type must be specified: " + - PseudoAuthenticationHandler.TYPE + "|" + - KerberosAuthenticationHandler.TYPE + "|"); - } - if(StringUtils.equalsIgnoreCase(authHandlerName, PseudoAuthenticationHandler.TYPE)){ - authHandlerClassName = PseudoAuthenticationHandler.class.getName(); - }else if(StringUtils.equalsIgnoreCase(authHandlerName, KerberosAuthenticationHandler.TYPE)){ - authHandlerClassName = KerberosAuthenticationHandler.class.getName(); - } else { - authHandlerClassName = authHandlerName; - } - - validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000")) - * 1000; //10 hours - initializeSecretProvider(filterConfig); - - initializeAuthHandler(authHandlerClassName); - - cookieDomain = config.getProperty(COOKIE_DOMAIN, null); - cookiePath = config.getProperty(COOKIE_PATH, null); - } - - private void initializeAuthHandler(String authHandlerClassName) - throws ServletException { - try { - Class klass = Thread.currentThread().getContextClassLoader().loadClass(authHandlerClassName); - authHandler = (AuthenticationHandler) klass.newInstance(); - authHandler.init(config); - } catch (ClassNotFoundException | InstantiationException | - IllegalAccessException ex) { - throw new ServletException(ex); - } - } - - private void initializeSecretProvider(FilterConfig filterConfig) - throws ServletException { - secretProvider = (SignerSecretProvider) filterConfig.getServletContext(). 
- getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE); - if (secretProvider == null) { - try { - secretProvider = constructSecretProvider( - filterConfig.getServletContext(), - config, false); - } catch (Exception ex) { - throw new ServletException(ex); - } - } - signer = new Signer(secretProvider); - } - - private static SignerSecretProvider constructSecretProvider( - ServletContext ctx, Properties config, - boolean disallowFallbackToRandomSecretProvider) throws Exception { - long validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, - "36000")) * 1000; - - String name = config.getProperty(SIGNER_SECRET_PROVIDER); - if (StringUtils.isEmpty(name)) { - if (!disallowFallbackToRandomSecretProvider) { - name = "random"; - } else { - name = "file"; - } - } - - SignerSecretProvider provider; - if ("file".equals(name)) { - provider = new FileSignerSecretProvider(); - try { - provider.init(config, ctx, validity); - } catch (Exception e) { - if (!disallowFallbackToRandomSecretProvider) { - logger.info("Unable to initialize FileSignerSecretProvider, " + - "falling back to use random secrets."); - provider = new RandomSignerSecretProvider(); - provider.init(config, ctx, validity); - } else { - throw e; - } - } - } else if ("random".equals(name)) { - provider = new RandomSignerSecretProvider(); - provider.init(config, ctx, validity); - } else if ("zookeeper".equals(name)) { - provider = new ZKSignerSecretProvider(); - provider.init(config, ctx, validity); - } else { - provider = (SignerSecretProvider) Thread.currentThread(). - getContextClassLoader().loadClass(name).newInstance(); - provider.init(config, ctx, validity); - } - return provider; - } - - /** - * Returns the configuration properties of the {@link LogsearchKrbFilter} - * without the prefix. The returned properties are the same that the - * {@link #getConfiguration(String, FilterConfig)} method returned. - * - * @return the configuration properties. - */ - protected Properties getConfiguration() { - return config; - } - - /** - * Returns the authentication handler being used. - * - * @return the authentication handler being used. - */ - protected AuthenticationHandler getAuthenticationHandler() { - return authHandler; - } - - /** - * Returns if a random secret is being used. - * - * @return if a random secret is being used. - */ - protected boolean isRandomSecret() { - return secretProvider != null && secretProvider.getClass() == RandomSignerSecretProvider.class; - } - - /** - * Returns if a custom implementation of a SignerSecretProvider is being used. - * - * @return if a custom implementation of a SignerSecretProvider is being used. - */ - protected boolean isCustomSignerSecretProvider() { - Class clazz = secretProvider != null ? secretProvider.getClass() : null; - return clazz != FileSignerSecretProvider.class && clazz != - RandomSignerSecretProvider.class && clazz != ZKSignerSecretProvider - .class; - } - - /** - * Returns the validity time of the generated tokens. - * - * @return the validity time of the generated tokens, in seconds. - */ - protected long getValidity() { - return validity / 1000; - } - - /** - * Returns the cookie domain to use for the HTTP cookie. - * - * @return the cookie domain to use for the HTTP cookie. - */ - protected String getCookieDomain() { - return cookieDomain; - } - - /** - * Returns the cookie path to use for the HTTP cookie. - * - * @return the cookie path to use for the HTTP cookie. - */ - protected String getCookiePath() { - return cookiePath; - } - - /** - * Destroys the filter. - *

- * It invokes the {@link AuthenticationHandler#destroy()} method to release any resources it may hold. - */ - @Override - public void destroy() { - if (authHandler != null) { - authHandler.destroy(); - authHandler = null; - } - } - - /** - * Returns the filtered configuration (only properties starting with the specified prefix). The property keys - * are also trimmed from the prefix. The returned {@link Properties} object is used to initialized the - * {@link AuthenticationHandler}. - *

- * This method can be overriden by subclasses to obtain the configuration from other configuration source than - * the web.xml file. - * - * @param configPrefix configuration prefix to use for extracting configuration properties. - * @param filterConfig filter configuration object - * - * @return the configuration to be used with the {@link AuthenticationHandler} instance. - * - * @throws ServletException thrown if the configuration could not be created. - */ - protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException { - Properties props = new Properties(); - if(filterConfig != null){ - Enumeration names = filterConfig.getInitParameterNames(); - if(names != null){ - while (names.hasMoreElements()) { - String name = (String) names.nextElement(); - if (name != null && configPrefix != null && name.startsWith(configPrefix)) { - String value = filterConfig.getInitParameter(name); - props.put(name.substring(configPrefix.length()), value); - } - } - } - } - return props; - } - - /** - * Returns the full URL of the request including the query string. - *

- * Used as a convenience method for logging purposes. - * - * @param request the request object. - * - * @return the full URL of the request including the query string. - */ - protected String getRequestURL(HttpServletRequest request) { - StringBuffer sb = request.getRequestURL(); - if (request.getQueryString() != null) { - sb.append("?").append(request.getQueryString()); - } - return sb.toString(); - } - - /** - * Returns the {@link AuthenticationToken} for the request. - *

- * It looks at the received HTTP cookies and extracts the value of the {@link AuthenticatedURL#AUTH_COOKIE} - * if present. It verifies the signature and if correct it creates the {@link AuthenticationToken} and returns - * it. - *

- * If this method returns null the filter will invoke the configured {@link AuthenticationHandler} - * to perform user authentication. - * - * @param request request object. - * - * @return the Authentication token if the request is authenticated, null otherwise. - * - * @throws IOException thrown if an IO error occurred. - * @throws AuthenticationException thrown if the token is invalid or if it has expired. - */ - protected AuthenticationToken getToken(HttpServletRequest request) throws IOException, AuthenticationException { - AuthenticationToken token = null; - String tokenStr = null; - Cookie[] cookies = request.getCookies(); - if (cookies != null) { - for (Cookie cookie : cookies) { - if (AuthenticatedURL.AUTH_COOKIE.equals(cookie.getName())) { - tokenStr = cookie.getValue(); - try { - tokenStr = signer.verifyAndExtract(tokenStr); - } catch (SignerException ex) { - throw new AuthenticationException(ex); - } - break; - } - } - } - if (tokenStr != null) { - token = AuthenticationToken.parse(tokenStr); - if(token != null){ - if (!token.getType().equals(authHandler.getType())) { - throw new AuthenticationException("Invalid AuthenticationToken type"); - } - if (token.isExpired()) { - throw new AuthenticationException("AuthenticationToken expired"); - } - } - } - return token; - } - - /** - * If the request has a valid authentication token it allows the request to continue to the target resource, - * otherwise it triggers an authentication sequence using the configured {@link AuthenticationHandler}. - * - * @param request the request object. - * @param response the response object. - * @param filterChain the filter chain object. - * - * @throws IOException thrown if an IO error occurred. - * @throws ServletException thrown if a processing error occurred. 
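The getToken() contract documented above comes down to three steps: read the hadoop.auth cookie, verify its signature, then parse and validate the token. A minimal sketch of that path, using the same Hadoop auth-client classes the filter already imports (the method name and the signer/authHandler fields mirror the deleted class; the helper itself is illustrative):

    private AuthenticationToken tokenFromAuthCookie(HttpServletRequest request) throws AuthenticationException {
      Cookie[] cookies = request.getCookies();
      if (cookies == null) {
        return null;                                          // no cookie -> let the AuthenticationHandler run
      }
      for (Cookie cookie : cookies) {
        if (AuthenticatedURL.AUTH_COOKIE.equals(cookie.getName())) {
          String raw;
          try {
            raw = signer.verifyAndExtract(cookie.getValue()); // rejects tampered signatures
          } catch (SignerException ex) {
            throw new AuthenticationException(ex);
          }
          AuthenticationToken token = AuthenticationToken.parse(raw);
          if (!token.getType().equals(authHandler.getType())) {
            throw new AuthenticationException("Invalid AuthenticationToken type");
          }
          if (token.isExpired()) {
            throw new AuthenticationException("AuthenticationToken expired");
          }
          return token;
        }
      }
      return null;
    }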
- */ - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) - throws IOException, ServletException { - HttpServletRequest httpRequest = (HttpServletRequest) request; - logger.debug("LogsearchKrbFilter public filter path >>>>" +httpRequest.getPathInfo()); - boolean unauthorizedResponse = true; - int errCode = HttpServletResponse.SC_UNAUTHORIZED; - AuthenticationException authenticationEx = null; - HttpServletResponse httpResponse = (HttpServletResponse) response; - boolean isHttps = "https".equals(httpRequest.getScheme()); - try { - boolean newToken = false; - AuthenticationToken token; - try { - token = getToken(httpRequest); - } - catch (AuthenticationException ex) { - ex.printStackTrace(); - logger.warn("AuthenticationToken ignored: " + ex.getMessage()); - // will be sent back in a 401 unless filter authenticates - authenticationEx = ex; - token = null; - } - if (authHandler.managementOperation(token, httpRequest, httpResponse)) { - if (token == null) { - if (logger.isDebugEnabled()) { - logger.debug("Request [{}] triggering authentication", getRequestURL(httpRequest)); - } - token = authHandler.authenticate(httpRequest, httpResponse); - if (token != null && token.getExpires() != 0 && - token != AuthenticationToken.ANONYMOUS) { - token.setExpires(System.currentTimeMillis() + getValidity() * 1000); - } - newToken = true; - } - if (token != null) { - unauthorizedResponse = false; - if (logger.isDebugEnabled()) { - logger.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest), token.getUserName()); - } - final AuthenticationToken authToken = token; - httpRequest = new HttpServletRequestWrapper(httpRequest) { - - @Override - public String getAuthType() { - return authToken.getType(); - } - - @Override - public String getRemoteUser() { - return authToken.getUserName(); - } - - @Override - public Principal getUserPrincipal() { - return (authToken != AuthenticationToken.ANONYMOUS) ? authToken : null; - } - }; - if (newToken && !token.isExpired() && token != AuthenticationToken.ANONYMOUS) { - String signedToken = signer.sign(token.toString()); - createAuthCookie(httpResponse, signedToken, getCookieDomain(), - getCookiePath(), token.getExpires(), isHttps); - } - doFilter(filterChain, httpRequest, httpResponse); - } - } else { - unauthorizedResponse = false; - } - } catch (AuthenticationException ex) { - // exception from the filter itself is fatal - ex.printStackTrace(); - errCode = HttpServletResponse.SC_FORBIDDEN; - authenticationEx = ex; - logger.warn("Authentication exception: " + ex.getMessage(), ex); - } - if (unauthorizedResponse) { - if (!httpResponse.isCommitted()) { - createAuthCookie(httpResponse, "", getCookieDomain(), - getCookiePath(), 0, isHttps); - // If response code is 401. Then WWW-Authenticate Header should be - // present.. reset to 403 if not found.. 
- if ((errCode == HttpServletResponse.SC_UNAUTHORIZED) - && (!httpResponse.containsHeader( - KerberosAuthenticator.WWW_AUTHENTICATE))) { - errCode = HttpServletResponse.SC_FORBIDDEN; - } - if (authenticationEx == null) { - boolean chk = true; - Collection headerNames = httpResponse.getHeaderNames(); - for(String headerName : headerNames){ - String value = httpResponse.getHeader(headerName); - if(headerName.equalsIgnoreCase("Set-Cookie") && value.startsWith(LOGSEARCH_SESSION_ID)){ - chk = false; - break; - } - } - String authHeader = httpRequest.getHeader("Authorization"); - if(authHeader == null && chk){ - filterChain.doFilter(request, response); - }else if(authHeader != null && authHeader.startsWith("Basic")){ - filterChain.doFilter(request, response); - } - } else { - httpResponse.sendError(errCode, authenticationEx.getMessage()); - } - } - } - } - - /** - * Delegates call to the servlet filter chain. Sub-classes my override this - * method to perform pre and post tasks. - */ - protected void doFilter(FilterChain filterChain, HttpServletRequest request, - HttpServletResponse response) throws IOException, ServletException { - filterChain.doFilter(request, response); - } - - /** - * Creates the Hadoop authentication HTTP cookie. - * - * @param token authentication token for the cookie. - * @param expires UNIX timestamp that indicates the expire date of the - * cookie. It has no effect if its value < 0. - * - * XXX the following code duplicate some logic in Jetty / Servlet API, - * because of the fact that Hadoop is stuck at servlet 2.5 and jetty 6 - * right now. - */ - private static void createAuthCookie(HttpServletResponse resp, String token, - String domain, String path, long expires, - boolean isSecure) { - StringBuilder sb = new StringBuilder(AuthenticatedURL.AUTH_COOKIE) - .append("="); - if (token != null && token.length() > 0) { - sb.append("\"").append(token).append("\""); - } - - if (path != null) { - sb.append("; Path=").append(path); - } - - if (domain != null) { - sb.append("; Domain=").append(domain); - } - - if (expires >= 0) { - Date date = new Date(expires); - SimpleDateFormat df = new SimpleDateFormat("EEE, " + - "dd-MMM-yyyy HH:mm:ss zzz"); - df.setTimeZone(TimeZone.getTimeZone("GMT")); - sb.append("; Expires=").append(df.format(date)); - } - - if (isSecure) { - sb.append("; Secure"); - } - - sb.append("; HttpOnly"); - resp.addHeader("Set-Cookie", sb.toString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java deleted file mode 100644 index fed86e855fe..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.filters; - -import java.io.IOException; - -import javax.inject.Inject; -import javax.servlet.FilterChain; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpSession; - -import org.apache.ambari.logsearch.common.LogSearchContext; -import org.apache.ambari.logsearch.manager.SessionManager; -import org.apache.ambari.logsearch.util.CommonUtil; -import org.apache.ambari.logsearch.web.model.User; -import org.apache.log4j.Logger; -import org.springframework.security.authentication.AnonymousAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.context.SecurityContextHolder; -import org.springframework.web.filter.GenericFilterBean; - -public class LogsearchSecurityContextFormationFilter extends GenericFilterBean { - - static Logger logger = Logger.getLogger(LogsearchSecurityContextFormationFilter.class); - - public static final String LOGSEARCH_SC_SESSION_KEY = "LOGSEARCH_SECURITY_CONTEXT"; - public static final String USER_AGENT = "User-Agent"; - - @Inject - SessionManager sessionManager; - - public LogsearchSecurityContextFormationFilter() { - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, - * javax.servlet.ServletResponse, javax.servlet.FilterChain) - */ - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, - ServletException { - - try { - Authentication auth = SecurityContextHolder.getContext().getAuthentication(); - - if (auth instanceof AnonymousAuthenticationToken) { - // ignore - } else { - HttpServletRequest httpRequest = (HttpServletRequest) request; - HttpSession httpSession = httpRequest.getSession(false); - Cookie[] cookieList = httpRequest.getCookies(); - String msaCookie = null; - for (int i = 0; cookieList != null && i < cookieList.length; i++) { - if (cookieList[i].getName().equalsIgnoreCase("msa")) { - msaCookie = cookieList[i].getValue(); - } - } - if (msaCookie == null) { - HttpServletResponse httpResponse = (HttpServletResponse) response; - msaCookie = CommonUtil.genGUI(); - Cookie cookie = new Cookie("msa", msaCookie); - // TODO: Need to revisit this - cookie.setMaxAge(Integer.MAX_VALUE); - httpResponse.addCookie(cookie); - } - // [1]get the context from session - LogSearchContext context = (LogSearchContext) httpSession.getAttribute(LOGSEARCH_SC_SESSION_KEY); - if (context == null) { - context = new LogSearchContext(); - httpSession.setAttribute(LOGSEARCH_SC_SESSION_KEY, context); - } - LogSearchContext.setContext(context); - User user = sessionManager.processSuccessLogin(); - context.setUser(user); - } - chain.doFilter(request, response); - - } finally { - // [4]remove context from thread-local - LogSearchContext.resetContext(); - } - } -} diff --git 
a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchTrustedProxyFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchTrustedProxyFilter.java deleted file mode 100644 index 0737ee0e057..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchTrustedProxyFilter.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.filters; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.dao.RoleDao; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.authentication.AbstractAuthenticationToken; -import org.springframework.security.authentication.AnonymousAuthenticationToken; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.context.SecurityContextHolder; -import org.springframework.security.core.userdetails.User; -import org.springframework.security.core.userdetails.UserDetails; -import org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter; -import org.springframework.security.web.authentication.WebAuthenticationDetails; -import org.springframework.security.web.util.matcher.RequestMatcher; - -import javax.servlet.FilterChain; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import java.io.IOException; -import java.util.Enumeration; -import java.util.List; - -/** - * Filter servlet to handle trusted proxy authentication. - * It is disabled by default (see: {@link AuthPropsConfig#isTrustedProxy()})
- * There are 4 main configuration properties of this filter; authentication is allowed only if these match the request details (a condensed sketch of the matching rule follows the list):
- * - {@link AuthPropsConfig#getProxyUsers()} - Proxy users
- * - {@link AuthPropsConfig#getProxyUserGroups()} - Proxy groups
- * - {@link AuthPropsConfig#getProxyUserHosts()} - Proxy hosts
- * - {@link AuthPropsConfig#getProxyIp()} - Proxy server IPs
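Each of the four checks listed above applies the same containment-or-wildcard rule: a configured list containing only "*" trusts any value, otherwise the candidate must appear in the list. A minimal sketch of that rule, mirroring the deleted isTrustedProxySever/isTrustedHost/isTrustedProxyUser helpers (the helper name below is illustrative, not part of the original class):

    private static boolean matchesOrWildcard(List<String> configured, String candidate) {
      if (configured == null || candidate == null) {
        return false;
      }
      // a lone "*" means "trust any value", otherwise require an exact match
      return (configured.size() == 1 && configured.contains("*")) || configured.contains(candidate);
    }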
- */ -public class LogsearchTrustedProxyFilter extends AbstractAuthenticationProcessingFilter { - - private static final Logger LOG = LoggerFactory.getLogger(LogsearchTrustedProxyFilter.class); - - private static final String TRUSTED_PROXY_KNOX_HEADER = "X-Forwarded-For"; - - private AuthPropsConfig authPropsConfig; - - public LogsearchTrustedProxyFilter(RequestMatcher requestMatcher, AuthPropsConfig authPropsConfig) { - super(requestMatcher); - this.authPropsConfig = authPropsConfig; - } - - @Override - public Authentication attemptAuthentication(HttpServletRequest request, HttpServletResponse response) throws AuthenticationException, IOException, ServletException { - String doAsUserName = request.getParameter("doAs"); - final List authorities = RoleDao.createDefaultAuthorities(); - final UserDetails principal = new User(doAsUserName, "", authorities); - final Authentication finalAuthentication = new UsernamePasswordAuthenticationToken(principal, "", authorities); - WebAuthenticationDetails webDetails = new WebAuthenticationDetails(request); - ((AbstractAuthenticationToken) finalAuthentication).setDetails(webDetails); - SecurityContextHolder.getContext().setAuthentication(finalAuthentication); - LOG.info("Logged into Log Search User as doAsUser = {}", doAsUserName); - return finalAuthentication; - } - - @Override - public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException { - Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); - boolean skip = true; - if (authPropsConfig.isTrustedProxy() && !isAuthenticated(authentication) ) { - String doAsUserName = req.getParameter("doAs"); - String remoteAddr = req.getRemoteAddr(); - if (StringUtils.isNotEmpty(doAsUserName) && isTrustedProxySever(remoteAddr) - && isTrustedHost(getXForwardHeader((HttpServletRequest) req))) { - List grantedAuths = RoleDao.createDefaultAuthorities(); - if (!(isTrustedProxyUser(doAsUserName) || isTrustedProxyUserGroup(grantedAuths))) { - skip = false; - } - } - } - if (skip) { - chain.doFilter(req, res); - return; - } - super.doFilter(req, res, chain); - } - - private boolean isTrustedProxySever(String requestHosts) { - if (authPropsConfig.getProxyIp() == null || requestHosts == null) { - return false; - } - final List proxyServers = authPropsConfig.getProxyIp(); - return (proxyServers.size() == 1 && proxyServers.contains("*")) || authPropsConfig.getProxyIp().contains(requestHosts); - } - - private boolean isTrustedHost(String requestHosts) { - if (requestHosts == null) { - return false; - } - List trustedProxyHosts = authPropsConfig.getProxyUserHosts(); - return (trustedProxyHosts.size() == 1 && trustedProxyHosts.contains("*")) || trustedProxyHosts.contains(requestHosts); - } - - private boolean isTrustedProxyUser(String doAsUser) { - if (doAsUser == null) { - return false; - } - List trustedProxyUsers = authPropsConfig.getProxyUsers(); - return (trustedProxyUsers.size() == 1 && trustedProxyUsers.contains("*")) || trustedProxyUsers.contains(doAsUser); - - } - - private boolean isTrustedProxyUserGroup(List proxyUserGroup) { - if (proxyUserGroup == null) { - return false; - } - List trustedProxyGroups = authPropsConfig.getProxyUserGroups(); - if (trustedProxyGroups.size() == 1 && trustedProxyGroups.contains("*")) { - return true; - } else { - for (GrantedAuthority group : proxyUserGroup) { - if (trustedProxyGroups.contains(group.getAuthority())) { - return true; - } - } - } - return false; - } - - private boolean 
isAuthenticated(Authentication authentication) { - return authentication != null && !(authentication instanceof AnonymousAuthenticationToken) && authentication.isAuthenticated(); - } - - private String getXForwardHeader(HttpServletRequest httpRequest) { - Enumeration names = httpRequest.getHeaderNames(); - while (names.hasMoreElements()) { - String name = names.nextElement(); - Enumeration values = httpRequest.getHeaders(name); - String value = ""; - if (values != null) { - while (values.hasMoreElements()) { - value = values.nextElement(); - if (StringUtils.isNotBlank(value)) { - break; - } - } - } - if (StringUtils.trimToNull(name) != null - && StringUtils.trimToNull(value) != null) { - if (name.equalsIgnoreCase(TRUSTED_PROXY_KNOX_HEADER)) { - return value; - } - } - } - return ""; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java deleted file mode 100644 index e20c0fa6b69..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.filters; - -import java.io.IOException; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.log4j.Logger; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.web.authentication.RememberMeServices; -import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter; - -public class LogsearchUsernamePasswordAuthenticationFilter extends UsernamePasswordAuthenticationFilter { - private static final Logger logger = Logger.getLogger(LogsearchUsernamePasswordAuthenticationFilter.class); - - public void setRememberMeServices(RememberMeServices rememberMeServices) { - super.setRememberMeServices(rememberMeServices); - } - - @Override - protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, - AuthenticationException failed) throws IOException, ServletException { - logger.info("login failed :::::" + failed.getMessage()); - super.unsuccessfulAuthentication(request, response, failed); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/NoServletContext.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/NoServletContext.java deleted file mode 100644 index f1663bc7d2b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/NoServletContext.java +++ /dev/null @@ -1,300 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.filters; - -import java.io.InputStream; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Enumeration; -import java.util.EventListener; -import java.util.Map; -import java.util.Set; - -import javax.servlet.Filter; -import javax.servlet.FilterRegistration; -import javax.servlet.RequestDispatcher; -import javax.servlet.Servlet; -import javax.servlet.ServletContext; -import javax.servlet.ServletException; -import javax.servlet.ServletRegistration; -import javax.servlet.SessionCookieConfig; -import javax.servlet.SessionTrackingMode; -import javax.servlet.FilterRegistration.Dynamic; -import javax.servlet.descriptor.JspConfigDescriptor; - -public class NoServletContext implements ServletContext { - - @Override - public void setSessionTrackingModes( - Set sessionTrackingModes) { - } - - @Override - public boolean setInitParameter(String name, String value) { - return false; - } - - @Override - public void setAttribute(String name, Object object) { - } - - @Override - public void removeAttribute(String name) { - } - - @Override - public void log(String message, Throwable throwable) { - } - - @Override - public void log(Exception exception, String msg) { - } - - @Override - public void log(String msg) { - } - - @Override - public String getVirtualServerName() { - return null; - } - - @Override - public SessionCookieConfig getSessionCookieConfig() { - return null; - } - - @Override - public Enumeration getServlets() { - return null; - } - - @Override - public Map getServletRegistrations() { - return null; - } - - @Override - public ServletRegistration getServletRegistration(String servletName) { - return null; - } - - @Override - public Enumeration getServletNames() { - return null; - } - - @Override - public String getServletContextName() { - return null; - } - - @Override - public Servlet getServlet(String name) throws ServletException { - return null; - } - - @Override - public String getServerInfo() { - return null; - } - - @Override - public Set getResourcePaths(String path) { - return null; - } - - @Override - public InputStream getResourceAsStream(String path) { - return null; - } - - @Override - public URL getResource(String path) throws MalformedURLException { - return null; - } - - @Override - public RequestDispatcher getRequestDispatcher(String path) { - return null; - } - - @Override - public String getRealPath(String path) { - return null; - } - - @Override - public RequestDispatcher getNamedDispatcher(String name) { - return null; - } - - @Override - public int getMinorVersion() { - return 0; - } - - @Override - public String getMimeType(String file) { - return null; - } - - @Override - public int getMajorVersion() { - return 0; - } - - @Override - public JspConfigDescriptor getJspConfigDescriptor() { - return null; - } - - @Override - public Enumeration getInitParameterNames() { - return null; - } - - @Override - public String getInitParameter(String name) { - return null; - } - - @Override - public Map getFilterRegistrations() { - return null; - } - - @Override - public FilterRegistration getFilterRegistration(String filterName) { - return null; - } - - @Override - public Set getEffectiveSessionTrackingModes() { - return null; - } - - @Override - public int getEffectiveMinorVersion() { - return 0; - } - - @Override - public int getEffectiveMajorVersion() { - return 0; - } - - @Override - public Set getDefaultSessionTrackingModes() { - return null; - } - - @Override - public String getContextPath() { - return 
null; - } - - @Override - public ServletContext getContext(String uripath) { - return null; - } - - @Override - public ClassLoader getClassLoader() { - return null; - } - - @Override - public Enumeration getAttributeNames() { - return null; - } - - @Override - public Object getAttribute(String name) { - return null; - } - - @Override - public void declareRoles(String... roleNames) { - } - - @Override - public T createServlet(Class clazz) - throws ServletException { - return null; - } - - @Override - public T createListener(Class clazz) - throws ServletException { - return null; - } - - @Override - public T createFilter(Class clazz) - throws ServletException { - return null; - } - - @Override - public javax.servlet.ServletRegistration.Dynamic addServlet( - String servletName, Class servletClass) { - return null; - } - - @Override - public javax.servlet.ServletRegistration.Dynamic addServlet( - String servletName, Servlet servlet) { - return null; - } - - @Override - public javax.servlet.ServletRegistration.Dynamic addServlet( - String servletName, String className) { - return null; - } - - @Override - public void addListener(Class listenerClass) { - } - - @Override - public void addListener(T t) { - } - - @Override - public void addListener(String className) { - } - - @Override - public Dynamic addFilter(String filterName, - Class filterClass) { - return null; - } - - @Override - public Dynamic addFilter(String filterName, Filter filter) { - return null; - } - - @Override - public Dynamic addFilter(String filterName, String className) { - return null; - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/StatusProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/StatusProvider.java deleted file mode 100644 index d36a1f070c7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/StatusProvider.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
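NoServletContext above is a null-object ServletContext: every method is a no-op or returns null. One plausible use, sketched here as an assumption rather than taken from the deleted code, is satisfying APIs such as SignerSecretProvider.init(...) (seen earlier in LogsearchKrbFilter) when no real container context is available:

    // Hypothetical wiring: initialize a Hadoop secret provider outside a servlet container
    SignerSecretProvider provider = new RandomSignerSecretProvider();
    try {
      provider.init(new Properties(), new NoServletContext(), 36000L * 1000);   // ~10 hours validity
    } catch (Exception ex) {
      // init is declared to throw Exception; wrap or rethrow as appropriate for the caller
    }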
- */ -package org.apache.ambari.logsearch.web.filters; - -import org.apache.ambari.logsearch.common.StatusMessage; - -public interface StatusProvider { - StatusMessage getStatusMessage(String requestUri); -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/listener/LogSearchSessionListener.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/listener/LogSearchSessionListener.java deleted file mode 100644 index 55101dbf0c9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/listener/LogSearchSessionListener.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.listener; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.servlet.http.HttpSessionEvent; -import javax.servlet.http.HttpSessionListener; - -public class LogSearchSessionListener implements HttpSessionListener { - - private Logger LOG = LoggerFactory.getLogger(LogSearchSessionListener.class); - - private int numberOfSessions = 0; - - @Override - public void sessionCreated(HttpSessionEvent event) { - synchronized (this) { - numberOfSessions++; - } - LOG.info(String.format("New session is created (Id: %s). Number of sessions: %d", event.getSession().getId(), numberOfSessions)); - } - - @Override - public void sessionDestroyed(HttpSessionEvent event) { - synchronized (this) { - numberOfSessions--; - } - LOG.info(String.format("Session destroyed (Id: %s). Number of sessions: %d", event.getSession().getId(), numberOfSessions)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/Privilege.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/Privilege.java deleted file mode 100644 index 949a1abce07..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/Privilege.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.model; - -public class Privilege { - - private String name; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("Privilege [name="); - builder.append(name); - builder.append("]"); - return builder.toString(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/Role.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/Role.java deleted file mode 100644 index 9b084241aff..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/Role.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.model; - -import java.util.List; - -import org.springframework.security.core.GrantedAuthority; - -public class Role implements GrantedAuthority { - - private static final long serialVersionUID = 1L; - private String name; - - private List privileges; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - @Override - public String getAuthority() { - return this.name; - } - - public List getPrivileges() { - return privileges; - } - - public void setPrivileges(List privileges) { - this.privileges = privileges; - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("Role [name="); - builder.append(name); - builder.append(", privileges="); - builder.append(privileges); - builder.append("]"); - return builder.toString(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/User.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/User.java deleted file mode 100644 index 2bd0ed27e47..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/model/User.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
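Because the Role model above implements GrantedAuthority and reports its name via getAuthority(), instances can be added directly to a Spring Security authority list. A brief illustrative fragment (the role name here is made up, not taken from the deleted code):

    Role admin = new Role();
    admin.setName("ROLE_ADMIN");                  // getAuthority() returns this value
    List<GrantedAuthority> authorities = new ArrayList<>();
    authorities.add(admin);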
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.model; - -import java.util.Collection; -import java.util.List; - -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.userdetails.UserDetails; - -public class User implements UserDetails { - private static final long serialVersionUID = 1L; - - private String username; - private String password; - private String email; - private String firstName; - private String lastName; - - /* Spring Security fields*/ - private List authorities; - private boolean accountNonExpired = true; - private boolean accountNonLocked = true; - private boolean credentialsNonExpired = true; - private boolean enabled = true; - - public User(String userName2, String userPassword, List grantedAuths) { - this.username = userName2; - this.password = userPassword; - this.authorities = grantedAuths; - - } - - public User() { - // TODO Auto-generated constructor stub - } - - @Override - public String getUsername() { - return username; - } - - public void setUsername(String username) { - this.username = username; - } - - @Override - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public String getFirstName() { - return firstName; - } - - public void setFirstName(String firstName) { - this.firstName = firstName; - } - - public String getLastName() { - return lastName; - } - - public void setLastName(String lastName) { - this.lastName = lastName; - } - - public String getEmail() { - return email; - } - - public void setEmail(String email) { - this.email = email; - } - - @Override - public Collection getAuthorities() { - return this.authorities; - } - - public void setAuthorities(List authorities) { - this.authorities = authorities; - } - - - @Override - public boolean isAccountNonExpired() { - return this.accountNonExpired; - } - - public void setAccountNonExpired(boolean accountNonExpired) { - this.accountNonExpired = accountNonExpired; - } - - @Override - public boolean isAccountNonLocked() { - return this.accountNonLocked; - } - - public void setAccountNonLocked(boolean accountNonLocked) { - this.accountNonLocked = accountNonLocked; - } - - @Override - public boolean isCredentialsNonExpired() { - return this.credentialsNonExpired; - } - - public void setCredentialsNonExpired(boolean credentialsNonExpired) { - this.credentialsNonExpired = credentialsNonExpired; - } - - @Override - public boolean isEnabled() { - return this.enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("User [username="); - builder.append(username); - builder.append(", email="); - builder.append(email); - builder.append(", firstName="); - 
builder.append(firstName); - builder.append(", lastName="); - builder.append(lastName); - builder.append(", authorities="); - builder.append(authorities); - builder.append(", accountNonExpired="); - builder.append(accountNonExpired); - builder.append(", accountNonLocked="); - builder.append(accountNonLocked); - builder.append(", credentialsNonExpired="); - builder.append(credentialsNonExpired); - builder.append(", enabled="); - builder.append(enabled); - builder.append("]"); - return builder.toString(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java deleted file mode 100644 index 43854f1145c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.security; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.security.authentication.AuthenticationProvider; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.authority.SimpleGrantedAuthority; - -abstract class LogsearchAbstractAuthenticationProvider implements AuthenticationProvider { - - protected enum AuthMethod { - FILE, EXTERNAL_AUTH, SIMPLE, LDAP - }; - - @Override - public boolean supports(Class authentication) { - return UsernamePasswordAuthenticationToken.class.isAssignableFrom(authentication); - } - - /** - * GET Default GrantedAuthority - */ - protected List getAuthorities() { - List grantedAuths = new ArrayList<>(); - grantedAuths.add(new SimpleGrantedAuthority("ROLE_USER")); - return grantedAuths; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java deleted file mode 100644 index cfa948dbddf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.security; - -import java.util.HashMap; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.util.JSONUtil; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.ldap.authentication.LdapAuthenticationProvider; -import org.springframework.security.web.authentication.WebAuthenticationDetails; - -@Named -public class LogsearchAuthenticationProvider extends LogsearchAbstractAuthenticationProvider { - private static final Logger logger = Logger .getLogger(LogsearchAuthenticationProvider.class); - private static final Logger auditLogger = Logger.getLogger("org.apache.ambari.logsearch.audit"); - - @Inject - private LogsearchFileAuthenticationProvider fileAuthenticationProvider; - - @Inject - private LogsearchExternalServerAuthenticationProvider externalServerAuthenticationProvider; - - @Inject - private LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider; - - @Inject - private LogsearchLdapAuthenticationProvider ldapAuthenticationProvider; - - @Override - public Authentication authenticate(Authentication inAuthentication) throws AuthenticationException { - logger.info("Authenticating user:" + inAuthentication.getName() + ", userDetail=" + inAuthentication.toString()); - logger.info("authentication.class=" + inAuthentication.getClass().getName()); - - HashMap auditRecord = new HashMap<>(); - auditRecord.put("user", inAuthentication.getName()); - auditRecord.put("principal", inAuthentication.getPrincipal().toString()); - auditRecord.put("auth_class", inAuthentication.getClass().getName()); - if (inAuthentication instanceof UsernamePasswordAuthenticationToken) { - UsernamePasswordAuthenticationToken authClass = (UsernamePasswordAuthenticationToken) inAuthentication; - Object details = authClass.getDetails(); - if (details instanceof WebAuthenticationDetails) { - WebAuthenticationDetails webAuthentication = (WebAuthenticationDetails) details; - auditRecord.put("remote_ip", webAuthentication.getRemoteAddress()); - auditRecord.put("session", webAuthentication.getSessionId()); - } - } - - boolean isSuccess = false; - try { - Authentication authentication = inAuthentication; - AuthenticationException authException = null; - - for (AuthMethod authMethod : AuthMethod.values()) { - try { - authentication = doAuth(authentication, authMethod); - if (authentication != null && authentication.isAuthenticated()) { - logger.info("Authenticated using method=" + authMethod.name() + ", user=" + authentication.getName()); - auditRecord.put("result", "allowed"); - isSuccess = true; - auditRecord.put("authType", authMethod.name()); - return authentication; - } - } catch 
(AuthenticationException ex) { - if (authException == null) { - authException = ex; - } - } catch (Exception e) { - logger.error(e, e.getCause()); - } - } - - auditRecord.put("result", "denied"); - logger.warn("Authentication failed for user=" + inAuthentication.getName() + ", userDetail=" + inAuthentication.toString()); - if (authException != null) { - auditRecord.put("reason", authException.getMessage()); - throw authException; - } - return authentication; - } finally { - try { - String jsonStr = JSONUtil.toJson(auditRecord); - auditLogger.log(isSuccess ? Level.INFO : Level.WARN, jsonStr); - } - catch (Exception e) { - logger.error("Unable to add audit log entry", e); - } - } - } - - private Authentication doAuth(Authentication authentication, AuthMethod authMethod) { - switch (authMethod) { - case FILE: return fileAuthenticationProvider.authenticate(authentication); - case LDAP: return ldapAuthenticationProvider.authenticate(authentication); - case EXTERNAL_AUTH: return externalServerAuthenticationProvider.authenticate(authentication); - case SIMPLE: return simpleAuthenticationProvider.authenticate(authentication); - default: logger.error("Invalid authentication method :" + authMethod.name()); - } - return authentication; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java deleted file mode 100644 index 4449da1b47d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
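For reference, a minimal sketch of the ordered-fallback pattern that the deleted LogsearchAuthenticationProvider above implements: each configured provider is tried in turn, the first fully authenticated result wins, and the first AuthenticationException is rethrown only if every provider fails. Only the Spring Security types (AuthenticationProvider, Authentication, AuthenticationException) are real; the class and method names here are illustrative and not part of the original sources.

import java.util.List;

import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;

// Illustrative sketch, not from the deleted sources: ordered fallback over providers.
class OrderedFallbackAuthenticator {
  private final List<AuthenticationProvider> providers;

  OrderedFallbackAuthenticator(List<AuthenticationProvider> providers) {
    this.providers = providers;
  }

  Authentication authenticate(Authentication input) {
    Authentication current = input;
    AuthenticationException firstFailure = null;
    for (AuthenticationProvider provider : providers) {
      try {
        current = provider.authenticate(current);
        if (current != null && current.isAuthenticated()) {
          return current;                    // first successful provider wins
        }
      } catch (AuthenticationException ex) {
        if (firstFailure == null) {
          firstFailure = ex;                 // remember the earliest failure
        }
      }
    }
    if (firstFailure != null) {
      throw firstFailure;                    // nobody succeeded and at least one provider objected
    }
    return current;                          // every provider passed the request through untouched
  }
}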
- */ -package org.apache.ambari.logsearch.web.security; - -import java.util.ArrayList; -import java.util.List; - -import javax.inject.Inject; -import javax.inject.Named; - -import org.apache.ambari.logsearch.common.ExternalServerClient; -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.util.JSONUtil; -import org.apache.commons.lang.StringEscapeUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; - -/** - * - * Authentication provider to authenticate user from external-server using REST - * call - */ -@Named -public class LogsearchExternalServerAuthenticationProvider extends LogsearchAbstractAuthenticationProvider { - - private static Logger LOG = Logger.getLogger(LogsearchExternalServerAuthenticationProvider.class); - - private static enum PrivilegeInfo { - PERMISSION_LABEL("permission_label"), - PERMISSION_NAME("permission_name"), - PRINCIPAL_NAME("principal_name"), - PRINCIPAL_TYPE("principal_type"), - PRIVILEGE_ID("privilege_id"), - TYPE("type"), - USER_NAME("user_name"); - - private String propertyKey; - - private PrivilegeInfo(String name) { - this.propertyKey = name; - } - - public String toString() { - return propertyKey; - } - } - - @Inject - private ExternalServerClient externalServerClient; - - @Inject - private AuthPropsConfig authPropsConfig; - - /** - * Authenticating user from external-server using REST call - * - * @param authentication the authentication request object. - * @return a fully authenticated object including credentials. - * @throws AuthenticationException if authentication fails. 
- */ - @Override - public Authentication authenticate(Authentication authentication) throws AuthenticationException { - if (!authPropsConfig.isAuthExternalEnabled()) { - LOG.debug("external server auth is disabled."); - return authentication; - } - - String username = authentication.getName(); - String password = (String) authentication.getCredentials(); - if (StringUtils.isBlank(username)) { - throw new BadCredentialsException("Username can't be null or empty."); - } - if (StringUtils.isBlank(password)) { - throw new BadCredentialsException("Password can't be null or empty."); - } - password = StringEscapeUtils.unescapeHtml(password); - username = StringEscapeUtils.unescapeHtml(username); - - try { - String finalLoginUrl = authPropsConfig.getExternalAuthLoginUrl().replace("$USERNAME", username); - String responseObj = (String) externalServerClient.sendGETRequest(finalLoginUrl, String.class, username, password); - if (!isAllowedRole(responseObj)) { - LOG.error(username + " doesn't have permission"); - throw new BadCredentialsException("Invalid User"); - } - } catch (Exception e) { - LOG.error("Login failed for username :" + username + " Error :" + e.getLocalizedMessage()); - throw new BadCredentialsException("Bad credentials"); - } - authentication = new UsernamePasswordAuthenticationToken(username, password, getAuthorities()); - return authentication; - } - - /** - * Return true/false based on PEMISSION NAME return boolean - */ - private boolean isAllowedRole(String responseJson) { - - List permissionNames = new ArrayList<>(); - JSONUtil.getValuesOfKey(responseJson, PrivilegeInfo.PERMISSION_NAME.toString(), permissionNames); - List allowedRoleList = authPropsConfig.getAllowedRoles(); - if (permissionNames.isEmpty() || allowedRoleList.size() < 1 || responseJson == null) { - return false; - } - return permissionNames.stream().anyMatch(allowedRoleList::contains); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java deleted file mode 100644 index 8c12e0a38e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
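Note that the deleted isAllowedRole above extracts permission names from the response before its null check on responseJson, so a null response would fail inside JSONUtil rather than at the guard. Below is a minimal, hypothetical sketch of the same role test with the guards ordered first; it takes the already-extracted permission names as input because JSONUtil is project-internal.

import java.util.List;

// Hypothetical helper, not from the deleted sources: true only when at least one
// permission name returned by the external server is in the allowed-role list.
final class RoleCheck {
  private RoleCheck() {
  }

  static boolean isAllowedRole(List<String> permissionNames, List<String> allowedRoles) {
    if (permissionNames == null || permissionNames.isEmpty()
        || allowedRoles == null || allowedRoles.isEmpty()) {
      return false;                          // guard before any lookups
    }
    return permissionNames.stream().anyMatch(allowedRoles::contains);
  }
}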
- */ -package org.apache.ambari.logsearch.web.security; - -import java.util.Collection; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.util.CommonUtil; -import org.apache.commons.lang.StringEscapeUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.userdetails.UserDetails; -import org.springframework.security.core.userdetails.UserDetailsService; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthenticationProvider { - - private static final Logger logger = Logger.getLogger(LogsearchFileAuthenticationProvider.class); - - @Inject - private AuthPropsConfig authPropsConfig; - - @Inject - private UserDetailsService userDetailsService; - - @Override - public Authentication authenticate(Authentication authentication) throws AuthenticationException { - if (!authPropsConfig.isAuthFileEnabled()) { - logger.debug("File auth is disabled."); - return authentication; - } - - String username = authentication.getName(); - String password = (String) authentication.getCredentials(); - if (StringUtils.isBlank(username)) { - throw new BadCredentialsException("Username can't be null or empty."); - } - if (StringUtils.isBlank(password)) { - throw new BadCredentialsException("Password can't be null or empty."); - } - // html unescape - password = StringEscapeUtils.unescapeHtml(password); - username = StringEscapeUtils.unescapeHtml(username); - - UserDetails user = userDetailsService.loadUserByUsername(username); - if (user == null) { - logger.error("Username not found."); - throw new BadCredentialsException("User not found."); - } - if (StringUtils.isEmpty(user.getPassword())) { - logger.error("Password can't be null or empty."); - throw new BadCredentialsException("Password can't be null or empty."); - } - String encPassword = CommonUtil.encryptPassword(username, password); - if (!encPassword.equals(user.getPassword())) { - logger.error("Wrong password for user=" + username); - throw new BadCredentialsException("Wrong password."); - } - - Collection authorities = user.getAuthorities(); - authentication = new UsernamePasswordAuthenticationToken(username, encPassword, authorities); - return authentication; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java deleted file mode 100644 index 5cf81db8c5d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
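A compact sketch of the credential check that the deleted LogsearchFileAuthenticationProvider above performs: load the user, verify a stored password, and build an authenticated token. The original compares against a project-internal CommonUtil.encryptPassword hash; this variation swaps in Spring's PasswordEncoder purely to stay self-contained, so it is not the deleted class's exact logic.

import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.password.PasswordEncoder;

// Illustrative variation, not the deleted implementation: same flow, but the password
// comparison uses Spring's PasswordEncoder instead of the project-internal hash helper.
class FileCredentialCheck {
  private final UserDetailsService userDetailsService;
  private final PasswordEncoder passwordEncoder;

  FileCredentialCheck(UserDetailsService userDetailsService, PasswordEncoder passwordEncoder) {
    this.userDetailsService = userDetailsService;
    this.passwordEncoder = passwordEncoder;
  }

  Authentication check(String username, String rawPassword) {
    UserDetails user = userDetailsService.loadUserByUsername(username);
    if (user == null || user.getPassword() == null || user.getPassword().isEmpty()) {
      throw new BadCredentialsException("User not found or has no stored password.");
    }
    if (!passwordEncoder.matches(rawPassword, user.getPassword())) {
      throw new BadCredentialsException("Wrong password.");
    }
    return new UsernamePasswordAuthenticationToken(username, user.getPassword(), user.getAuthorities());
  }
}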
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.security; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.dao.RoleDao; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.ldap.authentication.LdapAuthenticationProvider; -import org.springframework.security.ldap.authentication.LdapAuthenticator; -import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator; - -import javax.inject.Inject; -import java.util.Collection; - -public class LogsearchLdapAuthenticationProvider extends LdapAuthenticationProvider { - - private static final Logger LOG = LoggerFactory.getLogger(LogsearchLdapAuthenticationProvider.class); - - @Inject - private AuthPropsConfig authPropsConfig; - - public LogsearchLdapAuthenticationProvider(LdapAuthenticator bindAuthenticator, LdapAuthoritiesPopulator ldapAuthoritiesPopulator) { - super(bindAuthenticator, ldapAuthoritiesPopulator); - } - - @Override - public Authentication authenticate(Authentication authentication) throws AuthenticationException { - if (!authPropsConfig.isAuthLdapEnabled()) { - LOG.debug("LDAP auth is disabled."); - return authentication; - } - authentication = super.authenticate(authentication); - final Collection authorities; - if (StringUtils.isBlank(authPropsConfig.getLdapAuthConfig().getLdapGroupSearchBase())) { - authorities = RoleDao.createDefaultAuthorities(); - } else { - authorities = authentication.getAuthorities(); - } - - authentication = new UsernamePasswordAuthenticationToken(authentication.getPrincipal(), authentication.getCredentials(), authorities); - return authentication; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java deleted file mode 100644 index 3506264031b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.security; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.web.model.User; -import org.apache.commons.lang.StringEscapeUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; - -import javax.inject.Inject; -import javax.inject.Named; - -@Named -public class LogsearchSimpleAuthenticationProvider extends LogsearchAbstractAuthenticationProvider { - - private static final Logger logger = Logger.getLogger(LogsearchSimpleAuthenticationProvider.class); - - @Inject - private AuthPropsConfig authPropsConfig; - - @Override - public Authentication authenticate(Authentication authentication) throws AuthenticationException { - if (!authPropsConfig.isAuthSimpleEnabled()) { - logger.debug("Simple auth is disabled"); - return authentication; - } - - String username = authentication.getName(); - String password = (String) authentication.getCredentials(); - username = StringEscapeUtils.unescapeHtml(username); - if (StringUtils.isBlank(username)) { - throw new BadCredentialsException("Username can't be null or empty."); - } - - User user = new User(); - user.setUsername(username); - authentication = new UsernamePasswordAuthenticationToken(username, password, getAuthorities()); - return authentication; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/default.properties b/ambari-logsearch/ambari-logsearch-server/src/main/resources/default.properties deleted file mode 100644 index 38c812fac5e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/default.properties +++ /dev/null @@ -1,49 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -#Note: Simple will be supported only if both file and ldap is disabled. 
-logsearch.auth.file.enable=true -logsearch.auth.ldap.enable=false -logsearch.auth.simple.enable=false - -#login config -logsearch.login.credentials.file=user_pass.json - -logsearch.cert.folder.location=/usr/lib/ambari-logsearch-portal/conf/keys -logsearch.cert.algorithm=sha256WithRSAEncryption - -management.security.enabled=false - -endpoints.auditevents.enabled=false -endpoints.configprops.enabled=false -endpoints.env.enabled=false -endpoints.dump.enabled=false -endpoints.heapdump.enabled=false -endpoints.metrics.enabled=true -endpoints.health.enabled=true -endpoints.health.path=/ping - -endpoints.actuator.enabled=false -endpoints.autoconfig.enabled=false -endpoints.beans.enabled=false -endpoints.flyway.enabled=false -endpoints.info.enabled=false -endpoints.liquibase.enabled=false -endpoints.mappings.enabled=false -endpoints.shutdown.enabled=false -endpoints.trace.enabled=false - -management.health.solr.enabled=false diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/info.properties b/ambari-logsearch/ambari-logsearch-server/src/main/resources/info.properties deleted file mode 100644 index 03d9c91cb97..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/info.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -logsearch.app.version=1.0.0-${project.version} -logsearch.solr.version=${solr.version} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-server/src/main/resources/log4j.xml deleted file mode 100644 index 40868fa2663..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/log4j.xml +++ /dev/null @@ -1,86 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/logsearch.properties b/ambari-logsearch/ambari-logsearch-server/src/main/resources/logsearch.properties deleted file mode 100755 index bea61729bbe..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/logsearch.properties +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -logsearch.solr.zk_connect_string=localhost:2181 -logsearch.solr.collection.service.logs=hadoop_logs -logsearch.service.logs.split.interval.mins=15 -logsearch.collection.service.logs.numshards=3 -logsearch.collection.service.logs.replication.factor=2 -logsearch.solr.audit.logs.zk_connect_string=localhost:2181 -logsearch.solr.collection.audit.logs=audit_logs -logsearch.audit.logs.split.interval.mins=15 -logsearch.collection.audit.logs.numshards=2 -logsearch.collection.audit.logs.replication.factor=2 -logsearch.solr.config_set.folder=${LOGSEARCH_SERVER_RELATIVE_LOCATION:}src/main/configsets -logsearch.solr.audit.logs.config_set.folder=${LOGSEARCH_SERVER_RELATIVE_LOCATION:}src/main/configsets -logsearch.solr.collection.history=history -logsearch.solr.history.config.name=history -logsearch.collection.history.replication.factor=1 -logsearch.auth.file.enabled=true -logsearch.login.credentials.file=user_pass.json - -logsearch.auth.ldap.enabled=false -logsearch.auth.simple.enabled=false -logsearch.auth.external_auth.enabled=false - -logsearch.protocol=http -logsearch.config.zk_connect_string=localhost:2181 \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/roles.json b/ambari-logsearch/ambari-logsearch-server/src/main/resources/roles.json deleted file mode 100644 index f280b4ea8c7..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/roles.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "roles": { - "admin" : ["user", "admin"] - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/swagger/swagger.html b/ambari-logsearch/ambari-logsearch-server/src/main/resources/swagger/swagger.html deleted file mode 100644 index 50c2f661249..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/swagger/swagger.html +++ /dev/null @@ -1,136 +0,0 @@ - - - - - - Log Search REST API - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

- - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/templates/audit_log_txt.ftl b/ambari-logsearch/ambari-logsearch-server/src/main/resources/templates/audit_log_txt.ftl deleted file mode 100644 index 587e366b678..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/templates/audit_log_txt.ftl +++ /dev/null @@ -1,42 +0,0 @@ -<#-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> ---------------------------------SUMMARY----------------------------------- -Users = ${usersSummary} -Resources = ${resourcesSummary} - - - - -Users Components/Access --------------------------------------------------------------------------- -<#if users??> - <#list users as user> -${user.data} - - - - - - - -Resources Components/Access --------------------------------------------------------------------------- -<#if resources??> - <#list resources as resource> -${resource.data} - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/templates/service_log_txt.ftl b/ambari-logsearch/ambari-logsearch-server/src/main/resources/templates/service_log_txt.ftl deleted file mode 100644 index 8a5e19d16a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/templates/service_log_txt.ftl +++ /dev/null @@ -1,36 +0,0 @@ -<#-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> -**********************Summary********************** -Number of Logs : ${numberOfLogs} -From : ${from} -To : ${to} -Host : ${hosts} -Component : ${components} -Levels : ${levels} -Format : ${format} - -Included String: [${iString}] - -Excluded String: [${eString}] - -************************Logs*********************** -2016-09-26 11:49:19,723 WARN MainThread lock.py:60 - Releasing the lock. 
-<#if logs??> - <#list logs as log> -${log.data} - - diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/resources/user_pass.json b/ambari-logsearch/ambari-logsearch-server/src/main/resources/user_pass.json deleted file mode 100644 index 97a7f4571bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/resources/user_pass.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "users": [{ - "name": "Logsearch Admin", - "username": "admin", - "password": "admin", - "en_password": "" - }] -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/scripts/logsearch-env.sh b/ambari-logsearch/ambari-logsearch-server/src/main/scripts/logsearch-env.sh deleted file mode 100755 index d8a837fa1a4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/scripts/logsearch-env.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Log Search extra options -export LOGSEARCH_JAVA_OPTS=${LOGSEARCH_JAVA_OPTS:-""} - -# Log Search debug options -# export LOGSEARCH_DEBUG=true -# exoprt LOGSEARCH_DEBUG_SUSPEND=n -export LOGSEARCH_DEBUG_PORT=5005 - -# Log Search memory -# export LOGSEARCH_JAVA_MEM="--Xmx1024m" - -# export LOG_PATH=/var/log/ambari-logsearch-logfeeder/ -# export LOG_FILE=logsearch.log - -# Pid file of the application -# export LOGSEARCH_PID_DIR=/var/run/ambari-logsearch-logfeeder -# export LOGSEARCH_PID_FILE=logfeeder.pid - -# SSL settings" -# export LOGSEARCH_SSL="true" -# export LOGSEARCH_KEYSTORE_LOCATION="/my/path/keystore.jks" -# export LOGSEARCH_KEYSTORE_TYPE="jks" -# export LOGSEARCH_TRUSTSTORE_LOCATION="/my/path/trutstore.jks" -# export LOGSEARCH_TRUSTSTORE_TYPE="jks" \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/scripts/logsearch.sh b/ambari-logsearch/ambari-logsearch-server/src/main/scripts/logsearch.sh deleted file mode 100755 index a51775d3085..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/main/scripts/logsearch.sh +++ /dev/null @@ -1,278 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "$LOGSEARCH_JAVA_MEM" = "" ]; then - LOGSEARCH_JAVA_MEM="-Xmx1g" -fi - -readlinkf(){ - # get real path on mac OSX - perl -MCwd -e 'print Cwd::abs_path shift' "$1"; -} - -if [ "$(uname -s)" = 'Linux' ]; then - SCRIPT_DIR="`dirname "$(readlink -f "$0")"`" -else - SCRIPT_DIR="`dirname "$(readlinkf "$0")"`" -fi - -LOGSEARCH_ROOT_DIR="`dirname \"$SCRIPT_DIR\"`" -LOGSEARCH_LIBS_DIR="$LOGSEARCH_ROOT_DIR/libs" -LOGSEARCH_WEBAPP_DIR="$LOGSEARCH_ROOT_DIR/webapp" - -if [ "$LOGSEARCH_CONF_DIR" = "" ]; then - if [ -d "$LOGSEARCH_ROOT_DIR/conf" ]; then - LOGSEARCH_CONF_DIR="$LOGSEARCH_ROOT_DIR/conf" - fi -fi - -if [ -f "$LOGSEARCH_CONF_DIR/logsearch-env.sh" ]; then - source $LOGSEARCH_CONF_DIR/logsearch-env.sh -fi - -JVM="java" - -if [ -x $JAVA_HOME/bin/java ]; then - JVM=$JAVA_HOME/bin/java -fi - -if [ ! -z "$LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE" ]; then - source $LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE -fi - -if [ -z "$LOGSEARCH_PID_FILE" ]; then - LOGSEARCH_DEFAULT_PID_DIR="/var/run/ambari-logsearch-portal" - if [ -d "$LOGSEARCH_DEFAULT_PID_DIR" ]; then - LOGSEARCH_PID_DIR=$LOGSEARCH_DEFAULT_PID_DIR - else - LOGSEARCH_PID_DIR=$HOME - fi - export LOGSEARCH_PID_FILE=$LOGSEARCH_PID_DIR/logsearch.pid -fi - -if [ -z "$LOG_FILE" ]; then - export LOG_FILE="logsearch.log" -fi - -LOGSEARCH_GC_LOGFILE="logsearch_gc.log" - -if [ -z "$LOG_PATH" ]; then - LOG_FILE="$HOME/$LOG_FILE" - LOGSEARCH_GC_LOGFILE="$HOME/$LOGSEARCH_GC_LOGFILE" -else - LOG_PATH_WITHOUT_SLASH=${LOG_PATH%/} - LOG_FILE="$LOG_PATH_WITHOUT_SLASH/$LOG_FILE" - LOGSEARCH_GC_LOGFILE="$LOG_PATH_WITHOUT_SLASH/$LOGSEARCH_GC_LOGFILE" -fi - -LOGSEARCH_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$LOGSEARCH_GC_LOGFILE" - -function print_usage() { - cat << EOF - - Usage: [] [] - - commands: - start Start Log Search - stop Stop Log Search - status Check Log Search status (pid file) - help Print usage - - - start command arguments: - -d, --debug Start java process in debug mode - -f, --foreground Start java process in foreground - -EOF -} - -function spinner() { - local pid=$1 - local delay=0.5 - local spinstr='|/-\' - while [ "$(ps aux | awk '{print $2}' | grep -w $pid)" ]; do - local temp=${spinstr#?} - printf " [%c] " "$spinstr" - local spinstr=$temp${spinstr%"$temp"} - sleep $delay - printf "\b\b\b\b\b\b" - done - printf " \b\b\b\b" -} - -function status() { - echo "Checking Log Search status ..." >&2 - if [ -f "$LOGSEARCH_PID_FILE" ]; then - LOGSEARCH_PID=`cat "$LOGSEARCH_PID_FILE"` - else - echo "Log Search pid not exists. (probably the process is not running)" >&2 - return 1 - fi - - if ps -p $LOGSEARCH_PID > /dev/null - then - echo "Log Search process is running. (pid: $LOGSEARCH_PID)" >&2 - return 0 - else - echo "Log Search process is not running." >&2 - return 1 - fi -} - -function start() { - exit_status=$(status; echo $?) - if [ "$exit_status" = "0" ]; then - echo "Skipping start process." 
- exit 0 - fi - - FG="false" - LOGSEARCH_DEBUG_SUSPEND=${LOGSEARCH_DEBUG_SUSPEND:-n} - LOGSEARCH_DEBUG_PORT=${LOGSEARCH_DEBUG_PORT:-"5005"} - - if [ "$LOGSEARCH_DEBUG" = "true" ]; then - LOGSEARCH_JAVA_OPTS="$LOGSEARCH_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$LOGSEARCH_DEBUG_PORT,server=y,suspend=$LOGSEARCH_DEBUG_SUSPEND " - fi - - if [ "$LOGSEARCH_SSL" = "true" ]; then - LOGSEARCH_JAVA_OPTS="$LOGSEARCH_JAVA_OPTS -Djavax.net.ssl.keyStore=$LOGSEARCH_KEYSTORE_LOCATION -Djavax.net.ssl.keyStoreType=$LOGSEARCH_KEYSTORE_TYPE -Djavax.net.ssl.trustStore=$LOGSEARCH_TRUSTSTORE_LOCATION -Djavax.net.ssl.trustStoreType=$LOGSEARCH_TRUSTSTORE_TYPE" - fi - - if [ "$LOGSEARCH_JMX" = "true" ]; then - LOGSEARCH_JAVA_OPTS="$LOGSEARCH_JAVA_OPTS -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=2099" - fi - - if [ $# -gt 0 ]; then - while true; do - case "$1" in - -f|--foreground) - FG="true" - shift - ;; - -d|--debug) - if [ "$LOGSEARCH_DEBUG" != "true" ]; then - LOGSEARCH_JAVA_OPTS="$LOGSEARCH_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$LOGSEARCH_DEBUG_PORT,server=y,suspend=$LOGSEARCH_DEBUG_SUSPEND " - fi - shift - ;; - *) - if [ "${1:0:2}" == "-D" ]; then - # pass thru any opts that begin with -D (java system props) - LOGSEARCH_JAVA_OPTS+=("$1") - echo "$LOGSEARCH_JAVA_OPTS" - shift - else - if [ "$1" != "" ]; then - print_usage - exit 1 - else - break - fi - fi - ;; - esac - done - fi - - if [ $FG == "true" ]; then - echo "Starting Log Search... (foreground) pid_file=$LOGSEARCH_PID_FILE" - echo "Run command $JVM -cp '$LOGSEARCH_CONF_DIR:$LOGSEARCH_WEBAPP_DIR:$LOGSEARCH_LIBS_DIR/*' $LOGSEARCH_GC_OPTS $LOGSEARCH_JAVA_OPTS $LOGSEARCH_JAVA_MEM org.apache.ambari.logsearch.LogSearch" - $JVM -cp "$LOGSEARCH_CONF_DIR:$LOGSEARCH_WEBAPP_DIR:$LOGSEARCH_LIBS_DIR/*" $LOGSEARCH_GC_OPTS $LOGSEARCH_JAVA_OPTS $LOGSEARCH_JAVA_MEM org.apache.ambari.logsearch.LogSearch - else - echo "Starting Log Search... Output file=$LOG_FILE pid_file=$LOGSEARCH_PID_FILE" - echo "Run command nohup $JVM -cp '$LOGSEARCH_CONF_DIR:$LOGSEARCH_WEBAPP_DIR:$LOGSEARCH_LIBS_DIR/*' $LOGSEARCH_GC_OPTS $LOGSEARCH_JAVA_OPTS $LOGSEARCH_JAVA_MEM org.apache.ambari.logsearch.LogSearch" - nohup $JVM -cp "$LOGSEARCH_CONF_DIR:$LOGSEARCH_WEBAPP_DIR:$LOGSEARCH_LIBS_DIR/*" $LOGSEARCH_GC_OPTS $LOGSEARCH_JAVA_OPTS $LOGSEARCH_JAVA_MEM org.apache.ambari.logsearch.LogSearch > $LOG_FILE 2>&1 & - fi -} - -function stop() { - LOGSEARCH_STOP_WAIT=3 - if [ -f "$LOGSEARCH_PID_FILE" ]; then - LOGSEARCH_PID=`cat "$LOGSEARCH_PID_FILE"` - fi - - if [ "$LOGSEARCH_PID" != "" ]; then - echo -e "Sending stop command to Log Search... Checking PID: $LOGSEARCH_PID." - kill $LOGSEARCH_PID - (loops=0 - while true - do - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $LOGSEARCH_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - slept=$((loops * 2)) - if [ $slept -lt $LOGSEARCH_STOP_WAIT ]; then - sleep 2 - loops=$[$loops+1] - else - exit # subshell! - fi - else - exit # subshell! - fi - done) & - spinner $! - rm -f "$LOGSEARCH_PID_FILE" - else - echo -e "No Log Search process found to stop." - exit 0 - fi - - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $LOGSEARCH_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - echo -e "Log Search process $LOGSEARCH_PID is still running; forcefully killing it now." 
- kill -9 $LOGSEARCH_PID - echo "Killed process $LOGSEARCH_PID" - rm -f "$LOGSEARCH_PID_FILE" - sleep 1 - else - echo "Log Search is stopped." - fi - - CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $LOGSEARCH_PID | sort -r | tr -d ' '` - if [ "$CHECK_PID" != "" ]; then - echo "ERROR: Failed to kill Log Search Java process $LOGSEARCH_PID ... script fails." - exit 1 - fi -} - -if [ $# -gt 0 ]; then - SCRIPT_CMD="$1" - shift -else - print_usage - exit 1 -fi - -case $SCRIPT_CMD in - start) - start ${1+"$@"} - ;; - stop) - stop - ;; - status) - status - ;; - help) - print_usage - exit 0 - ;; - *) - print_usage - exit 1 - ;; - -esac \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LabelFallbackHandlerTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LabelFallbackHandlerTest.java deleted file mode 100644 index 981a537627b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LabelFallbackHandlerTest.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import org.apache.ambari.logsearch.conf.UIMappingConfig; -import org.junit.Before; -import org.junit.Test; - -import java.util.Arrays; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class LabelFallbackHandlerTest { - - private LabelFallbackHandler underTest; - - @Before - public void setUp() { - final UIMappingConfig uiMappingConfig = new UIMappingConfig(); - uiMappingConfig.setLabelFallbackEnabled(true); - underTest = new LabelFallbackHandler(uiMappingConfig); - } - - @Test - public void testFallbackIgnore() { - // GIVEN - String testInput = "my_field"; - // WHEN - String result = underTest.fallbackIfRequired(testInput, "spec label", true, false, true, null, null); - // THEN - assertEquals("spec label", result); - } - - @Test - public void testFallbackUnderscore() { - // GIVEN - String testInput = "my_field"; - // WHEN - String result = underTest.fallback(testInput, true, false, true); - // THEN - assertEquals("My Field", result); - } - - @Test - public void testFallbackUnderscoreWithNull() { - // GIVEN - // WHEN - String result = underTest.fallback(null, true, false, true); - // THEN - assertNull(result); - } - - @Test - public void testFallbackCamelCase() { - // GIVEN - String testInput = "myField"; - // WHEN - String result = underTest.fallback(testInput, false, true, true); - // THEN - assertEquals("My Field", result); - } - - @Test - public void testFallbackCamelCaseWithEmptyString() { - // GIVEN - String testInput = ""; - // WHEN - String result = underTest.fallback(testInput, true, true, true); - // THEN - assertNull(result); - } - - @Test - public void testFallbackCamelCaseWithNull() { - // GIVEN - // WHEN - String result = underTest.fallback(null, true, true, true); - // THEN - assertNull(result); - } - - @Test - public void testFallbackCamelCaseWith1Letter() { - // GIVEN - String testInput = "d"; - // WHEN - String result = underTest.fallback(testInput, true, true, true); - // THEN - assertEquals("D", result); - } - - @Test - public void testFallbackWithRemovingPrefixes() { - // GIVEN - String testInput1 = "ws_request_id"; - String testInput2 = "std_request_username"; - // WHEN - String result1 = underTest.fallback(testInput1, true, true, true, Arrays.asList("ws_", "std_"), null); - String result2 = underTest.fallback(testInput2, true, true, true, Arrays.asList("ws_", "std_"), null); - // THEN - assertEquals("Request Id", result1); - assertEquals("Request Username", result2); - } - - @Test - public void testFallbackWithRemovingSuffixes() { - // GIVEN - String testInput1 = "request_id_i"; - String testInput2 = "request_username_s"; - // WHEN - String result1 = underTest.fallback(testInput1, true, true, true, null, Arrays.asList("_i", "_s")); - String result2 = underTest.fallback(testInput2, true, true, true, null, Arrays.asList("_i", "_s")); - // THEN - assertEquals("Request Id", result1); - assertEquals("Request Username", result2); - } - - @Test - public void testFallbackWithRemovingPrefixesWithoutAnyPrefix() { - // GIVEN - String testInput = "request_id"; - // WHEN - String result = underTest.fallback(testInput, true, true, true, Arrays.asList("ws_", "std_"), null); - // THEN - assertEquals("Request Id", result); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java 
b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java deleted file mode 100644 index cd3374169b2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.common; - -import org.apache.ambari.logsearch.web.model.User; -import org.junit.Before; -import org.junit.Test; - -import junit.framework.Assert; - -public class LogSearchContextUtilTest { - - @Before - public void resetContext() { - LogSearchContext.resetContext(); - } - - @Test - public void testNoContext() { - Assert.assertNull(LogSearchContext.getCurrentUsername()); - } - - @Test - public void testUserSession() { - User user = new User("UserName", "Password", null); - - LogSearchContext context = new LogSearchContext(); - context.setUser(user); - - LogSearchContext.setContext(context); - - Assert.assertEquals(LogSearchContext.getCurrentUsername(), "UserName"); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LogSearchLdapAuthorityMapperTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LogSearchLdapAuthorityMapperTest.java deleted file mode 100644 index 8d7d15a6311..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/LogSearchLdapAuthorityMapperTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import org.junit.Test; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.authority.SimpleGrantedAuthority; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -public class LogSearchLdapAuthorityMapperTest { - - @Test - public void testSimpleMapping() { - // GIVE - Map roleGroupMapping = new HashMap<>(); - roleGroupMapping.put("apache1", "ROLE_USER"); - LogSearchLdapAuthorityMapper underTest = new LogSearchLdapAuthorityMapper(roleGroupMapping); - // WHEN - List result = new ArrayList<>(underTest.mapAuthorities(generateAuthorities())); - // THEN - assertEquals("ROLE_USER", result.get(0).toString()); - } - - @Test - public void testSimpleMappingWithoutRolePrefix() { - // GIVE - Map roleGroupMapping = new HashMap<>(); - roleGroupMapping.put("apache1", "USER"); - LogSearchLdapAuthorityMapper underTest = new LogSearchLdapAuthorityMapper(roleGroupMapping); - // WHEN - List result = new ArrayList<>(underTest.mapAuthorities(generateAuthorities())); - // THEN - assertEquals("ROLE_USER", result.get(0).toString()); - } - - @Test - public void testMultipleToTheSameMapping() { - // GIVE - Map roleGroupMapping = new HashMap<>(); - roleGroupMapping.put("apache1", "ROLE_USER"); - roleGroupMapping.put("APACHE2", "ROLE_USER"); - roleGroupMapping.put("role_apache3", "ROLE_USER"); - roleGroupMapping.put("ROLE_APACHE4", "ROLE_USER"); - LogSearchLdapAuthorityMapper underTest = new LogSearchLdapAuthorityMapper(roleGroupMapping); - // WHEN - List result = new ArrayList<>(underTest.mapAuthorities(generateAuthorities())); - // THEN - assertEquals("ROLE_USER", result.get(0).toString()); - assertEquals(1, result.size()); - } - - @Test - public void testMultipleRoles() { - // GIVE - Map roleGroupMapping = new HashMap<>(); - roleGroupMapping.put("apache1", "ROLE_USER"); - roleGroupMapping.put("APACHE2", "ROLE_ADMIN"); - LogSearchLdapAuthorityMapper underTest = new LogSearchLdapAuthorityMapper(roleGroupMapping); - // WHEN - List result = new ArrayList<>(underTest.mapAuthorities(generateAuthorities())); - // THEN - assertEquals(2, result.size()); - } - - private List generateAuthorities() { - List list = new ArrayList<>(); - list.add(new SimpleGrantedAuthority("apache1")); - list.add(new SimpleGrantedAuthority("APACHE2")); - list.add(new SimpleGrantedAuthority("role_apache3")); - list.add(new SimpleGrantedAuthority("ROLE_APACHE4")); - return list; - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java deleted file mode 100644 index 6b75d87bc4e..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
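The LogSearchLdapAuthorityMapper implementation itself is not part of this hunk. For orientation, here is a minimal sketch of a Spring GrantedAuthoritiesMapper that matches the expectations encoded in the test above (case-insensitive group names, an optional role_ prefix on either side, duplicate roles collapsed); treat it as an illustration of the contract, not as the deleted implementation.

import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.authority.mapping.GrantedAuthoritiesMapper;

// Illustrative sketch consistent with LogSearchLdapAuthorityMapperTest, not the deleted class.
class GroupToRoleAuthorityMapper implements GrantedAuthoritiesMapper {
  private final Map<String, String> normalizedGroupToRole = new HashMap<>();

  GroupToRoleAuthorityMapper(Map<String, String> groupToRole) {
    // Normalize map keys once: lower-case, without a leading "role_".
    for (Map.Entry<String, String> entry : groupToRole.entrySet()) {
      normalizedGroupToRole.put(normalize(entry.getKey()), entry.getValue());
    }
  }

  @Override
  public Collection<? extends GrantedAuthority> mapAuthorities(Collection<? extends GrantedAuthority> authorities) {
    Set<GrantedAuthority> mapped = new LinkedHashSet<>();      // a Set collapses duplicate roles
    for (GrantedAuthority authority : authorities) {
      String role = normalizedGroupToRole.get(normalize(authority.getAuthority()));
      if (role != null) {
        mapped.add(new SimpleGrantedAuthority(role.startsWith("ROLE_") ? role : "ROLE_" + role));
      }
    }
    return mapped;
  }

  private static String normalize(String name) {
    String lower = name.toLowerCase(Locale.ROOT);
    return lower.startsWith("role_") ? lower.substring("role_".length()) : lower;
  }
}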
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ambari.logsearch.common; - -import java.util.Date; - -import org.junit.Assert; -import org.junit.Test; - -public class ManageStartEndTimeTest { - - @Test - public void testManageStartEndTime() { - ManageStartEndTime.manage(); - Date[] range = ManageStartEndTime.getStartEndTime(); - Assert.assertEquals(range[1].getTime() - range[0].getTime(), 60*60*1000); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/PropertiesSplitterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/PropertiesSplitterTest.java deleted file mode 100644 index 30cd76f7eea..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/common/PropertiesSplitterTest.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.common; - -import org.junit.Before; -import org.junit.Test; - -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class PropertiesSplitterTest { - - private PropertiesSplitter underTest; - - @Before - public void setUp() { - underTest = new PropertiesSplitter(); - } - - @Test - public void testParseList() { - // GIVEN - // WHEN - List values = underTest.parseList("v1,v2"); - // THEN - assertTrue(values.contains("v1")); - assertTrue(values.contains("v2")); - } - - @Test - public void testParseListWithEmptyString() { - // GIVEN - // WHEN - List values = underTest.parseList(""); - // THEN - assertTrue(values.isEmpty()); - } - - @Test - public void testParseMap() { - // GIVEN - // WHEN - Map keyValues = underTest.parseMap("k1:v1,k2:v2"); - // THEN - assertEquals("v1", keyValues.get("k1")); - assertEquals("v2", keyValues.get("k2")); - } - - @Test - public void testParseMapWithEmptyValue() { - // GIVEN - // WHEN - Map keyValues = underTest.parseMap("k1:v1,k2:"); - // THEN - assertEquals("v1", keyValues.get("k1")); - assertEquals("", keyValues.get("k2")); - } - - @Test - public void testParseMapWithMissingKey() { - // GIVEN - // WHEN - Map keyValues = underTest.parseMap("k1:v1,:v2"); - // THEN - assertEquals("v1", keyValues.get("k1")); - assertNull(keyValues.get("k2")); - assertEquals(1, keyValues.size()); - } - - @Test - public void testParseMapInMap() { - // GIVEN - // WHEN - Map> keyMapValues = underTest.parseMapInMap("K1#k1:v1,k2:v2;K2#k3:v3,k4:v4"); - // THEN - Map keyValues1 = keyMapValues.get("K1"); - Map keyValues2 = keyMapValues.get("K2"); - assertNotNull(keyValues1); - assertNotNull(keyValues2); - assertEquals("v1", keyValues1.get("k1")); - assertEquals("v2", keyValues1.get("k2")); - assertEquals("v3", keyValues2.get("k3")); - assertEquals("v4", keyValues2.get("k4")); - } - - @Test - public void testParseListInMap() { - // GIVEN - // WHEN - Map> listInMap = underTest.parseListInMap("K1:v1,v2;K2:v3,v4"); - // THEN - List valueList1 = listInMap.get("K1"); - List valueList2 = listInMap.get("K2"); - assertNotNull(valueList1); - assertNotNull(valueList2); - assertEquals("v1", valueList1.get(0)); - assertEquals("v2", valueList1.get(1)); - assertEquals("v3", valueList2.get(0)); - assertEquals("v4", valueList2.get(1)); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AbstractRequestConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AbstractRequestConverterTest.java deleted file mode 100644 index 0bff00ecf88..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AbstractRequestConverterTest.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
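The PropertiesSplitter implementation is likewise not shown in this hunk. Below is a minimal sketch consistent with the expectations in the test above (comma-separated lists, ':'-separated map entries, entries with a blank key dropped, empty values kept); the class and method names are illustrative only.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Illustrative sketch matching PropertiesSplitterTest, not the deleted implementation.
final class SplitterSketch {
  private SplitterSketch() {
  }

  static List<String> parseList(String value) {
    List<String> result = new ArrayList<>();
    if (value == null || value.isEmpty()) {
      return result;
    }
    for (String item : value.split(",")) {
      if (!item.isEmpty()) {
        result.add(item);
      }
    }
    return result;
  }

  static Map<String, String> parseMap(String value) {
    Map<String, String> result = new LinkedHashMap<>();
    for (String pair : parseList(value)) {
      String[] keyValue = pair.split(":", -1);        // -1 keeps a trailing empty value
      if (keyValue.length == 2 && !keyValue[0].isEmpty()) {
        result.put(keyValue[0], keyValue[1]);
      }
    }
    return result;
  }
}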
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.BaseLogRequest; -import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest; - -public class AbstractRequestConverterTest { - - public void fillBaseLogRequestWithTestData(BaseLogRequest request) { - fillCommonRequestWithTestData(request); - request.setFrom("2016-09-13T22:00:01.000Z"); - request.setTo("2016-09-14T22:00:01.000Z"); - request.setMustBe("logsearch_app,secure_log"); - request.setMustNot("hst_agent,system_message"); - request.setIncludeQuery("[{\"log_message\" : \"myincludemessage\"}]"); - request.setExcludeQuery("[{\"log_message\" : \"myexcludemessage\"}]"); - } - - public void fillCommonRequestWithTestData(CommonSearchRequest request) { - request.setStartIndex("0"); - request.setPage("0"); - request.setPageSize("25"); - request.setClusters("cl1"); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditBarGraphRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditBarGraphRequestQueryConverterTest.java deleted file mode 100644 index 96e0c8291f2..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditBarGraphRequestQueryConverterTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditBarGraphQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -public class AuditBarGraphRequestQueryConverterTest extends AbstractRequestConverterTest { - - private AuditBarGraphRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new AuditBarGraphRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - AuditBarGraphRequest request = new AuditBarGraphQueryRequest(); - request.setUserList("joe,steven"); - // WHEN - fillBaseLogRequestWithTestData(request); - request.setUnit("+1HOUR"); - // THEN - SolrQuery query = underTest.convert(request); - assertEquals("?q=*%3A*&facet=true&facet.pivot=%7B%21range%3Dr1%7Drepo&facet.mincount=1&facet.limit=-1" + - "&facet.sort=index&facet.range=%7B%21tag%3Dr1%7DevtTime&f.evtTime.facet.range.start=2016-09-13T22%3A00%3A01.000Z" + - "&f.evtTime.facet.range.end=2016-09-14T22%3A00%3A01.000Z&f.evtTime.facet.range.gap=%2B1HOUR&rows=0&start=0" + - "&fq=cluster%3Acl1&fq=reqUser%3A%28joe+OR+steven%29&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage", - query.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - AuditBarGraphRequest request = new AuditBarGraphQueryRequest(); - // WHEN - SolrQuery query = underTest.convert(request); - // THEN - assertEquals(Integer.valueOf(0), query.getRows()); - assertEquals(-1, query.getFacetLimit()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java deleted file mode 100644 index 601829a8e81..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditComponentQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.query.SimpleFacetQuery; - -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertEquals; - -public class AuditComponentRequestQueryConverterTest extends AbstractRequestConverterTest { - - private AuditComponentsRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new AuditComponentsRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - AuditComponentRequest request = new AuditComponentQueryRequest(); - fillCommonRequestWithTestData(request); - // WHEN - SimpleFacetQuery facetQuery = underTest.convert(request); - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(facetQuery); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B*+TO+*%5D&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.field=repo", - query.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - AuditComponentRequest request = new AuditComponentQueryRequest(); - // WHEN - SimpleFacetQuery facetQuery = underTest.convert(request); - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(facetQuery); - // THEN - assertNotNull(facetQuery); - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.field=repo", - query.toQueryString()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditLogRequestConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditLogRequestConverterTest.java deleted file mode 100644 index 2ccc74129e3..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditLogRequestConverterTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditLogQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.query.SimpleQuery; - -import static org.junit.Assert.assertEquals; - -public class AuditLogRequestConverterTest extends AbstractRequestConverterTest { - - private AuditLogRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new AuditLogRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - AuditLogRequest request = new AuditLogQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setUserList("joe,steven"); - // WHEN - SimpleQuery simpleQuery = underTest.convert(request); - SolrQuery queryResult = new DefaultQueryParser().doConstructSolrQuery(simpleQuery); - // THEN - assertEquals("?q=*%3A*&start=0&rows=25&fq=repo%3A%28logsearch_app+OR+secure_log%29&fq=-repo%3A%28hst_agent+OR+system_message%29" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=cluster%3Acl1&fq=reqUser%3A%28joe+OR+steven%29&sort=evtTime+desc%2Cseq_num+desc", - queryResult.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - AuditLogRequest request = new AuditLogQueryRequest(); - // WHEN - SimpleQuery simpleQuery = underTest.convert(request); - SolrQuery queryResult = new DefaultQueryParser().doConstructSolrQuery(simpleQuery); - // THEN - assertEquals("?q=*%3A*&start=0&rows=99999&sort=evtTime+desc%2Cseq_num+desc", queryResult.toQueryString()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditServiceLoadRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditServiceLoadRequestQueryConverterTest.java deleted file mode 100644 index 9d3d14547ab..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/AuditServiceLoadRequestQueryConverterTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest; -import org.apache.ambari.logsearch.model.request.impl.query.AuditServiceLoadQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class AuditServiceLoadRequestQueryConverterTest extends AbstractRequestConverterTest { - - private AuditServiceLoadRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new AuditServiceLoadRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - AuditServiceLoadRequest request = new AuditServiceLoadQueryRequest(); - fillBaseLogRequestWithTestData(request); - // WHEN - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=repo%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-repo%3A%28hst_agent+OR+system_message%29&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=10&facet.field=repo", solrQuery.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - AuditServiceLoadRequest request = new AuditServiceLoadQueryRequest(); - // WHEN - SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=10&facet.field=repo", - solrQuery.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java deleted file mode 100644 index 45745dac6d5..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceLogRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogQueryRequest; -import org.apache.ambari.logsearch.util.SolrUtil; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.query.SimpleQuery; - -import static org.junit.Assert.assertEquals; - -public class BaseServiceLogRequestQueryConverterTest extends AbstractRequestConverterTest { - - private BaseServiceLogRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new BaseServiceLogRequestQueryConverter(); - } - - @Test - public void testConvertRequest() { - // GIVEN - ServiceLogRequest logRequest = new ServiceLogQueryRequest(); - fillBaseLogRequestWithTestData(logRequest); - logRequest.setLevel("FATAL,ERROR,WARN,UNKNOWN"); - logRequest.setFileName("/var/log/myfile-*-hdfs.log"); - logRequest.setComponentName("component"); - logRequest.setHostList("logsearch1.com,logsearch2.com"); - // WHEN - SimpleQuery query = underTest.convert(logRequest); - DefaultQueryParser defaultQueryParser = new DefaultQueryParser(); - SolrQuery solrQuery = defaultQueryParser.doConstructSolrQuery(query); - SolrUtil.removeDoubleOrTripleEscapeFromFilters(solrQuery); - // THEN - assertEquals("?q=*%3A*&start=0&rows=25&fq=type%3A%28logsearch_app+OR+secure_log%29&fq=-type%3A%28hst_agent+OR+system_message%29" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=cluster%3Acl1&fq=path%3A%5C%2Fvar%5C%2Flog%5C%2Fmyfile%5C-%5C*%5C-hdfs.log" + - "&fq=type%3Acomponent&fq=level%3A%28FATAL+OR+ERROR+OR+WARN+OR+UNKNOWN%29&fq=host%3A%28logsearch1.com+OR+logsearch2.com%29" + - "&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D&sort=logtime+desc%2Cseq_num+desc", - solrQuery.toQueryString()); - } - - @Test - public void testConvertRequestWithoutData() { - // GIVEN - ServiceLogRequest logRequest = new ServiceLogQueryRequest(); - // WHEN - SimpleQuery query = underTest.convert(logRequest); - // THEN - assertEquals(Integer.valueOf(99999), query.getRows()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/EventHistoryRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/EventHistoryRequestQueryConverterTest.java deleted file mode 100644 index b26ab48cd7b..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/EventHistoryRequestQueryConverterTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.EventHistoryRequest; -import org.apache.ambari.logsearch.model.request.impl.query.EventHistoryQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -public class EventHistoryRequestQueryConverterTest extends AbstractRequestConverterTest { - - private EventHistoryRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new EventHistoryRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - EventHistoryRequest request = new EventHistoryQueryRequest(); - request.setRowType("myRowType"); // TODO: validate these 3 fields @Valid on EventHistoryRequest object -> not null - request.setFilterName("myFilterName"); - request.setClusters("cl1,cl2"); - // WHEN - SolrQuery queryResult = underTest.convert(request); - // THEN - assertEquals("?q=*%3A*&fq=rowtype%3AmyRowType&fq=filtername%3A*myFilterName*&fq=cluster%3A%28cl1+OR+cl2%29&start=0&rows=10&sort=filtername+asc", - queryResult.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/FieldAuditLogRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/FieldAuditLogRequestQueryConverterTest.java deleted file mode 100644 index 669ef1ac3a0..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/FieldAuditLogRequestQueryConverterTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest; -import org.apache.ambari.logsearch.model.request.impl.query.FieldAuditLogQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class FieldAuditLogRequestQueryConverterTest extends AbstractRequestConverterTest { - - private FieldAuditLogRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new FieldAuditLogRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - FieldAuditLogRequest request = new FieldAuditLogQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setField("myfield"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=repo%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-repo%3A%28hst_agent+OR+system_message%29&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=-1&facet.pivot=myfield%2Crepo", - query.toQueryString()); - } - - @Test(expected = IllegalArgumentException.class) // TODO: later use @Valid on the fields to validate object - public void testConvertWithoutData() { - // GIVEN - FieldAuditLogRequest request = new FieldAuditLogQueryRequest(); - // WHEN - new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/HostLogFilesRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/HostLogFilesRequestQueryConverterTest.java deleted file mode 100644 index 2818cfb1461..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/HostLogFilesRequestQueryConverterTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.HostLogFilesRequest; -import org.apache.ambari.logsearch.model.request.impl.query.HostLogFilesQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class HostLogFilesRequestQueryConverterTest extends AbstractRequestConverterTest { - - private HostLogFilesRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new HostLogFilesRequestQueryConverter(); - } - - @Test - public void testConvertHostNameOnly() { - // GIVEN - HostLogFilesRequest request = new HostLogFilesQueryRequest(); - request.setHostName("hostName"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=host%3A%28hostName%29&rows=0&facet=true&facet.mincount=1&facet.limit=-1&facet.pivot=type%2Cpath", - query.toQueryString()); - } - - @Test - public void testConvertHostNameAndComponentName() { - // GIVEN - HostLogFilesRequest request = new HostLogFilesQueryRequest(); - request.setHostName("hostName"); - request.setComponentName("componentName"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=host%3A%28hostName%29+AND+type%3A%28componentName%29&rows=0&facet=true&facet.mincount=1&facet.limit=-1" + - "&facet.pivot=type%2Cpath", query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestConverterTest.java deleted file mode 100644 index 02819f1636c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestConverterTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceAnyGraphQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogAnyGraphRequestConverterTest extends AbstractRequestConverterTest { - - private ServiceLogAnyGraphRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogAnyGraphRequestQueryConverter(); - } - - @Test - public void testConverter() { - // GIVEN - ServiceAnyGraphRequest request = new ServiceAnyGraphQueryRequest(); - request.setUnit("+1HOUR"); - fillBaseLogRequestWithTestData(request); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-type%3A%28hst_agent+OR+system_message%29&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=-1&facet.field=level", - query.toQueryString()); - } - - @Test - public void testConverterWithoutData() { - // GIVEN - ServiceAnyGraphRequest request = new ServiceAnyGraphQueryRequest(); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=-1&facet.field=level", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java deleted file mode 100644 index f395c88e5ed..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentLevelRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogComponentLevelQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogComponentLevelRequestQueryConverterTest extends AbstractRequestConverterTest { - - private ServiceLogComponentLevelRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogComponentLevelRequestQueryConverter(); - } - - @Test - public void testConverter() { - // GIVEN - ServiceLogComponentLevelRequest request = new ServiceLogComponentLevelQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setComponentName("mycomponent"); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-type%3A%28hst_agent+OR+system_message%29&fq=type%3Amycomponent&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29&fq=cluster%3Acl1" + - "&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Clevel", - query.toQueryString()); - } - - @Test - public void testConverterWithoutData() { - // GIVEN - ServiceLogComponentLevelRequest request = new ServiceLogComponentLevelQueryRequest(); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B*+TO+*%5D&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29&facet=true" + - "&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Clevel", query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java deleted file mode 100644 index 8ece866f791..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentHostRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogComponentHostQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogComponentRequestFacetQueryConverterTest extends AbstractRequestConverterTest { - - private ServiceLogComponentRequestFacetQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogComponentRequestFacetQueryConverter(); - } - - @Test - public void testConverter() { - // GIVEN - ServiceLogComponentHostRequest request = new ServiceLogComponentHostQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setComponentName("mycomponent"); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-type%3A%28hst_agent+OR+system_message%29&fq=type%3Amycomponent&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29" + - "&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Chost%2Clevel&facet.pivot=type%2Clevel", - query.toQueryString()); - } - - @Test - public void testConverterWithoutData() { - // GIVEN - ServiceLogComponentHostRequest request = new ServiceLogComponentHostQueryRequest(); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B*+TO+*%5D&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29" + - "&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Chost%2Clevel&facet.pivot=type%2Clevel", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverterTest.java deleted file mode 100644 index 54d343545e9..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverterTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogLevelCountQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogLevelCountRequestQueryConverterTest extends AbstractRequestConverterTest { - - private ServiceLogLevelCountRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogLevelCountRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - ServiceLogLevelCountRequest request = new ServiceLogLevelCountQueryRequest(); - fillBaseLogRequestWithTestData(request); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-type%3A%28hst_agent+OR+system_message%29&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=-1&facet.field=level", - query.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - ServiceLogLevelCountRequest request = new ServiceLogLevelCountQueryRequest(); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=-1&facet.field=level", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverterTest.java deleted file mode 100644 index 026d273b484..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverterTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceGraphQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogLevelDateRangeRequestQueryConverterTest extends AbstractRequestConverterTest { - - private ServiceLogLevelDateRangeRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogLevelDateRangeRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - ServiceGraphRequest request = new ServiceGraphQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setUnit("+1HOUR"); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = underTest.convert(request); - // THEN - assertEquals("?q=*%3A*&facet=true&facet.pivot=%7B%21range%3Dr1%7Dlevel&facet.mincount=1&facet.limit=-1" + - "&facet.sort=index&facet.range=%7B%21tag%3Dr1%7Dlogtime&f.logtime.facet.range.start=2016-09-13T22%3A00%3A01.000Z" + - "&f.logtime.facet.range.end=2016-09-14T22%3A00%3A01.000Z&f.logtime.facet.range.gap=%2B1HOUR&rows=0&start=0&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29&fq=cluster%3Acl1&fq=type%3A%28logsearch_app+OR+secure_log%29&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage", query.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - ServiceGraphRequest request = new ServiceGraphQueryRequest(); - request.setUnit("+1HOUR"); // minimal data for date range gap - request.setFrom("2016-09-13T22:00:01.000Z"); - request.setTo("2016-09-14T22:00:01.000Z"); - // WHEN - SolrQuery query = underTest.convert(request); - // THEN - assertEquals("?q=*%3A*&facet=true&facet.pivot=%7B%21range%3Dr1%7Dlevel&facet.mincount=1&facet.limit=-1&facet.sort=index" + - "&facet.range=%7B%21tag%3Dr1%7Dlogtime&f.logtime.facet.range.start=2016-09-13T22%3A00%3A01.000Z" + - "&f.logtime.facet.range.end=2016-09-14T22%3A00%3A01.000Z&f.logtime.facet.range.gap=%2B1HOUR&rows=0&start=0", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverterTest.java deleted file mode 100644 index 7c6c9fd68b8..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverterTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceLogHostComponentRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogHostComponentQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogTreeRequestFacetQueryConverterTest extends AbstractRequestConverterTest { - private ServiceLogTreeRequestFacetQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogTreeRequestFacetQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - ServiceLogHostComponentRequest request = new ServiceLogHostComponentQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-type%3A%28hst_agent+OR+system_message%29&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29&fq=cluster%3Acl1&facet=true" + - "&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=host%2Ctype%2Clevel&facet.pivot=host%2Clevel", - query.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - ServiceLogHostComponentRequest request = new ServiceLogHostComponentQueryRequest(); - request.setLevel("WARN,ERROR,FATAL"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B*+TO+*%5D&fq=level%3A%28WARN+OR+ERROR+OR+FATAL%29" + - "&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=host%2Ctype%2Clevel&facet.pivot=host%2Clevel", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogTruncatedRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogTruncatedRequestQueryConverterTest.java deleted file mode 100644 index b2b701406cc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogTruncatedRequestQueryConverterTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest; -import org.apache.ambari.logsearch.model.request.impl.query.ServiceLogTruncatedQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class ServiceLogTruncatedRequestQueryConverterTest extends AbstractRequestConverterTest { - - private ServiceLogTruncatedRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new ServiceLogTruncatedRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - ServiceLogTruncatedRequest request = new ServiceLogTruncatedQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setScrollType("0"); - request.setNumberRows(10); - request.setId("id"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&start=0&rows=10&fq=type%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-type%3A%28hst_agent+OR+system_message%29&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage" + - "&fq=cluster%3Acl1&sort=logtime+desc%2Cseq_num+desc", - query.toQueryString()); - } - - @Test - public void testConvertWithoutData() { - // GIVEN - ServiceLogTruncatedRequest request = new ServiceLogTruncatedQueryRequest(); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&start=0&sort=logtime+desc%2Cseq_num+desc", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/StringFieldFacetQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/StringFieldFacetQueryConverterTest.java deleted file mode 100644 index 35977794e1c..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/StringFieldFacetQueryConverterTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; -import org.springframework.data.solr.core.query.SimpleQuery; - -import static org.junit.Assert.assertEquals; - -public class StringFieldFacetQueryConverterTest extends AbstractRequestConverterTest { - private StringFieldFacetQueryConverter underTest; - - @Before - public void setUp() { - underTest = new StringFieldFacetQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - String field = "myfield"; - // WHEN - SimpleQuery simpleQuery = underTest.convert(field); - SolrQuery queryResult = new DefaultQueryParser().doConstructSolrQuery(simpleQuery); - // THEN - assertEquals("?q=*%3A*&rows=0&facet=true&facet.mincount=1&facet.limit=-1&facet.field=myfield", - queryResult.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/TopFieldAuditLogRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/TopFieldAuditLogRequestQueryConverterTest.java deleted file mode 100644 index b1b4d4964d4..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/TopFieldAuditLogRequestQueryConverterTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.TopFieldAuditLogRequest; -import org.apache.ambari.logsearch.model.request.impl.query.TopFieldAuditLogQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class TopFieldAuditLogRequestQueryConverterTest extends AbstractRequestConverterTest { - - private TopFieldAuditLogRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new TopFieldAuditLogRequestQueryConverter(); - } - - @Test - public void testConvert() { - // GIVEN - TopFieldAuditLogRequest request = new TopFieldAuditLogQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setTop(10); - request.setField("myfield"); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=repo%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-repo%3A%28hst_agent+OR+system_message%29&fq=cluster%3Acl1&facet=true&facet.mincount=1&facet.limit=10&facet.pivot=myfield%2Crepo", - query.toQueryString()); - } - - @Test(expected = IllegalArgumentException.class) // TODO: later use @Valid on the fields to validate object - public void testConvertWithoutData() { - // GIVEN - TopFieldAuditLogRequest request = new TopFieldAuditLogQueryRequest(); - // WHEN - new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/UserExportRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/UserExportRequestQueryConverterTest.java deleted file mode 100644 index 1ec6414eb65..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/converter/UserExportRequestQueryConverterTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.converter; - -import org.apache.ambari.logsearch.model.request.impl.UserExportRequest; -import org.apache.ambari.logsearch.model.request.impl.query.UserExportQueryRequest; -import org.apache.solr.client.solrj.SolrQuery; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.solr.core.DefaultQueryParser; - -import static org.junit.Assert.assertEquals; - -public class UserExportRequestQueryConverterTest extends AbstractRequestConverterTest { - private UserExportRequestQueryConverter underTest; - - @Before - public void setUp() { - underTest = new UserExportRequestQueryConverter(); - } - - @Test - public void testConverter() { - // GIVEN - UserExportRequest request = new UserExportQueryRequest(); - fillBaseLogRequestWithTestData(request); - request.setFormat("myFormat"); - request.setClusters(null); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" + - "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=repo%3A%28logsearch_app+OR+secure_log%29" + - "&fq=-repo%3A%28hst_agent+OR+system_message%29&facet=true&facet.mincount=1&facet.limit=-1&facet.pivot=reqUser%2Crepo&facet.pivot=resource%2Crepo", - query.toQueryString()); - } - - @Test - public void testConverterWithoutData() { - // GIVEN - UserExportRequest request = new UserExportQueryRequest(); - // WHEN - SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request)); - // THEN - assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=-1" + - "&facet.pivot=reqUser%2Crepo&facet.pivot=resource%2Crepo", - query.toQueryString()); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/dao/RoleDaoTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/dao/RoleDaoTest.java deleted file mode 100644 index 3cb1f98bdfc..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/dao/RoleDaoTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.dao; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.core.GrantedAuthority; - -import java.util.Arrays; -import java.util.List; - -public class RoleDaoTest { - - private RoleDao underTest; - - @Before - public void setUp() { - underTest = new RoleDao(); - AuthPropsConfig authPropsConfig = new AuthPropsConfig(); - authPropsConfig.setFileAuthorization(true); - underTest.setAuthPropsConfig(authPropsConfig); - } - - @Test - public void testCreateDefaultAuthorities() { - // GIVEN - // WHEN - List authorityList = RoleDao.createDefaultAuthorities(); - // THEN - Assert.assertEquals("ROLE_USER", authorityList.get(0).getAuthority()); - } - - @Test - public void testGetRolesForUser() { - // GIVEN - List roles = Arrays.asList("admin", "user"); - underTest.getSimpleRolesMap().put("user1", roles); - // WHEN - List result1 = underTest.getRolesForUser("user1"); - List result2 = underTest.getRolesForUser("user2"); - // THEN - Assert.assertEquals(result1.size(), 2); - Assert.assertEquals(result2.size(), 0); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/rest/error/GeneralExceptionMapperTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/rest/error/GeneralExceptionMapperTest.java deleted file mode 100644 index d891301077f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/rest/error/GeneralExceptionMapperTest.java +++ /dev/null @@ -1,84 +0,0 @@ -package org.apache.ambari.logsearch.rest.error; - -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; -import static javax.ws.rs.core.Response.Status.NOT_FOUND; -import static org.apache.ambari.logsearch.rest.error.GeneralExceptionMapper.INTERNAL_SERVER_ERROR_MESSAGE; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.manager.NotFoundException; -import org.junit.Test; - -import com.fasterxml.jackson.databind.exc.InvalidTypeIdException; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -public class GeneralExceptionMapperTest { - @Test - public void testToResponseAddsGeneralMessageWhenStatusIsGreaterOrEqualsTo500() { - Response response = GeneralExceptionMapper.toResponse(new Exception("Message not added"), INTERNAL_SERVER_ERROR); - StatusMessage statusMessage = (StatusMessage) response.getEntity(); - assertThat(statusMessage.getStatus(), is(INTERNAL_SERVER_ERROR.getStatusCode())); - assertThat(statusMessage.getMessage(), is(INTERNAL_SERVER_ERROR_MESSAGE)); - } - - @Test - public void testToResponseAddsMessageFromExceptionWhenStatusIsLessThan500() { - Response response = GeneralExceptionMapper.toResponse(new Exception("Message in exception"), BAD_REQUEST); - StatusMessage statusMessage = (StatusMessage) response.getEntity(); - assertThat(statusMessage.getStatus(), is(BAD_REQUEST.getStatusCode())); - assertThat(statusMessage.getMessage(), is("Message in exception")); - } - - @Test - public void testToResponseSetsTheGivenStatusCode() { - Response response = GeneralExceptionMapper.toResponse(new Exception("any"), BAD_REQUEST); - assertThat(response.getStatus(), is(BAD_REQUEST.getStatusCode())); - } - - @Test - public void testToResponseSetsApplicationJsonContentType() { - Response response = GeneralExceptionMapper.toResponse(new Exception("any"), BAD_REQUEST); - assertThat(response.getHeaders().get("Content-type").get(0), is(MediaType.APPLICATION_JSON_TYPE)); - } - - @Test - public void testToResponseSetsStatus500WhenUnexpectedException() { - Response response = new GeneralExceptionMapper().toResponse(new RuntimeException("Unexpected")); - assertThat(response.getStatus(), is(INTERNAL_SERVER_ERROR.getStatusCode())); - } - - @Test - public void testToResponseSetsPredefinedStatusWhenExceptionIsExpected() { - Response response = new GeneralExceptionMapper().toResponse(new NotFoundException("Something missing!")); - assertThat(response.getStatus(), is(NOT_FOUND.getStatusCode())); - } - - @Test - public void testToResponseSetsPredefinedStatusWhenExceptionIsDerivedFromExpectedException() { - InvalidTypeIdException derivedFromJsonMappingException = new InvalidTypeIdException(null, "Invalid type", null, "any type id"); - Response response = new GeneralExceptionMapper().toResponse(derivedFromJsonMappingException); - assertThat(response.getStatus(), is(BAD_REQUEST.getStatusCode())); - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/filters/GlobalStateTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/filters/GlobalStateTest.java deleted file mode 100644 index bcd01729d37..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/filters/GlobalStateTest.java +++ /dev/null @@ -1,89 +0,0 @@ -package org.apache.ambari.logsearch.web.filters; - -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.conf.SolrPropsConfig; -import org.apache.ambari.logsearch.conf.SolrServiceLogPropsConfig; -import org.apache.ambari.logsearch.conf.global.SolrCollectionState; -import org.apache.ambari.logsearch.conf.global.SolrServiceLogsState; -import org.junit.Before; -import org.junit.Test; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -public class GlobalStateTest { - - - private SolrCollectionState solrCollectionState; - private SolrPropsConfig solrPropsConfig; - - @Before - public void setUp() { - solrCollectionState = new SolrServiceLogsState(); - solrPropsConfig = new SolrServiceLogPropsConfig(); - solrPropsConfig.setCollection("test_collection"); - solrPropsConfig.setConfigName("test_config"); - } - - @Test - public void testGetStatusMessageReturnsNullIfZnodeAndSolrCollectionIsReady() { - solrCollectionState.setZnodeReady(true); - solrCollectionState.setSolrCollectionReady(true); - solrCollectionState.setConfigurationUploaded(true); - - GlobalStateProvider globalState = new GlobalStateProvider(solrCollectionState, solrPropsConfig); - assertThat(globalState.getStatusMessage("/api/v1/test"), is(nullValue())); - } - - @Test - public void testGetStatusMessageReturnsZnodeIsNotReady() { - solrCollectionState.setZnodeReady(false); - solrCollectionState.setConfigurationUploaded(false); - solrCollectionState.setSolrCollectionReady(false); - - GlobalStateProvider globalState = new GlobalStateProvider(solrCollectionState, solrPropsConfig); - StatusMessage statusMessage = globalState.getStatusMessage("/api/v1/test"); - assertThat(statusMessage.getMessage().contains("ZNode is not available"), is(true)); - } - - @Test - public void testGetStatusMessageReturnsZkConfingNotReady() { - solrCollectionState.setZnodeReady(true); - solrCollectionState.setConfigurationUploaded(false); - solrCollectionState.setSolrCollectionReady(false); - - GlobalStateProvider globalState = new GlobalStateProvider(solrCollectionState, solrPropsConfig); - StatusMessage statusMessage = globalState.getStatusMessage("/api/v1/test"); - assertThat(statusMessage.getMessage().contains("Collection configuration has not uploaded yet"), is(true)); - } - - @Test - public void testGetStatusMessageReturnsSolrCollectionNotReady() { - solrCollectionState.setZnodeReady(true); - solrCollectionState.setConfigurationUploaded(true); - solrCollectionState.setSolrCollectionReady(false); - - GlobalStateProvider globalState = new GlobalStateProvider(solrCollectionState, solrPropsConfig); - StatusMessage statusMessage = globalState.getStatusMessage("/api/v1/test"); - assertThat(statusMessage.getMessage().contains("Solr has not accessible yet"), is(true)); - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/filters/LogsearchFilterTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/filters/LogsearchFilterTest.java deleted file mode 100644 index bd874c29e65..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/filters/LogsearchFilterTest.java +++ /dev/null @@ -1,116 +0,0 @@ -package 
org.apache.ambari.logsearch.web.filters; - -import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.strictMock; -import static org.easymock.EasyMock.verify; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; - -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.Map; - -import javax.servlet.FilterChain; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.ambari.logsearch.common.StatusMessage; -import org.apache.ambari.logsearch.util.JSONUtil; -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.web.util.matcher.RequestMatcher; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -public class LogsearchFilterTest { - private static final String REQUEST_URI = "/api/v1/test"; - private RequestMatcher requestMatcher; - private StatusProvider statusProvider; - private HttpServletRequest servletRequest; - private HttpServletResponse servletResponse; - private FilterChain filterChain; - - @Before - public void setUp() { - requestMatcher = strictMock(RequestMatcher.class); - statusProvider = strictMock(StatusProvider.class); - servletRequest = strictMock(HttpServletRequest.class); - servletResponse = strictMock(HttpServletResponse.class); - filterChain = strictMock(FilterChain.class); - - expect(servletRequest.getRequestURI()).andReturn(REQUEST_URI).anyTimes(); - } - - @Test - public void testDoFilterCallsFilterChainDoFilterIfRequestURIDoesNotMatch() throws Exception { - expect(requestMatcher.matches(servletRequest)).andReturn(false); - filterChain.doFilter(servletRequest, servletResponse); expectLastCall(); - - replay(requestMatcher, statusProvider, servletRequest, servletResponse, filterChain); - - LogsearchFilter filter = new LogsearchFilter(requestMatcher, statusProvider); - filter.doFilter(servletRequest, servletResponse, filterChain); - - verify(requestMatcher, statusProvider, servletRequest, servletResponse, filterChain); - } - - @Test - public void testDoFilterCallsFilterChainDoFilterIfNoError() throws Exception { - expect(requestMatcher.matches(servletRequest)).andReturn(true).anyTimes(); - expect(statusProvider.getStatusMessage(REQUEST_URI)).andReturn(null); - filterChain.doFilter(servletRequest, servletResponse); expectLastCall(); - - replay(requestMatcher, statusProvider, servletRequest, servletResponse, filterChain); - - LogsearchFilter filter = new LogsearchFilter(requestMatcher, statusProvider); - filter.doFilter(servletRequest, servletResponse, filterChain); - - verify(requestMatcher, statusProvider, servletRequest, servletResponse, filterChain); - } - - @Test - public void testDoFilterWritesStatusMessageInCaseOfAnError() throws Exception { - StatusMessage statusMessage = StatusMessage.with(INTERNAL_SERVER_ERROR, "Error occurred"); - StringWriter stringWriter = new StringWriter(); - - expect(requestMatcher.matches(servletRequest)).andReturn(true).anyTimes(); - expect(statusProvider.getStatusMessage(REQUEST_URI)).andReturn(statusMessage); - expect(servletRequest.getRequestURL()).andReturn(new StringBuffer(REQUEST_URI)).anyTimes(); - servletResponse.setStatus(statusMessage.getStatus()); expectLastCall(); - servletResponse.setContentType("application/json"); expectLastCall(); - expect(servletResponse.getWriter()).andReturn(new PrintWriter(stringWriter)); - - replay(requestMatcher, statusProvider, servletRequest, servletResponse, filterChain); - - LogsearchFilter filter = new LogsearchFilter(requestMatcher, statusProvider); - filter.doFilter(servletRequest, servletResponse, filterChain); - - verify(requestMatcher, statusProvider, servletRequest, servletResponse, filterChain); - - Map map = JSONUtil.jsonToMapObject(stringWriter.toString()); - assertThat(map, is(not(nullValue()))); - assertThat(map.get("status"), is(statusMessage.getStatus())); - assertThat(map.get("message"), is(statusMessage.getMessage())); - } -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProviderTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProviderTest.java deleted file mode 100644 index 
a6817dada2f..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProviderTest.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.security; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.authentication.TestingAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; - -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertSame; -import static junit.framework.Assert.assertTrue; -import static org.easymock.EasyMock.strictMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; - -import java.lang.reflect.Field; - -public class LogsearchAuthenticationProviderTest { - private static final Authentication SUCCESSFUL_AUTHENTICATION = new TestingAuthenticationToken("principal", "credentials"); - private static final Authentication FAILED_AUTHENTICATION = new TestingAuthenticationToken("principal", "credentials"); - static { - SUCCESSFUL_AUTHENTICATION.setAuthenticated(true); - FAILED_AUTHENTICATION.setAuthenticated(false); - } - - private LogsearchAuthenticationProvider provider; - - private LogsearchFileAuthenticationProvider mockFileProvider; - private LogsearchExternalServerAuthenticationProvider mockExternalServerProvider; - private LogsearchSimpleAuthenticationProvider mockSimpleProvider; - - @Before - public void resetContext() throws Exception { - provider = new LogsearchAuthenticationProvider(); - - mockFileProvider = strictMock(LogsearchFileAuthenticationProvider.class); - mockExternalServerProvider = strictMock(LogsearchExternalServerAuthenticationProvider.class); - mockSimpleProvider = strictMock(LogsearchSimpleAuthenticationProvider.class); - - Field fileProviderField = LogsearchAuthenticationProvider.class.getDeclaredField("fileAuthenticationProvider"); - fileProviderField.setAccessible(true); - fileProviderField.set(provider, mockFileProvider); - - Field extarnalProviderField = LogsearchAuthenticationProvider.class.getDeclaredField("externalServerAuthenticationProvider"); - extarnalProviderField.setAccessible(true); - extarnalProviderField.set(provider, mockExternalServerProvider); - - Field simpleProviderField = LogsearchAuthenticationProvider.class.getDeclaredField("simpleAuthenticationProvider"); - simpleProviderField.setAccessible(true); - simpleProviderField.set(provider, mockSimpleProvider); - } - - @Test - public void testFileAuthenticates() { - Authentication authentication = new 
TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andReturn(SUCCESSFUL_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - Authentication authenticationResult = provider.authenticate(authentication); - assertSame(authenticationResult, SUCCESSFUL_AUTHENTICATION); - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } - - @Test - public void testExternalAuthenticates() { - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - expect(mockExternalServerProvider.authenticate(authentication)).andReturn(SUCCESSFUL_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - Authentication authenticationResult = provider.authenticate(authentication); - assertSame(authenticationResult, SUCCESSFUL_AUTHENTICATION); - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } - - @Test - public void testSimpleAuthenticates() { - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - expect(mockExternalServerProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - expect(mockSimpleProvider.authenticate(authentication)).andReturn(SUCCESSFUL_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - Authentication authenticationResult = provider.authenticate(authentication); - assertSame(authenticationResult, SUCCESSFUL_AUTHENTICATION); - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } - - @Test - public void testNoOneAuthenticates() { - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - expect(mockExternalServerProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - expect(mockSimpleProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - Authentication authenticationResult = provider.authenticate(authentication); - assertSame(authenticationResult, FAILED_AUTHENTICATION); - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } - - @Test - public void testOneExceptionAndAuthenticates() { - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andReturn(SUCCESSFUL_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - Authentication authenticationResult = provider.authenticate(authentication); - assertSame(authenticationResult, SUCCESSFUL_AUTHENTICATION); - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } - - @Test - public void testOneExceptionNoOneAuthenticates() { - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - expect(mockExternalServerProvider.authenticate(authentication)).andThrow(new AuthenticationException("msg1") {}); - 
expect(mockSimpleProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown AuthenticationException", false); - } catch(AuthenticationException e) { - assertEquals(e.getMessage(), "msg1"); - } - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } - - @Test - public void testTwoExceptionNoOneAuthenticates() { - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - expect(mockFileProvider.authenticate(authentication)).andThrow(new AuthenticationException("msg1") {}); - expect(mockExternalServerProvider.authenticate(authentication)).andThrow(new AuthenticationException("msg2") {}); - expect(mockSimpleProvider.authenticate(authentication)).andReturn(FAILED_AUTHENTICATION); - - replay(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown AuthenticationException", false); - } catch(AuthenticationException e) { - assertEquals(e.getMessage(), "msg1"); - } - - verify(mockFileProvider, mockSimpleProvider, mockExternalServerProvider); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProviderTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProviderTest.java deleted file mode 100644 index 7ec598d1761..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProviderTest.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.security; - -import org.apache.ambari.logsearch.common.ExternalServerClient; -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.TestingAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.authority.SimpleGrantedAuthority; - -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertSame; -import static junit.framework.Assert.assertTrue; -import static org.easymock.EasyMock.strictMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; - -import java.lang.reflect.Field; -import java.util.Arrays; - -public class LogsearchExternalServerAuthenticationProviderTest { - - private LogsearchExternalServerAuthenticationProvider provider; - private AuthPropsConfig mockAuthPropsConfig; - private ExternalServerClient mockExternalServerClient; - - @Before - public void init() throws Exception { - provider = new LogsearchExternalServerAuthenticationProvider(); - mockAuthPropsConfig = strictMock(AuthPropsConfig.class); - mockExternalServerClient = strictMock(ExternalServerClient.class); - - Field authPropsConfigField = LogsearchExternalServerAuthenticationProvider.class.getDeclaredField("authPropsConfig"); - authPropsConfigField.setAccessible(true); - authPropsConfigField.set(provider, mockAuthPropsConfig); - - Field externalServerClientField = LogsearchExternalServerAuthenticationProvider.class.getDeclaredField("externalServerClient"); - externalServerClientField.setAccessible(true); - externalServerClientField.set(provider, mockExternalServerClient); - } - - @Test - public void testAuthenticationDisabled() { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(false); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - assertSame(provider.authenticate(authentication), authentication); - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationEmptyUser() { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("", "credentials"); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Username can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationNullUser() { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken(null, "credentials"); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Username can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - - @Test - public void testAuthenticationEmptyPassword() { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", ""); - - try { - 
provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Password can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationNullPassword() { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", null); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Password can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationUnsuccessful() throws Exception { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(true); - expect(mockAuthPropsConfig.getExternalAuthLoginUrl()).andReturn("http://server.com?userName=$USERNAME"); - expect(mockAuthPropsConfig.getAllowedRoles()).andReturn(Arrays.asList("AMBARI.ADMINISTRATOR")); - expect(mockExternalServerClient.sendGETRequest("http://server.com?userName=principal", String.class, "principal", "credentials")) - .andReturn("{\"permission_name\": \"NOT.AMBARI.ADMINISTRATOR\" }"); - - replay(mockAuthPropsConfig, mockExternalServerClient); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch (BadCredentialsException e) { - assertEquals("Bad credentials", e.getMessage()); - } - - verify(mockAuthPropsConfig, mockExternalServerClient); - } - - @Test - public void testAuthenticationSuccessful() throws Exception { - expect(mockAuthPropsConfig.isAuthExternalEnabled()).andReturn(true); - expect(mockAuthPropsConfig.getExternalAuthLoginUrl()).andReturn("http://server.com?userName=$USERNAME"); - expect(mockAuthPropsConfig.getAllowedRoles()).andReturn(Arrays.asList("AMBARI.ADMINISTRATOR")); - expect(mockExternalServerClient.sendGETRequest("http://server.com?userName=principal", String.class, "principal", "credentials")) - .andReturn("{\"permission_name\": \"AMBARI.ADMINISTRATOR\" }"); - - replay(mockAuthPropsConfig, mockExternalServerClient); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - Authentication authenticationResult = provider.authenticate(authentication); - - assertEquals("principal", authenticationResult.getName()); - assertEquals("credentials", authenticationResult.getCredentials()); - assertEquals(1, authenticationResult.getAuthorities().size()); - assertEquals(new SimpleGrantedAuthority("ROLE_USER"), authenticationResult.getAuthorities().iterator().next()); - - verify(mockAuthPropsConfig, mockExternalServerClient); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProviderTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProviderTest.java deleted file mode 100644 index 407cc834bbb..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProviderTest.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.ambari.logsearch.web.security; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.apache.ambari.logsearch.util.CommonUtil; -import org.apache.ambari.logsearch.web.model.User; -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.TestingAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.authority.SimpleGrantedAuthority; -import org.springframework.security.core.userdetails.UserDetailsService; - -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertSame; -import static junit.framework.Assert.assertTrue; -import static org.easymock.EasyMock.strictMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; - -import java.lang.reflect.Field; -import java.util.Arrays; -import java.util.List; - -public class LogsearchFileAuthenticationProviderTest { - - private LogsearchFileAuthenticationProvider provider; - private AuthPropsConfig mockAuthPropsConfig; - private UserDetailsService mockUserDetailsService; - - @Before - public void init() throws Exception { - provider = new LogsearchFileAuthenticationProvider(); - mockAuthPropsConfig = strictMock(AuthPropsConfig.class); - mockUserDetailsService = strictMock(UserDetailsService.class); - - Field authPropsConfigField = LogsearchFileAuthenticationProvider.class.getDeclaredField("authPropsConfig"); - authPropsConfigField.setAccessible(true); - authPropsConfigField.set(provider, mockAuthPropsConfig); - - Field userDetailsServiceField = LogsearchFileAuthenticationProvider.class.getDeclaredField("userDetailsService"); - userDetailsServiceField.setAccessible(true); - userDetailsServiceField.set(provider, mockUserDetailsService); - } - - @Test - public void testAuthenticationDisabled() { - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(false); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - assertSame(provider.authenticate(authentication), authentication); - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationEmptyUser() { - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("", "credentials"); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Username can't be null or empty.", 
e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationNullUser() { - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken(null, "credentials"); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Username can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - - @Test - public void testAuthenticationEmptyPassword() { - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", ""); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Password can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationNullPassword() { - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", null); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Password can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationUnknownUser() { - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - expect(mockUserDetailsService.loadUserByUsername("principal")).andReturn(null); - - replay(mockAuthPropsConfig, mockUserDetailsService); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch (BadCredentialsException e) { - assertEquals("User not found.", e.getMessage()); - } - - verify(mockAuthPropsConfig, mockUserDetailsService); - } - - @Test - public void testAuthenticationNoPassword() { - List grantedAuths = Arrays.asList(new SimpleGrantedAuthority("ROLE_USER")); - User user = new User("principal", null, grantedAuths); - - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - expect(mockUserDetailsService.loadUserByUsername("principal")).andReturn(user); - - replay(mockAuthPropsConfig, mockUserDetailsService); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch (BadCredentialsException e) { - assertEquals("Password can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig, mockUserDetailsService); - } - - @Test - public void testAuthenticationWrongPassword() { - List grantedAuths = Arrays.asList(new SimpleGrantedAuthority("ROLE_USER")); - User user = new User("principal", CommonUtil.encryptPassword("principal", "notCredentials"), grantedAuths); - - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - expect(mockUserDetailsService.loadUserByUsername("principal")).andReturn(user); - - replay(mockAuthPropsConfig, mockUserDetailsService); - - Authentication authentication = new TestingAuthenticationToken("principal", 
"credentials"); - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch (BadCredentialsException e) { - assertEquals("Wrong password.", e.getMessage()); - } - - verify(mockAuthPropsConfig, mockUserDetailsService); - } - - @Test - public void testAuthenticationSuccessful() { - List grantedAuths = Arrays.asList(new SimpleGrantedAuthority("ROLE_USER")); - User user = new User("principal", CommonUtil.encryptPassword("principal", "credentials"), grantedAuths); - - expect(mockAuthPropsConfig.isAuthFileEnabled()).andReturn(true); - expect(mockUserDetailsService.loadUserByUsername("principal")).andReturn(user); - - replay(mockAuthPropsConfig, mockUserDetailsService); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - - Authentication authenticationResult = provider.authenticate(authentication); - assertEquals("principal", authenticationResult.getName()); - assertEquals(CommonUtil.encryptPassword("principal", "credentials"), authenticationResult.getCredentials()); - assertEquals(1, authenticationResult.getAuthorities().size()); - assertEquals(new SimpleGrantedAuthority("ROLE_USER"), authenticationResult.getAuthorities().iterator().next()); - - verify(mockAuthPropsConfig, mockUserDetailsService); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProviderTest.java b/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProviderTest.java deleted file mode 100644 index 7287012060d..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProviderTest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.ambari.logsearch.web.security; - -import org.apache.ambari.logsearch.conf.AuthPropsConfig; -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.TestingAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.authority.SimpleGrantedAuthority; - -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertSame; -import static junit.framework.Assert.assertTrue; -import static org.easymock.EasyMock.strictMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; - -import java.lang.reflect.Field; - -public class LogsearchSimpleAuthenticationProviderTest { - - private LogsearchSimpleAuthenticationProvider provider; - private AuthPropsConfig mockAuthPropsConfig; - - @Before - public void init() throws Exception { - provider = new LogsearchSimpleAuthenticationProvider(); - mockAuthPropsConfig = strictMock(AuthPropsConfig.class); - - Field f = LogsearchSimpleAuthenticationProvider.class.getDeclaredField("authPropsConfig"); - f.setAccessible(true); - f.set(provider, mockAuthPropsConfig); - } - - @Test - public void testAuthenticationDisabled() { - expect(mockAuthPropsConfig.isAuthSimpleEnabled()).andReturn(false); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - assertSame(provider.authenticate(authentication), authentication); - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationEmptyUser() { - expect(mockAuthPropsConfig.isAuthSimpleEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("", "credentials"); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Username can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationNullUser() { - expect(mockAuthPropsConfig.isAuthSimpleEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken(null, "credentials"); - - try { - provider.authenticate(authentication); - assertTrue("Should have thrown BadCredentialsException", false); - } catch(BadCredentialsException e) { - assertEquals("Username can't be null or empty.", e.getMessage()); - } - - verify(mockAuthPropsConfig); - } - - @Test - public void testAuthenticationSuccessful() { - expect(mockAuthPropsConfig.isAuthSimpleEnabled()).andReturn(true); - - replay(mockAuthPropsConfig); - - Authentication authentication = new TestingAuthenticationToken("principal", "credentials"); - - Authentication authenticationResult = provider.authenticate(authentication); - assertEquals("principal", authenticationResult.getName()); - assertEquals("credentials", authenticationResult.getCredentials()); - assertEquals(1, authenticationResult.getAuthorities().size()); - assertEquals(new SimpleGrantedAuthority("ROLE_USER"), authenticationResult.getAuthorities().iterator().next()); - - verify(mockAuthPropsConfig); - } -} diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/resources/logsearch.properties 
b/ambari-logsearch/ambari-logsearch-server/src/test/resources/logsearch.properties deleted file mode 100755 index 2715d1f6f46..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/resources/logsearch.properties +++ /dev/null @@ -1,33 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -logsearch.solr.audit.logs.config.name=test_audit_logs_config_name -logsearch.collection.audit.logs.numshards=123 -logsearch.collection.audit.logs.replication.factor=456 -logsearch.solr.collection.audit.logs=test_audit_logs_collection - -logsearch.solr.service.logs.config.name=test_service_logs_config_name -logsearch.collection.service.logs.numshards=789 -logsearch.collection.service.logs.replication.factor=987 -logsearch.solr.collection.service.logs=test_service_logs_collection -logsearch.service.logs.split.interval.mins=1 - -logsearch.solr.history.config.name=test_history_logs_config_name -logsearch.collection.history.replication.factor=234 -logsearch.solr.collection.history=test_history_logs_collection - -logsearch.auth.file.enable=true -logsearch.login.credentials.file=user_pass.json -logsearch.roles.allowed=AMBARI.ADMINISTRATOR diff --git a/ambari-logsearch/ambari-logsearch-server/src/test/resources/user_pass.json b/ambari-logsearch/ambari-logsearch-server/src/test/resources/user_pass.json deleted file mode 100644 index 0a04afe8291..00000000000 --- a/ambari-logsearch/ambari-logsearch-server/src/test/resources/user_pass.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "users": [{ - "name": "Test User Name", - "username": "testUserName", - "password": "testUserPassword", - "en_password": "" - }] -} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-web/.angular-cli.json b/ambari-logsearch/ambari-logsearch-web/.angular-cli.json deleted file mode 100644 index 267f5e0c52c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/.angular-cli.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$schema": "./node_modules/@angular/cli/lib/config/schema.json", - "project": { - "name": "ambari-logsearch-web", - "ejected": true - }, - "apps": [ - { - "root": "src", - "assets": [ - "assets", - "favicon.ico" - ], - "index": "index.html", - "main": "main.ts", - "polyfills": "polyfills.ts", - "test": "test.ts", - "testTsconfig": "tsconfig.spec.json", - "prefix": "", - "environmentSource": "environments/environment.ts", - "environments": { - "dev": "environments/environment.ts", - "prod": "environments/environment.prod.ts" - } - } - ], - "lint": [ - { - "project": "src/tsconfig.app.json" - }, - { - "project": "src/tsconfig.spec.json" - }, - { - "project": "e2e/tsconfig.e2e.json" - } - ], - "defaults": { - "styleExt": "less" - }, - "packageManager": "yarn" -} diff --git a/ambari-logsearch/ambari-logsearch-web/.editorconfig 
b/ambari-logsearch/ambari-logsearch-web/.editorconfig deleted file mode 100644 index 6e87a003da8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/.editorconfig +++ /dev/null @@ -1,13 +0,0 @@ -# Editor configuration, see http://editorconfig.org -root = true - -[*] -charset = utf-8 -indent_style = space -indent_size = 2 -insert_final_newline = true -trim_trailing_whitespace = true - -[*.md] -max_line_length = off -trim_trailing_whitespace = false diff --git a/ambari-logsearch/ambari-logsearch-web/.gitignore b/ambari-logsearch/ambari-logsearch-web/.gitignore deleted file mode 100644 index d64ee8c8e7e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/.gitignore +++ /dev/null @@ -1,45 +0,0 @@ -# See http://help.github.com/ignore-files/ for more about ignoring files. - -# compiled output -/dist -/tmp -/out-tsc - -# dependencies -/node_modules - -# IDEs and editors -/.idea -.project -.classpath -.c9/ -*.launch -.settings/ -*.sublime-workspace - -# IDE - VSCode -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json - -# misc -/.sass-cache -/connect.lock -/coverage -/libpeerconnection.log -npm-debug.log -testem.log -/typings - -# e2e -/e2e/*.js -/e2e/*.map - -# System Files -.DS_Store -Thumbs.db - -# Development Test Files -webpack.config.dev.js diff --git a/ambari-logsearch/ambari-logsearch-web/README.md b/ambari-logsearch/ambari-logsearch-web/README.md deleted file mode 100644 index 10ad300415d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/README.md +++ /dev/null @@ -1,218 +0,0 @@ -# AmbariLogsearchWeb - -This project was generated with [Angular CLI](https://github.com/angular/angular-cli) version 1.0.0. - -## Development server - -Run `npm start` or `yarn start` for a dev server. Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files. - -## Webpack Development Config -In order to use the UI without changing the main webpack config file (and commit accidentally) we can use a `webpack.config.dev.js` file for that. So you can set a service URL proxy. - -The content of the `webpack.config.dev.js` can be: -``` -const merge = require('webpack-merge'); -const baseConfig = require('./webpack.config.js'); - -module.exports = merge(baseConfig, { - devServer: { - historyApiFallback: true, - proxy: { - '/api': 'http://c7401.ambari.apache.org:61888/', // proxying the api requests - '/login': 'http://c7401.ambari.apache.org:61888/', // proxying the login action - '/logout': 'http://c7401.ambari.apache.org:61888/' // proxying the the logout action - } - } -}); -``` -And you can start it that way: `yarn start --config webpack.config.dev.js` - -## Code scaffolding - -Run `ng generate component component-name` to generate a new component. You can also use `ng generate directive/pipe/service/class/module`. - -## Build - -Run `npm run build` or `yarn build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `npm run build-prod` or `yarn build-prod` command for a production build. - -## Running unit tests - -Run `npm test` or `yarn test` to execute the unit tests via [Karma](https://karma-runner.github.io). - -## Running end-to-end tests - -Run `npm run e2e` or `yarn e2e` to execute the end-to-end tests via [Protractor](http://www.protractortest.org/). -Before running the tests make sure you are serving the app via `npm start` or `yarn start`. 
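
As a side note to the *Webpack Development Config* section above: if the proxied Log Search instance is served over HTTPS (for example behind a self-signed certificate), the string shorthand shown there may not be enough. `webpack-dev-server` also accepts an options object per proxy entry; a minimal sketch, with a placeholder host:

```
const merge = require('webpack-merge');
const baseConfig = require('./webpack.config.js');

module.exports = merge(baseConfig, {
  devServer: {
    historyApiFallback: true,
    proxy: {
      '/api': {
        target: 'https://logsearch.example.org:61888/', // placeholder host, not a real endpoint
        secure: false,      // accept self-signed certificates
        changeOrigin: true  // rewrite the Host header to match the target
      }
    }
  }
});
```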
- -## Further help - -To get more help on the Angular CLI use `ng help` or go check out the [Angular CLI README](https://github.com/angular/angular-cli/blob/master/README.md). - -# Application Info - -## Routing and URL params -We use [Matrix URIs](https://www.w3.org/DesignIssues/MatrixURIs.html) to route our components. That means within the components we use semicolon separated parameters. -You can create/generate Log Search Client application URL with the following paths and parameters - -URL pattern: `/logs/{logs-type};{filter-param1};...;{filter-paramN}` -Where the `{logs-type}` can be `serviceLogs` or `auditLogs` right now. - -For this screen the available URL params are the followings: -- `components`: - - type: filter parameter - - a comma separated list of components/services - - eg.: `components=activity_explorer,ambari_agent` -- `levels`: - - type: filter parameter - - a comma separated list of log levels - - eg.: `levels=INFO,WARN,ERROR,FATAL,DEBUG,TRACE,UNKNOW` -- `hosts`: - - type: filter parameter - - a comma separated list of hosts - - eg.: `hosts=c7401.ambari.apache.org,c7402.ambari.apache.org` -- `query`: - - type: filter parameter - - a JSON object with the following available keys as filters - - eg.: `[{"name": "level", "label": "Level", "value": "ERROR", "isExclude": false}]` - -### Time range matrix params -To filter for a range of time you can use the following matrix parameters: -- `timeRangeUnit`: the unit of the time - - `m`: minute - - `h`: hour - - `d`: day - - `w`: week - - `M`: month - - `y`: year -- `timeRangeType` - - `LAST`: it will count the time from the current moment back - - `PAST`: it will count the time from the end of the previous time unit (set up in `timeRangeUnit`) - - `CURRENT`: it will count the time from the end of the current time unit (set up in `timeRangeUnit`) - - `CUSTOM`: will check the `timeRangeStart` and the `timeRangeEnd` matrix parameters -- `timeRangeStart`: UTC + Time zone format, eg.: `timeRangeStart=2018-06-30T22:00:00.000Z` -- `timeRangeEnd`: UTC + Time zone format, eg.: `timeRangeStart=2018-06-30T23:00:00.000Z` - -#### Examples to defining time ranges: -Last 3 hours -`timeRangeType=LAST;timeRangeInterval=3;timeRangeUnit=h` -Last 7 days: -`timeRangeType=LAST;timeRangeInterval=7;timeRangeUnit=d` -Last 1 year -`timeRangeType=LAST;timeRangeInterval=1;timeRangeUnit=y` -Previous week -`timeRangeType=PAST;timeRangeUnit=w` -Previous month -`timeRangeType=PAST;timeRangeUnit=M` -Custom time range -`timeRangeType=CUSTOM;timeRangeStart=2018-07-01T10:06:00.000Z;timeRangeEnd=2018-07-02T13:06:00.000Z` - -##### The `query` param -###### Structure -The `query` matrix param is an array of filter params in JSON format. The item schema is the following: -``` -{ - name: string, - label: string, - value: string/number/boolean, - isExclude: boolean -} -``` -Where the `name` is for the field name that you want to filter, the `label` is what the filter input bar will display, the `value` is the filter value and the `isExclude` is a boolean value indicating if the given field value should be included or excluded from the search result. 
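
For illustration, here is a hypothetical TypeScript helper (not part of the application) that composes such a URL from a time range and a list of `query` filters. It assumes the JSON array is URL-encoded so that it survives as a single matrix parameter value:

```
// Shape of one entry of the `query` matrix parameter, as described above.
interface QueryFilter {
  name: string;
  label: string;
  value: string | number | boolean;
  isExclude: boolean;
}

// Builds /logs/{logs-type};key1=value1;...;query=<encoded JSON>
function buildLogsUrl(logsType: 'serviceLogs' | 'auditLogs',
                      matrixParams: { [key: string]: string },
                      query?: QueryFilter[]): string {
  const params: { [key: string]: string } = { ...matrixParams };
  if (query && query.length) {
    // Assumption: the JSON value is URL-encoded to keep it a single matrix segment.
    params['query'] = encodeURIComponent(JSON.stringify(query));
  }
  const segments = Object.keys(params).map(key => `${key}=${params[key]}`);
  return `/logs/${logsType};${segments.join(';')}`;
}

// Example: last 3 hours of service logs, ERROR level included, ambari_server excluded.
const url = buildLogsUrl(
  'serviceLogs',
  { timeRangeType: 'LAST', timeRangeInterval: '3', timeRangeUnit: 'h' },
  [
    { name: 'level', label: 'Level', value: 'ERROR', isExclude: false },
    { name: 'type', label: 'Component', value: 'ambari_server', isExclude: true }
  ]
);
// => /logs/serviceLogs;timeRangeType=LAST;timeRangeInterval=3;timeRangeUnit=h;query=%5B%7B...%7D%5D
```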
- -###### Available fields in the `query` matrix param for Service Logs -- `cluster` (label: Cluster) -- `method` (label: Method) -- `level` (label: Level) -- `ip` (label: IP) -- `key_log_messafe` (Key Log Message) -- `type` (label: Component) -- `path` (label: Path) -- `logtype` (label: Logtype) -- `file` (label: File) -- `line_number` (label: Line Number) -- `thread_name` (label: Thread) -- `host` (label: Host) -- `log_message`(label: Message) -- `logger_name` (label: Logger Name) -- `logfile_line_number` (label: Logfile Line Number) -- `logtime` (label: Log Time) -- `group` (label: Group) - -###### Example of a `query` matrix param - -``` -[{ - "name": "level", - "label": "Level", - "value": "ERROR", - "isExclude": false -},{ - "name": "type", - "label": "Component", - "value": "ambari_server", - "isExclude": true -}] -``` - -###### Available fields in the `query` matrix param for Audit Logs -- `cluster` (label: Cluster) -- `reason` (label: Reason) -- `ws_status` (label: Status) -- `agent` (label: Agent) -- `sess` (label: Session) -- `ws_repo_id` (label: Repo Id) -- `type` (label: Type) -- `path` (label: Path) -- `ws_details` (label: Details) -- `ugi` (label: UGI) -- `host` (label: Host) -- `case_id` (label: Case Id) -- `action` (label: Action) -- `id` (label: Id) -- `logger_name` (label: Logger Name) -- `text` (label: Text) -- `logfile_line_number` (label: Logfile Line Number) -- `ws_base_url` (label: Base Url) -- `level` (label: Level) -- `resource` (label: Resource) -- `resType` (label: Res Type) -- `ip` (label: IP) -- `req_self_id` (label: Req Self Id) -- `repoType` (label: Repo Type) -- `ws_roles` (label: Roles) -- `bundle_id` (label: Bundle Id) -- `cliType` (label: Client Type) -- `reqContext` (label: Req Context) -- `ws_result_status` (label: Result Status) -- `proxyUsers` (label: Proxy Users) -- `logType` (label: Log Type) -- `access` (label: Access Type) -- `dst` (label: DST) -- `perm` (label: Perm) -- `event_count` (label: Event Count) -- `repo` (label: Repo) -- `ws_request_id` (label: Request Id) -- `reqUser` (label: User) -- `task_id` (label: Task Id) -- `ws_consecutive_failures` (label: Consecutive Failures) -- `ws_stack_version` (label: Stack Version) -- `result` (label: Result) -- `ws_version_number` (label: Version Number) -- `reqData` (label: Req Data) -- `file` (label: File) -- `ws_repositories` (label: Repositories) -- `log_message` (label: Log Message) -- `ws_stack` (label: Stack) -- `agentHost` (label: Agent Host) -- `authType` (label: Auth Type) -- `ws_version_note` (label: Version Note) -- `policy` (label: Policy) -- `cliIP` (label: Client Ip) -- `ws_os` (label: Os) -- `ws_display_name` (label: Display Name) -- `ws_repo_version` (label: Repo Version) -- `evtTime` (label: Event Time) -- `req_caller_id` (label: Req Caller Id) -- `enforcer` (label: Access Enforcer) -- `ws_component` (label: Component) -- `ws_command` (label: Command) diff --git a/ambari-logsearch/ambari-logsearch-web/e2e/app.e2e-spec.ts b/ambari-logsearch/ambari-logsearch-web/e2e/app.e2e-spec.ts deleted file mode 100644 index ebfdcb97408..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/e2e/app.e2e-spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {AmbariLogsearchWebNewPage} from './app.po'; - -describe('ambari-logsearch-web-new App', () => { - let page: AmbariLogsearchWebNewPage; - - beforeEach(() => { - page = new AmbariLogsearchWebNewPage(); - }); - - it('should display title', () => { - page.navigateTo(); - expect(page.getParagraphText()).toEqual('Ambari Log Search'); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/e2e/app.po.ts b/ambari-logsearch/ambari-logsearch-web/e2e/app.po.ts deleted file mode 100644 index c446bfb03e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/e2e/app.po.ts +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { browser, element, by } from 'protractor'; - -export class AmbariLogsearchWebNewPage { - navigateTo() { - return browser.get('/'); - } - - getParagraphText() { - return element(by.css('app-root h1')).getText(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/e2e/tsconfig.e2e.json b/ambari-logsearch/ambari-logsearch-web/e2e/tsconfig.e2e.json deleted file mode 100644 index ac7a3732579..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/e2e/tsconfig.e2e.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "../out-tsc/e2e", - "module": "commonjs", - "target": "es5", - "types":[ - "jasmine", - "node" - ] - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/karma.conf.js b/ambari-logsearch/ambari-logsearch-web/karma.conf.js deleted file mode 100644 index 08608d863bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/karma.conf.js +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// Karma configuration file, see link for more information -// https://karma-runner.github.io/0.13/config/configuration-file.html - -module.exports = function (config) { - config.set({ - basePath: '', - frameworks: ['jasmine', '@angular/cli'], - plugins: [ - require('karma-jasmine'), - require('karma-phantomjs-launcher'), - require('karma-jasmine-html-reporter'), - require('karma-coverage-istanbul-reporter'), - require('@angular/cli/plugins/karma') - ], - client:{ - clearContext: false // leave Jasmine Spec Runner output visible in browser - }, - files: [ - { pattern: './src/test.ts', watched: false } - ], - preprocessors: { - './src/test.ts': ['@angular/cli'] - }, - mime: { - 'text/x-typescript': ['ts','tsx'] - }, - coverageIstanbulReporter: { - reports: ['html', 'lcovonly'], - fixWebpackSourcePaths: true - }, - angularCli: { - environment: 'dev' - }, - reporters: config.angularCli && config.angularCli.codeCoverage - ? ['progress', 'coverage-istanbul'] - : ['progress', 'kjhtml'], - port: 9876, - colors: true, - logLevel: config.LOG_INFO, - autoWatch: false, - browsers: ['PhantomJS'], - singleRun: true - }); -}; diff --git a/ambari-logsearch/ambari-logsearch-web/package.json b/ambari-logsearch/ambari-logsearch-web/package.json deleted file mode 100644 index 3639b54239f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/package.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "name": "ambari-logsearch-web", - "version": "3.0.0", - "license": "Apache-2.0", - "scripts": { - "ng": "ng", - "start": "webpack-dev-server --port=4200", - "build": "webpack", - "build-prod": "NODE_ENV=production webpack -p", - "test": "karma start ./karma.conf.js", - "lint": "ng lint", - "e2e": "protractor ./protractor.conf.js", - "pree2e": "webdriver-manager update --standalone false --gecko false --quiet" - }, - "dependencies": { - "@angular/animations": "^4.0.0", - "@angular/common": "^4.0.0", - "@angular/compiler": "^4.0.0", - "@angular/core": "^4.0.0", - "@angular/forms": "^4.0.0", - "@angular/http": "^4.0.0", - "@angular/platform-browser": "^4.0.0", - "@angular/platform-browser-dynamic": "^4.0.0", - "@angular/router": "^4.0.0", - "@ngrx/core": "^1.2.0", - "@ngrx/store": "^2.2.3", - "@ngrx/store-devtools": "3.2.4", - "@ngx-translate/core": "^6.0.1", - "@ngx-translate/http-loader": "^0.0.3", - "angular-moment-timezone": "^0.2.1", - "angular-pipes": "^6.5.3", - "angular2-moment": "^1.4.0", - "angular2-notifications": "0.4.46", - "bootstrap": "^3.3.7", - "core-js": "^2.4.1", - "d3": "^4.10.0", - "d3-scale-chromatic": "^1.1.1", - "font-awesome": "^4.7.0", - "jquery": "^1.12.4", - "moment": "^2.18.1", - "moment-timezone": "^0.5.13", - "ngx-bootstrap": "^2.0.5", - "rxjs": "^5.4.3", - "zone.js": "^0.8.4" - }, - "devDependencies": { - "@angular/cli": "^1.4.3", - "@angular/compiler-cli": "^4.0.0", - "@ngtools/webpack": "^1.7.1", - "@types/d3": "^4.10.0", - "@types/d3-scale-chromatic": "^1.1.0", - "@types/jasmine": "2.5.38", - "@types/jquery": "^1.10.33", - "@types/moment": "^2.13.0", - "@types/moment-timezone": "^0.2.34", - "@types/node": "~6.0.60", - "angular-in-memory-web-api": "^0.3.1", - 
"autoprefixer": "^6.5.3", - "circular-dependency-plugin": "^3.0.0", - "codelyzer": "~2.0.0", - "copy-webpack-plugin": "^4.0.1", - "css-loader": "^0.28.1", - "cssnano": "^3.10.0", - "exports-loader": "^0.6.3", - "file-loader": "^0.10.0", - "html-webpack-plugin": "^2.30.1", - "istanbul-instrumenter-loader": "^2.0.0", - "jasmine-core": "~2.5.2", - "jasmine-spec-reporter": "~3.2.0", - "karma": "~1.4.1", - "karma-cli": "~1.0.1", - "karma-coverage-istanbul-reporter": "^0.2.0", - "karma-jasmine": "~1.1.0", - "karma-jasmine-html-reporter": "^0.2.2", - "karma-phantomjs-launcher": "^1.0.4", - "less-loader": "^4.0.5", - "postcss-loader": "^1.3.3", - "postcss-url": "^5.1.2", - "protractor": "~5.1.0", - "randomatic": "^3.0.0", - "raw-loader": "^0.5.1", - "sass-loader": "^6.0.3", - "source-map-loader": "^0.2.0", - "style-loader": "^0.13.1", - "stylus-loader": "^3.0.1", - "ts-node": "~2.0.0", - "tslint": "~4.5.0", - "typescript": "~2.5.0", - "url-loader": "^0.5.7", - "webpack": "~3.6.0", - "webpack-concat-plugin": "1.4.0", - "webpack-dev-server": "~2.9.0" - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/pom.xml b/ambari-logsearch/ambari-logsearch-web/pom.xml deleted file mode 100644 index 953f0dfa74a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/pom.xml +++ /dev/null @@ -1,177 +0,0 @@ - - - - - ambari-logsearch - org.apache.ambari - 2.0.0.0-SNAPSHOT - - 4.0.0 - ambari-logsearch-web - Ambari Logsearch Web - 2.0.0.0-SNAPSHOT - org.apache.ambari - Ambari Logsearch Web - - - /tmp/logsearch_npm_config_tmp - v8.6.0 - v1.1.0 - - - - - - com.github.eirslett - frontend-maven-plugin - 1.6 - - ${node.version} - ${yarn.version} - ${project.build.directory}/webapp-build - false - - - ${logsearch.npm.config.tmp} - - - - - install node and yarn - generate-resources - - install-node-and-yarn - - - - yarn install - generate-resources - - yarn - - - install --ignore-engines --pure-lockfile - - - - webpack build - - webpack - - - generate-resources - - - production - - - -p - - - - run tests - test - - yarn - - - test - ${skipTests} - - - - - - maven-resources-plugin - 2.6 - - - process-webapp-sources - process-sources - - copy-resources - - - ${basedir}/target/webapp-build - - - ${basedir} - - e2e/** - src/** - .* - package.json - protactor.conf.js - yarn.lock - karma.conf.js - tsconfig.json - tslint.json - webpack.config.js - - - - - - - copy-resources - process-resources - - copy-resources - - - ${basedir}/target/classes/dist - - - ${basedir}/target/webapp-build/dist - false - - - - - - - - org.apache.rat - apache-rat-plugin - - - README.md - *.editorconfig - src/vendor/** - yarn.lock - node_modules/** - dist/** - **/*.json - **/*.log - **/*.txt - - - - - test - - check - - - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/protractor.conf.js b/ambari-logsearch/ambari-logsearch-web/protractor.conf.js deleted file mode 100644 index 6b4a31cf1b7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/protractor.conf.js +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// Protractor configuration file, see link for more information -// https://github.com/angular/protractor/blob/master/lib/config.ts - -const { SpecReporter } = require('jasmine-spec-reporter'); - -exports.config = { - allScriptsTimeout: 11000, - specs: [ - './e2e/**/*.e2e-spec.ts' - ], - capabilities: { - 'browserName': 'chrome' - }, - directConnect: true, - baseUrl: 'http://localhost:4200/', - framework: 'jasmine', - jasmineNodeOpts: { - showColors: true, - defaultTimeoutInterval: 30000, - print: function() {} - }, - beforeLaunch: function() { - require('ts-node').register({ - project: 'e2e/tsconfig.e2e.json' - }); - }, - onPrepare() { - jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } })); - } -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/app-routing.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/app-routing.module.ts deleted file mode 100644 index a55e51a81f8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/app-routing.module.ts +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {NgModule} from '@angular/core'; -import {RouterModule, Routes} from '@angular/router'; -import {LogsContainerComponent} from '@app/components/logs-container/logs-container.component'; -import {LoginFormComponent} from '@app/components/login-form/login-form.component'; -import {AuthGuardService} from '@app/services/auth-guard.service'; -import {TabGuard} from '@app/services/tab.guard'; -import {LogsBreadcrumbsResolverService} from '@app/services/logs-breadcrumbs-resolver.service'; -import {LoginScreenGuardService} from '@app/services/login-screen-guard.service'; - -const appRoutes: Routes = [{ - path: 'login', - component: LoginFormComponent, - data: { - breadcrumbs: 'login.title' - }, - canActivate: [LoginScreenGuardService] - }, { - path: 'logs/:activeTab', - component: LogsContainerComponent, - data: { - breadcrumbs: 'logs.title', - multiClusterFilter: true, - clusterParamKey: 'clusters' - }, - resolve: { - breadcrumbs: LogsBreadcrumbsResolverService - }, - canActivate: [AuthGuardService, TabGuard] - }, { - path: 'logs', - redirectTo: '/logs/serviceLogs', - pathMatch: 'full' - }, { - path: '', - redirectTo: '/logs/serviceLogs', - pathMatch: 'full' - }, { - path: '**', - redirectTo: '/logs/serviceLogs' - } -]; - -@NgModule({ - imports: [ - RouterModule.forRoot( - appRoutes, - { enableTracing: false, useHash: true } - ) - ], - exports: [ - RouterModule - ] -}) -export class AppRoutingModule {} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts deleted file mode 100644 index b72980ed9e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts +++ /dev/null @@ -1,234 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {BrowserModule} from '@angular/platform-browser'; -import {NgModule, CUSTOM_ELEMENTS_SCHEMA, Injector} from '@angular/core'; -import {FormsModule, ReactiveFormsModule} from '@angular/forms'; -import {HttpModule, Http, XHRBackend, BrowserXhr, ResponseOptions, XSRFStrategy} from '@angular/http'; -import {InMemoryBackendService} from 'angular-in-memory-web-api'; -import {TypeaheadModule, TooltipModule} from 'ngx-bootstrap'; -import {TranslateModule, TranslateLoader} from '@ngx-translate/core'; -import {StoreModule} from '@ngrx/store'; -import {StoreDevtoolsModule} from '@ngrx/store-devtools'; -import {MomentModule} from 'angular2-moment'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {NgStringPipesModule} from 'angular-pipes'; -import {SimpleNotificationsModule} from 'angular2-notifications'; - -import {environment} from '@envs/environment'; - -import {SharedModule} from '@modules/shared/shared.module'; -import {AppLoadModule} from '@modules/app-load/app-load.module'; -import {ShipperModule} from '@modules/shipper/shipper.module'; - -import {ServiceInjector} from '@app/classes/service-injector'; - -import {HttpClientService} from '@app/services/http-client.service'; -import {UtilsService} from '@app/services/utils.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {UserSettingsService} from '@app/services/user-settings.service'; - -import {AppSettingsService} from '@app/services/storage/app-settings.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {AuditLogsService} from '@app/services/storage/audit-logs.service'; -import {AuditLogsGraphDataService} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService} from '@app/services/storage/service-logs.service'; -import {ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service'; -import {GraphsService} from '@app/services/storage/graphs.service'; -import {HostsService} from '@app/services/storage/hosts.service'; -import {UserConfigsService} from '@app/services/storage/user-configs.service'; -import {ClustersService} from '@app/services/storage/clusters.service'; -import {ComponentsService} from '@app/services/storage/components.service'; -import {ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service'; -import {AuditLogsFieldsService} from '@app/services/storage/audit-logs-fields.service'; -import {TabsService} from '@app/services/storage/tabs.service'; -import {AuthService} from '@app/services/auth.service'; -import {HistoryManagerService} from '@app/services/history-manager.service'; -import {reducer} from '@app/services/storage/reducers.service'; - -import {AppComponent} from '@app/components/app.component'; -import {LoginFormComponent} from '@app/components/login-form/login-form.component'; -import {TopMenuComponent} from '@app/components/top-menu/top-menu.component'; -import {MenuButtonComponent} from '@app/components/menu-button/menu-button.component'; -import {MainContainerComponent} from '@app/components/main-container/main-container.component'; -import {FiltersPanelComponent} from '@app/components/filters-panel/filters-panel.component'; -import {FilterButtonComponent} from '@app/components/filter-button/filter-button.component'; -import 
{AccordionPanelComponent} from '@app/components/accordion-panel/accordion-panel.component'; -import {CollapsiblePanelComponent} from '@app/components/collapsible-panel/collapsible-panel.component'; -import {LogMessageComponent} from '@app/components/log-message/log-message.component'; -import {LogLevelComponent} from '@app/components/log-level/log-level.component'; -import {PaginationComponent} from '@app/components/pagination/pagination.component'; -import {PaginationControlsComponent} from '@app/components/pagination-controls/pagination-controls.component'; -import {TimeHistogramComponent} from '@app/components/time-histogram/time-histogram.component'; -import {LogsContainerComponent} from '@app/components/logs-container/logs-container.component'; -import {ActionMenuComponent} from '@app/components/action-menu/action-menu.component'; -import {TimeZonePickerComponent} from '@app/components/timezone-picker/timezone-picker.component'; -import {NodeBarComponent} from '@app/components/node-bar/node-bar.component'; -import {SearchBoxComponent} from '@app/components/search-box/search-box.component'; -import {TimeRangePickerComponent} from '@app/components/time-range-picker/time-range-picker.component'; -import {DatePickerComponent} from '@app/components/date-picker/date-picker.component'; -import {LogContextComponent} from '@app/components/log-context/log-context.component'; -import {LogFileEntryComponent} from '@app/components/log-file-entry/log-file-entry.component'; -import {TabsComponent} from '@app/components/tabs/tabs.component'; -import {ServiceLogsTableComponent} from '@app/components/service-logs-table/service-logs-table.component'; -import {AuditLogsTableComponent} from '@app/components/audit-logs-table/audit-logs-table.component'; -import {AuditLogsEntriesComponent} from '@app/components/audit-logs-entries/audit-logs-entries.component'; -import {GraphLegendComponent} from '@app/components/graph-legend/graph-legend.component'; -import {HorizontalHistogramComponent} from '@app/components/horizontal-histogram/horizontal-histogram.component'; -import {GraphTooltipComponent} from '@app/components/graph-tooltip/graph-tooltip.component'; -import {GraphLegendItemComponent} from '@app/components/graph-legend-item/graph-legend-item.component'; -import {TimeLineGraphComponent} from '@app/components/time-line-graph/time-line-graph.component'; -import {ContextMenuComponent} from '@app/components/context-menu/context-menu.component'; -import {HistoryItemControlsComponent} from '@app/components/history-item-controls/history-item-controls.component'; -import {LogIndexFilterComponent} from '@app/components/log-index-filter/log-index-filter.component'; - -import {TimeZoneAbbrPipe} from '@app/pipes/timezone-abbr.pipe'; -import {TimerSecondsPipe} from '@app/pipes/timer-seconds.pipe'; -import {ComponentLabelPipe} from '@app/pipes/component-label'; -import {AppRoutingModule} from '@app/app-routing.module'; -import {AuthGuardService} from '@app/services/auth-guard.service'; -import {BreadcrumbsComponent} from '@app/components/breadrumbs/breadcrumbs.component'; -import {ClusterFilterComponent } from '@app/components/cluster-filter/cluster-filter.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {TranslateService as AppTranslateService} from '@app/services/translate.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {TabGuard} from '@app/services/tab.guard'; -import {LogsBreadcrumbsResolverService} from 
'@app/services/logs-breadcrumbs-resolver.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {LoginScreenGuardService} from '@app/services/login-screen-guard.service'; - -@NgModule({ - declarations: [ - AppComponent, - LoginFormComponent, - TopMenuComponent, - MenuButtonComponent, - MainContainerComponent, - FiltersPanelComponent, - FilterButtonComponent, - AccordionPanelComponent, - CollapsiblePanelComponent, - LogLevelComponent, - LogMessageComponent, - PaginationComponent, - PaginationControlsComponent, - TimeHistogramComponent, - LogsContainerComponent, - ActionMenuComponent, - TimeZonePickerComponent, - NodeBarComponent, - SearchBoxComponent, - TimeRangePickerComponent, - DatePickerComponent, - LogContextComponent, - LogFileEntryComponent, - TabsComponent, - ServiceLogsTableComponent, - AuditLogsTableComponent, - AuditLogsEntriesComponent, - GraphLegendComponent, - HorizontalHistogramComponent, - GraphTooltipComponent, - GraphLegendItemComponent, - TimeLineGraphComponent, - ContextMenuComponent, - HistoryItemControlsComponent, - LogIndexFilterComponent, - TimeZoneAbbrPipe, - TimerSecondsPipe, - ComponentLabelPipe, - BreadcrumbsComponent, - ClusterFilterComponent - ], - imports: [ - BrowserModule, - AppLoadModule, - FormsModule, - ReactiveFormsModule, - HttpModule, - TypeaheadModule.forRoot(), - TooltipModule.forRoot(), - TranslateModule.forRoot({ - loader: { - provide: TranslateLoader, - useFactory: AppTranslateService.httpLoaderFactory, - deps: [Http] - } - }), - SimpleNotificationsModule, - MomentModule, - MomentTimezoneModule, - NgStringPipesModule, - - SharedModule, - ShipperModule, - - StoreModule.provideStore(reducer), - StoreDevtoolsModule.instrumentOnlyWithExtension({ - maxAge: 5 - }), - - AppRoutingModule - ], - providers: [ - HttpClientService, - UtilsService, - RoutingUtilsService, - LogsContainerService, - ComponentGeneratorService, - UserSettingsService, - AppSettingsService, - AppStateService, - AuditLogsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - GraphsService, - HostsService, - UserConfigsService, - ClustersService, - ComponentsService, - ServiceLogsFieldsService, - AuditLogsFieldsService, - TabsService, - TabGuard, - LogsBreadcrumbsResolverService, - AuthService, - AuthGuardService, - HistoryManagerService, - ClusterSelectionService, - LogsFilteringUtilsService, - LogsStateService, - LoginScreenGuardService - ], - bootstrap: [AppComponent], - entryComponents: [ - NodeBarComponent, - HistoryItemControlsComponent - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] -}) -export class AppModule { - constructor(private injector: Injector) { - ServiceInjector.injector = this.injector; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.ts deleted file mode 100644 index d3d7d9567a0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface ActiveServiceLogEntry { - id: string; - host_name: string; - component_name: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/graph.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/graph.component.less deleted file mode 100644 index f24652cee67..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/graph.component.less +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../../modules/shared/variables'; - -:host { - display: block; - - /deep/ .axis { - .domain { - stroke: @base-font-color; - } - .tick { - cursor: default; - line { - display: none; - } - } - } - - /deep/ .value { - cursor: pointer; - rect { - transition: opacity 250ms; - opacity: .8; - &:hover { - opacity: 1; - } - } - } - - graph-legend { - font-size: 1rem; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/graph.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/graph.component.ts deleted file mode 100644 index af6a9db943e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/graph.component.ts +++ /dev/null @@ -1,476 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { - AfterViewInit, OnChanges, SimpleChanges, ViewChild, ElementRef, Input, Output, EventEmitter, OnInit, OnDestroy -} from '@angular/core'; -import * as d3 from 'd3'; -import * as d3sc from 'd3-scale-chromatic'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/observable/fromEvent'; -import 'rxjs/add/operator/debounceTime'; -import { -GraphPositionOptions, GraphMarginOptions, GraphTooltipInfo, LegendItem, GraphEventData, GraphEmittedEvent -} from '@app/classes/graph'; -import {HomogeneousObject} from '@app/classes/object'; -import {ServiceInjector} from '@app/classes/service-injector'; -import {UtilsService} from '@app/services/utils.service'; -import {Subscription} from 'rxjs/Subscription'; - -export class GraphComponent implements AfterViewInit, OnChanges, OnInit, OnDestroy { - - @Input() - data: HomogeneousObject> = {}; - - @Input() - svgId: string = 'graph-svg'; - - @Input() - margin: GraphMarginOptions = { - top: 5, - right: 50, - bottom: 30, - left: 50 - }; - - @Input() - width: number; - - @Input() - height = 150; - - @Input() - tickPadding = 10; - - @Input() - colors: HomogeneousObject = {}; - - @Input() - labels: HomogeneousObject = {}; - - @Input() - chartLabel: string; - - /** - * Indicates whether the graph represents dependency on time - * @type {boolean} - */ - @Input() - isTimeGraph = false; - - /** - * Indicates whether X axis direction is right to left - * @type {boolean} - */ - @Input() - reverseXRange = false; - - /** - * Indicates whether Y axis direction is top to bottom - * @type {boolean} - */ - @Input() - reverseYRange = false; - - /** - * Indicates whether X axis ticks with fractional values should be displayed on chart (if any) - * @type {boolean} - */ - @Input() - allowFractionalXTicks = true; - - /** - * Indicates whether Y axis ticks with fractional values should be displayed on chart (if any) - * @type {boolean} - */ - @Input() - allowFractionalYTicks = true; - - /** - * Indicated whether Y values equal to 0 should be skipped in tooltip - * @type {boolean} - */ - @Input() - skipZeroValuesInTooltip = true; - - /** - * Indicates whether X axis event should be emitted with formatted string values that are displayed - * (instead of raw values) - * @type {boolean} - */ - @Input() - emitFormattedXTick = false; - - /** - * Indicates whether Y axis event should be emitted with formatted string values that are displayed - * (instead of raw values) - * @type {boolean} - */ - @Input() - emitFormattedYTick = false; - - @Output() - xTickContextMenu: EventEmitter> = new EventEmitter(); - - @Output() - yTickContextMenu: EventEmitter> = new EventEmitter(); - - @ViewChild('graphContainer') - graphContainerRef: ElementRef; - - @ViewChild('tooltip', { - read: ElementRef - }) - tooltipRef: ElementRef; - - private readonly xAxisClassName = 'axis-x'; - - private readonly yAxisClassName = 'axis-y'; - - protected utils: UtilsService; - - protected graphContainer: HTMLElement; - - private tooltip: HTMLElement; - - protected host; - - protected svg; - - protected xScale; - - protected yScale; - - protected xAxis; - - protected yAxis; - - /** - * Ordered array of color strings for data representation - * @type {string[]} - */ - protected orderedColors: string[]; - - /** - * This property is to hold the data of the bar where the mouse is over. - */ - protected tooltipInfo: GraphTooltipInfo | {} = {}; - - /** - * This is the computed position of the tooltip relative to the @graphContainer which is the container of the graph. 
- * It is set when the mousemoving over the figures in the @handleRectMouseMove method. - */ - private tooltipPosition: GraphPositionOptions; - - /** - * This property indicates if the tooltip should be positioned on the left side of the cursor or not. - * It should be true when the tooltip is out from the window. - * @type {boolean} - */ - private tooltipOnTheLeft = false; - - protected subscriptions: Subscription[] = []; - - /** - * This will return the information about the used levels and the connected colors and labels. - * The goal is to provide an easy property to the template to display the legend of the levels. - * @returns {LegendItem[]} - */ - legendItems: LegendItem[]; - - constructor() { - this.utils = ServiceInjector.injector.get(UtilsService); - } - - ngOnInit() { - this.subscriptions.push( - Observable.fromEvent(window, 'resize').debounceTime(100).subscribe(this.onWindowResize) - ); - this.setLegendItems(); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - ngAfterViewInit() { - this.graphContainer = this.graphContainerRef.nativeElement; - this.tooltip = this.tooltipRef.nativeElement; - this.host = d3.select(this.graphContainer); - this.createGraph(); - } - - ngOnChanges(changes: SimpleChanges) { - const dataChange = changes.data; - if (dataChange && dataChange.currentValue && !this.utils.isEmptyObject(dataChange.currentValue) - && (!dataChange.previousValue || this.utils.isEmptyObject(dataChange.previousValue)) - && this.utils.isEmptyObject(this.labels)) { - this.setDefaultLabels(); - } - if (changes.labels || changes.colors) { - this.setLegendItems(); - } - this.createGraph(); - } - - onWindowResize = () => { - this.createGraph(); - } - - protected createGraph(): void { - if (this.host && !this.utils.isEmptyObject(this.labels)) { - this.setup(); - this.buildSVG(); - this.populate(); - } - } - - /** - * Method that sets default labels map object based on data if no custom one is specified - */ - protected setDefaultLabels() { - const data = this.data; - const keys = Object.keys(data); - const labels = keys.reduce((keysReduced: HomogeneousObject, dataKey: string): HomogeneousObject => { - const newKeys = Object.keys(data[dataKey]); - const newKeysObj = newKeys.reduce((subKeys: HomogeneousObject, key: string): HomogeneousObject => { - return Object.assign(subKeys, { - [key]: key - }); - }, {}); - return Object.assign(keysReduced, newKeysObj); - }, {}); - this.labels = labels; - this.setLegendItems(); - } - - protected setLegendItems(): void { - if (this.colors && this.labels) { - this.legendItems = Object.keys(this.labels).map((key: string) => Object.assign({}, { - label: this.labels[key], - color: this.colors[key] - })); - } - } - - protected setup(): void { - const margin = this.margin; - if (this.utils.isEmptyObject(this.colors)) { - // set default color scheme for different values if no custom colors specified - const keys = Object.keys(this.labels); - const keysCount = keys.length; - const specterLength = keysCount > 2 ? 
keysCount : 3; // length of minimal available spectral scheme is 3 - let colorsArray; - if (keysCount > 2) { - colorsArray = Array.from(d3sc.schemeSpectral[keysCount]); - } else { - const minimalColorScheme = Array.from(d3sc.schemeSpectral[specterLength]); - colorsArray = minimalColorScheme.slice(0, keysCount); - } - this.orderedColors = colorsArray; - this.colors = keys.reduce((currentObject: HomogeneousObject, currentKey: string, index: number) => { - return Object.assign(currentObject, { - [currentKey]: colorsArray[index] - }); - }, {}); - } else { - const keysWithColors = this.colors, - keys = Object.keys(keysWithColors); - this.orderedColors = keys.reduce((array: string[], key: string): string[] => [...array, keysWithColors[key]], []); - } - this.width = this.graphContainer.clientWidth - margin.left - margin.right; - const xScale = this.isTimeGraph ? d3.scaleTime() : d3.scaleLinear(); - const yScale = d3.scaleLinear(); - const xScaleWithRange = this.reverseXRange ? xScale.range([this.width, 0]) : xScale.range([0, this.width]); - const yScaleWithRange = this.reverseYRange ? yScale.range([0, this.height]) : yScale.range([this.height, 0]); - this.xScale = xScaleWithRange; - this.yScale = yScaleWithRange; - } - - protected buildSVG(): void { - const margin = this.margin; - this.host.html(''); - this.svg = this.host.append('svg').attr('id', this.svgId).attr('width', this.graphContainer.clientWidth) - .attr('height', this.height + margin.top + margin.bottom).append('g') - .attr('transform', `translate(${margin.left},${margin.top})`); - } - - protected populate(): void {} - - /** - * Set the domain values for the x scale regarding the given data. - * @param formattedData - */ - protected setXScaleDomain(formattedData?: any): void {} - - /** - * Set the domain for the y scale regarding the given data. - * @param formattedData - */ - protected setYScaleDomain(formattedData?: any): void {} - - /** - * It draws the svg representation of the x axis. The goal is to set the ticks here, add the axis to the svg element - * and set the position of the axis. - * @param {number} ticksCount - optional parameter which sets number of ticks explicitly - * @param {number} leftOffset - */ - protected drawXAxis(ticksCount?: number, leftOffset?: number): void { - const axis = d3.axisBottom(this.xScale).tickFormat(this.xAxisTickFormatter).tickPadding(this.tickPadding); - if (ticksCount) { - axis.ticks(ticksCount); - } - this.xAxis = axis; - this.svg.append('g').attr('class', `axis ${this.xAxisClassName}`) - .attr('transform', `translate(${leftOffset || 0}, ${this.height})`) - .call(this.xAxis); - if (this.xTickContextMenu.observers.length) { - this.svg.selectAll(`.${this.xAxisClassName} .tick`).on('contextmenu', (tickValue: any, index: number): void => { - const tick = this.emitFormattedXTick ? this.xAxisTickFormatter(tickValue, index) : tickValue, - nativeEvent = d3.event; - this.xTickContextMenu.emit({tick, nativeEvent}); - event.preventDefault(); - }); - } - } - - /** - * It draws the svg representation of the y axis. The goal is to set the ticks here, add the axis to the svg element - * and set the position of the axis. 
- * @param {number} ticksCount - optional parameter which sets number of ticks explicitly - */ - protected drawYAxis(ticksCount?: number): void { - const axis = d3.axisLeft(this.yScale).tickFormat(this.yAxisTickFormatter).tickPadding(this.tickPadding); - if (ticksCount) { - axis.ticks(ticksCount); - } - this.yAxis = axis; - this.svg.append('g').attr('class', `axis ${this.yAxisClassName}`).call(this.yAxis); - if (this.yTickContextMenu.observers.length) { - this.svg.selectAll(`.${this.yAxisClassName} .tick`).on('contextmenu', (tickValue: any, index: number): void => { - const tick = this.emitFormattedYTick ? this.yAxisTickFormatter(tickValue, index) : tickValue, - nativeEvent = d3.event; - this.yTickContextMenu.emit({tick, nativeEvent}); - event.preventDefault(); - }); - } - }; - - /** - * Function that formats the labels for X axis ticks. - * Returns simple toString() conversion as default, can be overridden in ancestors. - * undefined value is returned for ticks to be skipped. - * @param tick - * @param {number} index - * @returns {string|undefined} - */ - protected xAxisTickFormatter = (tick: any, index: number): string | undefined => { - if (this.allowFractionalXTicks) { - return tick.toString(); - } else { - return Number.isInteger(tick) ? tick.toFixed(0) : undefined; - } - } - - /** - * Function that formats the labels for Y axis ticks. - * Returns simple toString() conversion as default, can be overridden in ancestors. - * undefined value is returned for ticks to be skipped. - * @param tick - * @param {number} index - * @returns {string|undefined} - */ - protected yAxisTickFormatter = (tick: any, index: number): string | undefined => { - if (this.allowFractionalYTicks) { - return tick.toString(); - } else { - return Number.isInteger(tick) ? tick.toFixed(0) : undefined; - } - } - - /** - * The goal is to handle the mouse over event on the svg elements so that we can populate the tooltip info object - * and set the initial position of the tooltip. So we call the corresponding methods. - * @param {GraphEventData} d The data for the currently "selected" figure - * @param {number} index The index of the current element in the selection - * @param elements The selection of the elements - */ - protected handleMouseOver = (d: GraphEventData, index: number, elements: HTMLElement[]): void => { - this.setTooltipDataFromChartData(d); - this.setTooltipPosition(); - } - - /** - * The goal is to handle the movement of the mouse over the svg elements, so that we can set the position of - * the tooltip by calling the @setTooltipPosition method. - */ - protected handleMouseMove = (): void => { - this.setTooltipPosition(); - } - - /** - * The goal is to reset the tooltipInfo object so that the tooltip will be hidden. - */ - protected handleMouseOut = (): void => { - this.tooltipInfo = {}; - } - - /** - * The goal is set the tooltip - * @param {GraphEventData} d - */ - protected setTooltipDataFromChartData(d: GraphEventData): void { - const {tick, ...data} = d.data, - levelColors = this.colors; - let tooltipKeys = Object.keys(levelColors); - if (this.skipZeroValuesInTooltip) { - tooltipKeys = tooltipKeys.filter((key: string): boolean => data[key] > 0) - } - this.tooltipInfo = { - data: tooltipKeys.map((key: string): object => Object.assign({}, { - color: this.colors[key], - label: this.labels[key], - value: data[key] - })), - title: tick - }; - } - - /** - * The goal of this function is to set the tooltip position regarding the d3.mouse event relative to the @graphContainer. 
- * Only if we have @tooltipInfo - */ - protected setTooltipPosition(): void { - if (this.tooltipInfo.hasOwnProperty('data')) { - const tooltip = this.tooltip, - relativeMousePosition = d3.mouse(this.graphContainer), - absoluteMousePosition = d3.mouse(document.body), - absoluteMouseLeft = absoluteMousePosition[0], - top = relativeMousePosition[1] - (tooltip.offsetHeight / 2), - tooltipWidth = tooltip.offsetWidth, - windowSize = window.innerWidth; - let left = relativeMousePosition[0]; - if (absoluteMouseLeft + tooltipWidth > windowSize) { - left = relativeMousePosition[0] - (tooltipWidth + 25); - } - this.tooltipOnTheLeft = left < relativeMousePosition[0]; - this.tooltipPosition = {left, top}; - } - }; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/time-graph.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/time-graph.component.less deleted file mode 100644 index bfafd40c2d3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/time-graph.component.less +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../../modules/shared/mixins'; - -:host { - background: #FFF; // TODO add style according to actual design - - /deep/ svg { - cursor: crosshair; - } - - .chart-label, .time-gap { - color: @base-font-color; - font-size: 1.2rem; - text-align: center; - } - - footer { - .default-flex; - font-size: 1.2rem; - color: @base-font-color; - padding: 0 1em .5em; - } - - /deep/ rect.drag-area { - fill: #fff; - } - - /deep/ rect.unselected-drag-area { - fill: @graph-invert-selection-background; - opacity: .4; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/time-graph.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/time-graph.component.ts deleted file mode 100644 index 70412b20787..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/graph/time-graph.component.ts +++ /dev/null @@ -1,268 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {OnInit, Input, Output, EventEmitter} from '@angular/core'; -import * as d3 from 'd3'; -import * as moment from 'moment-timezone'; -import {AppSettingsService} from '@app/services/storage/app-settings.service'; -import {ChartTimeGap, GraphScaleItem} from '@app/classes/graph'; -import {ServiceInjector} from '@app/classes/service-injector'; -import {GraphComponent} from '@app/classes/components/graph/graph.component'; - -export class TimeGraphComponent extends GraphComponent implements OnInit { - - @Input() - tickTimeFormat: string = 'MM/DD HH:mm'; - - @Input() - historyStartEndTimeFormat: string = 'dddd, MMMM DD, YYYY'; - - @Input() - defaultChartTimeGap: ChartTimeGap = { - value: 1, - unit: 'h', - label: 'filter.timeRange.1hr' - }; - - @Output() - selectArea: EventEmitter = new EventEmitter(); - - readonly isTimeGraph: boolean = true; - - readonly allowFractionalXTicks: boolean = false; - - protected appSettings: AppSettingsService; - - protected dragArea: d3.Selection; - - protected dragStartX: number; - - protected minDragX: number; - - protected maxDragX: number; - - protected timeZone: string; - - /** - * This property holds the data structure describing the gaps between the xAxis ticks. - * The unit property can be: second, minute, hour, day - * The value is the number of the given unit. - */ - protected chartTimeGap: ChartTimeGap | null; - /** - * This is the rectangle element to represent the unselected time range on the left side of the selected time range - */ - protected leftDragArea: d3.Selection; - /** - * This is the rectangle element to represent the unselected time range on the right side of the selected time range - */ - protected rightDragArea: d3.Selection; - - constructor() { - super(); - this.appSettings = ServiceInjector.injector.get(AppSettingsService); - } - - ngOnInit() { - this.subscriptions.push( - this.appSettings.getParameter('timeZone').subscribe((value: string): void => { - this.timeZone = value; - this.createGraph(); - }) - ); - super.ngOnInit(); - } - - /** - * This is a Date object holding the value of the first tick of the xAxis. It is a helper getter for the template. - */ - protected get firstDateTick(): Date | undefined { - const ticks = this.xScale && this.xScale.ticks(); - return (ticks && ticks.length && ticks[0]) || undefined; - } - - /** - * This is a Date object holding the value of the last tick of the xAxis. It is a helper getter for the template. - */ - protected get lastDateTick(): Date | undefined { - const ticks = this.xScale && this.xScale.ticks(); - return (ticks && ticks.length && ticks[ticks.length - 1]) || undefined; - } - - protected xAxisTickFormatter = (tick: Date): string => { - return moment(tick).tz(this.timeZone).format(this.tickTimeFormat); - } - - protected setXScaleDomain(data: GraphScaleItem[]): void { - this.xScale.domain(d3.extent(data, item => item.tick)).nice().domain(); - } - - /** - * The goal is to calculate the time gap between the given dates. It will return an object representing the unit and - * the value in the given unit. 
Eg.: {unit: 'minute', value: 5} - * @param {Date} startDate - * @param {Date} endDate - * @returns {ChartTimeGap} - */ - protected getTimeGap(startDate: Date, endDate: Date): ChartTimeGap { - const startDateMoment = moment(startDate); - const endDateMoment = moment(endDate); - const diffInWeek: number = endDateMoment.diff(startDateMoment, 'weeks'); - const diffInDay: number = endDateMoment.diff(startDateMoment, 'days'); - const diffInHour: number = endDateMoment.diff(startDateMoment, 'hours'); - const diffInMin: number = endDateMoment.diff(startDateMoment, 'minutes'); - const diffInSec: number = endDateMoment.diff(startDateMoment, 'seconds'); - const value = diffInWeek >= 1 ? diffInWeek : ( - diffInDay >= 1 ? diffInDay : ( - diffInHour >= 1 ? diffInHour : (diffInMin >= 1 ? diffInMin : diffInSec) - ) - ); - const unit: string = diffInWeek >= 1 ? 'week' : ( - diffInDay >= 1 ? `day` : ( - diffInHour >= 1 ? `hour` : (diffInMin >= 1 ? `minute` : `second`) - ) - ); - const label = `histogram.gap.${unit}${value > 1 ? 's' : ''}`; - return { - value, - unit, - label - }; - } - - /** - * The goal is to have a simple function to set the time gap corresponding to the xScale ticks. - * It will reset the time gap if the xScale is not set or there are no ticks. - */ - protected setChartTimeGapByXScale(): void { - const ticks = this.xScale && this.xScale.ticks(); - if (ticks && ticks.length) { - this.setChartTimeGap(ticks[0], ticks[1] || ticks[0]); - } else { - this.resetChartTimeGap(); - } - } - - /** - * Simply reset the time gap property to null. - */ - protected resetChartTimeGap(): void { - this.chartTimeGap = this.defaultChartTimeGap; - } - - /** - * The goal is to have a single point where we set the chartTimeGap property corresponding the given timerange. - * @param {Date} startDate - * @param {Date} endDate - */ - protected setChartTimeGap(startDate: Date, endDate: Date): void { - const gap: ChartTimeGap = this.getTimeGap(startDate, endDate); - if (gap.value > 0) { - this.chartTimeGap = gap; - } - } - - protected getTimeRangeByXRanges(startX: number, endX: number): [number, number] { - const xScaleInterval = this.xScale.domain().map((point: Date): number => point.valueOf()); - const xScaleLength = xScaleInterval[1] - xScaleInterval[0]; - const ratio = xScaleLength / this.width; - return [Math.round(xScaleInterval[0] + ratio * startX), Math.round(xScaleInterval[0] + ratio * endX)]; - } - - /** - * The goal is to create the two shadow rectangle beside the selected area. Actually we blurout the not selected - * timeranges - * @param {number} startX This is the starting position of the drag event withing the container - * @param {number} currentX This is the ending point of the drag within the container - */ - protected createInvertDragArea(startX: number, currentX: number): void { - const height: number = this.height + this.margin.top + this.margin.bottom; - this.leftDragArea = this.svg.insert('rect').attr('height', height).attr('class', 'unselected-drag-area'); - this.rightDragArea = this.svg.insert('rect').attr('height', height).attr('class', 'unselected-drag-area'); - this.setInvertDragArea(startX, currentX); - } - - /** - * Set the position and the width of the blur/shadow rectangles of the unselected area(s). - * @param {number} startX The start point of the selected area. - * @param {number} currentX The end point of the selected area. 
- */ - protected setInvertDragArea(startX: number, currentX: number): void { - const left: number = Math.min(startX, currentX); - const right: number = Math.max(startX, currentX); - const rightAreaWidth: number = Math.max(0, this.width - right); - const leftAreaWidth: number = Math.max(0, left); - this.leftDragArea.attr('x', 0).attr('width', leftAreaWidth); - this.rightDragArea.attr('x', right).attr('width', rightAreaWidth); - } - - /** - * The goal is to have a single point where we remove the rectangles of the blur/shadow, unselected time range(s) - */ - protected clearInvertDragArea(): void { - this.leftDragArea.remove(); - this.rightDragArea.remove(); - } - - protected setDragBehavior(): void { - this.minDragX = this.margin.left; - this.maxDragX = this.graphContainer.clientWidth; - d3.selectAll(`svg#${this.svgId}`).call(d3.drag() - .on('start', (datum: undefined, index: number, containers: d3.ContainerElement[]): void => { - if (this.dragArea) { - this.dragArea.remove(); - } - this.dragStartX = Math.max(0, this.getDragX(containers[0]) - this.margin.left); - this.dragArea = this.svg.insert('rect', ':first-child').attr('x', this.dragStartX).attr('y', 0).attr('width', 0) - .attr('height', this.height).attr('class', 'drag-area'); - this.createInvertDragArea(this.dragStartX, this.dragStartX); - }) - .on('drag', (datum: undefined, index: number, containers: d3.ContainerElement[]): void => { - const mousePos = this.getDragX(containers[0]); - const currentX = Math.max(mousePos, this.minDragX) - this.margin.left; - const startX = Math.min(currentX, this.dragStartX); - const currentWidth = Math.abs(currentX - this.dragStartX); - this.dragArea.attr('x', startX).attr('width', currentWidth); - const timeRange = this.getTimeRangeByXRanges(startX, startX + currentWidth); - this.setChartTimeGap(new Date(timeRange[0]), new Date(timeRange[1])); - this.setInvertDragArea(startX, startX + currentWidth); - }) - .on('end', (): void => { - const dragAreaDetails = this.dragArea.node().getBBox(); - const startX = Math.max(0, dragAreaDetails.x); - const endX = Math.min(this.width, dragAreaDetails.x + dragAreaDetails.width); - if (endX !== startX) { - const dateRange: [number, number] = this.getTimeRangeByXRanges(startX, endX); - this.selectArea.emit(dateRange); - this.dragArea.remove(); - this.setChartTimeGap(new Date(dateRange[0]), new Date(dateRange[1])); - } - this.clearInvertDragArea(); - }) - ); - d3.selectAll(`svg#${this.svgId} .value, svg#${this.svgId} .axis`).call(d3.drag().on('start', (): void => { - d3.event.sourceEvent.stopPropagation(); - })); - } - - protected getDragX(element: d3.ContainerElement): number { - return d3.mouse(element)[0]; - } - - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/logs-table/logs-table-component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/logs-table/logs-table-component.spec.ts deleted file mode 100644 index 05f80a7902d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/logs-table/logs-table-component.spec.ts +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {LogsTableComponent} from './logs-table-component'; - -describe('LogsTableComponent', () => { - let component; - - beforeEach(() => { - component = new LogsTableComponent(); - }); - - describe('#isColumnDisplayed()', () => { - const cases = [ - { - name: 'v1', - result: true, - title: 'column is displayed' - }, - { - name: 'l1', - result: false, - title: 'column is not displayed' - } - ]; - - beforeEach(() => { - component.displayedColumns = [ - { - label: 'l0', - value: 'v0' - }, - { - label: 'l1', - value: 'v1' - } - ]; - }); - - cases.forEach(test => { - it(test.title, () => { - expect(component.isColumnDisplayed(test.name)).toEqual(test.result); - }); - }); - }); -}); \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/logs-table/logs-table-component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/logs-table/logs-table-component.ts deleted file mode 100644 index 0b8866ae627..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/components/logs-table/logs-table-component.ts +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {OnChanges, SimpleChanges, Input} from '@angular/core'; -import {FormGroup} from '@angular/forms'; -import {ListItem} from '@app/classes/list-item'; -import {ServiceLog} from '@app/classes/models/service-log'; -import {AuditLog} from '@app/classes/models/audit-log'; - -export class LogsTableComponent implements OnChanges { - - ngOnChanges(changes: SimpleChanges) { - if (changes.hasOwnProperty('columns')) { - this.displayedColumns = this.columns.filter((column: ListItem): boolean => column.isChecked); - } - } - - @Input() - logs: ServiceLog[] | AuditLog[] = []; - - @Input() - columns: ListItem[] = []; - - @Input() - filtersForm: FormGroup; - - @Input() - totalCount: number = 0; - - displayedColumns: ListItem[] = []; - - isColumnDisplayed(key: string): boolean { - return this.displayedColumns.some((column: ListItem): boolean => column.value === key); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts deleted file mode 100644 index bb75786347b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
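
A minimal usage sketch of the removed LogsTableComponent, mirroring the spec above. The sample columns are illustrative, and the SimpleChange wrapper is only there to satisfy ngOnChanges when the component is instantiated directly, as the spec does.

// Assumed usage (illustrative values): displayedColumns is rebuilt from the checked
// columns on change, and isColumnDisplayed() answers per-key visibility.
import {SimpleChange} from '@angular/core';
import {LogsTableComponent} from './logs-table-component';

const table = new LogsTableComponent();
table.columns = [
  {label: 'Level', value: 'level', isChecked: true},
  {label: 'Message', value: 'log_message', isChecked: false}
];
table.ngOnChanges({columns: new SimpleChange(null, table.columns, true)});

table.isColumnDisplayed('level');       // true  - the column is checked
table.isColumnDisplayed('log_message'); // false - the column is unchecked
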
- */ - -import {Moment, unitOfTime} from 'moment'; -import {ListItem} from '@app/classes/list-item'; -import {TimeRangeType, SortingType} from '@app/classes/string'; - -export interface TimeUnit { - type: TimeRangeType; - unit: unitOfTime.DurationConstructor; - interval?: number; -} - -export interface CustomTimeRange { - type: 'CUSTOM'; - start?: Moment; - end?: Moment; -} - -export interface SortingConditions { - key: string; - type: SortingType; -} - -export interface TimeUnitListItem extends ListItem { - value: TimeUnit | CustomTimeRange; -} - -export interface SortingListItem extends ListItem { - value: SortingConditions; -} - -export interface FilterCondition { - label?: string; - options?: (ListItem | TimeUnitListItem[])[]; - defaultSelection?: ListItem | ListItem[] | number | boolean; - iconClass?: string; - fieldName?: string; -} - -export interface SearchBoxParameter { - name: string; - value: string; - isExclude: boolean; -} - -export interface SearchBoxParameterProcessed extends SearchBoxParameter { - id: number; - label: string; -} - -export interface SearchBoxParameterTriggered { - item: ListItem; - isExclude: boolean; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/graph.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/graph.ts deleted file mode 100644 index 9690dbcfec7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/graph.ts +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface GraphPositionOptions { - top: number; - left: number; -} - -export interface GraphMarginOptions extends GraphPositionOptions { - right: number; - bottom: number; -} - -export interface GraphTooltipInfo { - data: object[]; - title: string | number; -} - -export interface LegendItem { - label: string; - color: string; -} - -export interface GraphScaleItem { - tick: number; - [key: string]: number; -} - -export interface ChartTimeGap { - value: number; - unit: string; - label: string; -} - -export interface GraphEventData extends Array { - data: GraphScaleItem; -} - -export type GraphLinePoint = GraphScaleItem & { - color: string; -} - -export interface GraphLineData { - points: GraphScaleItem[]; - key: string; -} - -export interface GraphEmittedEvent { - tick: any; - nativeEvent: EventType; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.ts deleted file mode 100644 index 3a9b72c92d4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.ts +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
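
A hedged example of how the filtering interfaces above can be instantiated; the labels, units, and dates are illustrative and are not taken from the removed application code.

// Illustrative values only: a quick-range option ("last 1 hour") and a custom range
// expressed with the TimeUnit / CustomTimeRange shapes defined above.
import * as moment from 'moment';
import {TimeUnitListItem, CustomTimeRange} from '@app/classes/filtering';

const lastHour: TimeUnitListItem = {
  label: 'Last 1 hour',
  value: {type: 'LAST', unit: 'h', interval: 1}
};

const customRange: CustomTimeRange = {
  type: 'CUSTOM',
  start: moment().subtract(1, 'd'),
  end: moment()
};
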
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface ListItem { - id?: string | number; - label?: string; - value: any; - iconClass?: string; - cssClass?: string; - isChecked?: boolean; - onSelect?: Function; - isDivider?: boolean; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-settings.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-settings.ts deleted file mode 100644 index 3ba5089263f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-settings.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as moment from 'moment-timezone'; -import {HomogeneousObject} from '@app/classes/object'; -import {Filter} from '@app/classes/models/filter'; - -export interface AppSettings { - timeZone: string; - logIndexFilters: HomogeneousObject>; -} - -export const defaultSettings: AppSettings = { - timeZone: moment.tz.guess(), - logIndexFilters: {} -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts deleted file mode 100644 index 2a4d4cc2116..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry'; -import {ListItem} from '@app/classes/list-item'; -import {DataAvailability, DataAvailabilityValues, LogsType} from '@app/classes/string'; - -export interface History { - items: ListItem[]; - currentId: number; -} - -export interface AppState { - isAuthorized: boolean; - isInitialLoading: boolean; - isLoginInProgress: boolean; - baseDataSetState: DataAvailability; - activeLogsType?: LogsType; - isServiceLogsFileView: boolean; - isServiceLogContextView: boolean; - activeLog: ActiveServiceLogEntry | null; - activeFilters: object; - history: History; -} - -export const initialState: AppState = { - isAuthorized: false, - isInitialLoading: false, - isLoginInProgress: false, - baseDataSetState: DataAvailabilityValues.NOT_AVAILABLE, - activeLogsType: 'serviceLogs', - isServiceLogsFileView: false, - isServiceLogContextView: false, - activeLog: null, - activeFilters: null, - history: { - items: [], - currentId: -1 - } -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/audit-log.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/audit-log.ts deleted file mode 100644 index 380f14f9eda..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/audit-log.ts +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Log} from '@app/classes/models/log'; - -export interface AuditLog extends Log { - policy?: string; - reason?: string; - result: number; - text?: string; - tags?: string[]; - resource?: string; - sess?: string; - access?: string; - logType: string; - tags_str?: string; - resType?: string; - reqUser: string; - reqData?: string; - repoType: number; - repo: string; - proxyUsers?: string[]; - evtTime: number; - enforcer: string; - reqContext?: string; - cliType?: string; - cliIP?: string; - agent?: string; - agentHost?: string; - action?: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/bar-graph.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/bar-graph.ts deleted file mode 100644 index d872bd06848..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/bar-graph.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CommonEntry} from '@app/classes/models/common-entry'; - -export interface BarGraph { - dataCount: CommonEntry[]; - name: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/common-entry.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/common-entry.ts deleted file mode 100644 index dad82ab24bc..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/common-entry.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface CommonEntry { - name: string; - value: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/count.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/count.ts deleted file mode 100644 index 02fc41c2d83..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/count.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface Count { - name: string; - count: number; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/filter.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/filter.ts deleted file mode 100644 index b3e01aade4e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/filter.ts +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {LogLevel} from '@app/classes/string'; - -export interface Filter { - label: string; - hosts: string[]; - defaultLevels: LogLevel[]; - overrideLevels: LogLevel[]; - expiryTime: string | null; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/graph.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/graph.ts deleted file mode 100644 index be31f196fd4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/graph.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface Graph { - name: string; - count: string; - dataList?: Graph[]; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/log-type-tab.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/log-type-tab.ts deleted file mode 100644 index 6b626cf0d5b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/log-type-tab.ts +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {HomogeneousObject} from '@app/classes/object'; - -export interface LogTypeTab { - id: string; - isActive?: boolean; - isCloseable?: boolean; - label: string; - activeFilters?: object; - appState?: HomogeneousObject; -} - -export const initialTabs: LogTypeTab[] = [ - { - id: 'serviceLogs', - isActive: true, - label: 'common.serviceLogs', - activeFilters: null, - appState: { - activeLogsType: 'serviceLogs', - isServiceLogsFileView: false - } - }, - { - id: 'auditLogs', - isActive: false, - label: 'common.auditLogs', - activeFilters: null, - appState: { - activeLogsType: 'auditLogs', - isServiceLogsFileView: false - } - } -]; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/log.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/log.ts deleted file mode 100644 index c598e4176cb..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/log.ts +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface Log { - type: string; - _version_: number; - id: string; - file?: string; - seq_num: number; - bundle_id?: string; - case_id?: string; - log_message: string; - logfile_line_number: number; - line_number?: number; - message_md5: string; - cluster: string; - event_count: number; - event_md5: string; - event_dur_ms: number; - _ttl_: string; - _expire_at_: number; - _router_field_?: number; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/logs-state.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/logs-state.ts deleted file mode 100644 index 6587c0d0594..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/logs-state.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -export interface LogsState { - activeTabId: string; -} - -export const defaultState: LogsState = { - activeTabId: '' -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/node-group.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/node-group.ts deleted file mode 100644 index d461b234476..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/node-group.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * A simple interface for the component's groups (aka services) - */ -export interface NodeGroup { - name: string; - label?: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/node-item.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/node-item.ts deleted file mode 100644 index ca10578a582..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/node-item.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CommonEntry} from '@app/classes/models/common-entry'; -import {NodeGroup} from "@app/classes/models/node-group"; - -export interface NodeItem { - name: string; - type?: string; - value: string; - label?: string; - group?: NodeGroup; - isParent: boolean; - isRoot: boolean; - childs?: NodeItem[]; - logLevelCount?: CommonEntry[]; - vNodeList?: CommonEntry[]; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/service-log.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/service-log.ts deleted file mode 100644 index 2ac026cf433..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/service-log.ts +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Log} from '@app/classes/models/log'; - -export interface ServiceLog extends Log { - path: string; - host: string; - level: string; - logtime: number; - ip: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/solr-collection-state.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/solr-collection-state.ts deleted file mode 100644 index 0824dda679f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/solr-collection-state.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface SolrCollectionState { - znodeReady: boolean; - configurationUploaded: boolean; - solrCollectionReady: boolean; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/store.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/store.ts deleted file mode 100644 index 9e34b14e7b3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/store.ts +++ /dev/null @@ -1,209 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {ReflectiveInjector} from '@angular/core'; -import {Observable} from 'rxjs/Observable'; -import {Store, Action} from '@ngrx/store'; -import {AppSettings} from '@app/classes/models/app-settings'; -import {AppState} from '@app/classes/models/app-state'; -import {AuditLog} from '@app/classes/models/audit-log'; -import {ServiceLog} from '@app/classes/models/service-log'; -import {BarGraph} from '@app/classes/models/bar-graph'; -import {Graph} from '@app/classes/models/graph'; -import {NodeItem} from '@app/classes/models/node-item'; -import {UserConfig} from '@app/classes/models/user-config'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {LogField} from '@app/classes/object'; -import {UtilsService} from '@app/services/utils.service'; -import {NotificationInterface} from '@modules/shared/interfaces/notification.interface'; -import {LogsState} from '@app/classes/models/logs-state'; -import { DataAvaibilityStatesModel } from '@app/modules/app-load/models/data-availability-state.model'; - -const storeActions = { - 'ARRAY.ADD': 'ADD', - 'ARRAY.ADD.START': 'ADD_TO_START', - 'ARRAY.ADD.UNIQUE': 'ADD_UNIQUE', - 'ARRAY.DELETE.PRIMITIVE': 'DELETE_PRIMITIVE', - 'ARRAY.DELETE.OBJECT': 'DELETE_OBJECT', - 'ARRAY.CLEAR': 'CLEAR', - 'ARRAY.MAP': 'MAP', - - 'OBJECT.SET': 'SET' - }, - provider = ReflectiveInjector.resolve([UtilsService]), - injector = ReflectiveInjector.fromResolvedProviders(provider), - utils = injector.get(UtilsService); - -export interface AppStore { - appSettings: AppSettings; - appState: AppState; - auditLogs: AuditLog[]; - auditLogsGraphData: BarGraph[]; - serviceLogs: ServiceLog[]; - serviceLogsHistogramData: BarGraph[]; - serviceLogsTruncated: ServiceLog[]; - graphs: Graph[]; - hosts: NodeItem[]; - userConfigs: UserConfig[]; - clusters: string[]; - components: NodeItem[]; - serviceLogsFields: LogField[]; - auditLogsFields: LogField[]; - tabs: LogTypeTab[]; - notifications: NotificationInterface[]; - logsState: LogsState; - dataAvailabilityStates: DataAvaibilityStatesModel; -} - -export class ModelService { - - protected modelName: string; - - protected store: Store; - - constructor(modelName: string, store: Store) { - this.modelName = modelName; - this.store = store; - } - - getAll(): Observable { - return this.store.select(this.modelName); - } - -} - -export class CollectionModelService extends ModelService { - - addInstance(instance: any): void { - this.addInstances([instance]); - } - - addInstances(instances: any[]): void { - this.store.dispatch({ - type: `${storeActions['ARRAY.ADD']}_${this.modelName}`, - payload: instances - }); - } - - addInstancesToStart(instances: any[]): void { - this.store.dispatch({ - type: `${storeActions['ARRAY.ADD.START']}_${this.modelName}`, - payload: instances - }); - } - - addUniqueInstances(instances: any[]): void { - this.store.dispatch({ - type: `${storeActions['ARRAY.ADD.UNIQUE']}_${this.modelName}`, - payload: instances - }); - } - - deleteObjectInstance(instance: any): void { - this.store.dispatch({ - type: `${storeActions['ARRAY.DELETE.OBJECT']}_${this.modelName}`, - payload: instance - }); - } - - deletePrimitiveInstance(instance: any): void { - this.store.dispatch({ - type: `${storeActions['ARRAY.DELETE.PRIMITIVE']}_${this.modelName}`, - payload: instance - }); - } - - clear(): void { - this.store.dispatch({ - type: `${storeActions['ARRAY.CLEAR']}_${this.modelName}` - }); - } - - mapCollection(modifier: (item: any) => any): void { - this.store.dispatch({ - type: 
`${storeActions['ARRAY.MAP']}_${this.modelName}`, - payload: { - modifier: modifier - } - }); - } - - findInCollection(findFunction): Observable { - return this.getAll().map((result: any[]): any => result.find(findFunction)); - } - - filterCollection(filterFunction): Observable { - return this.getAll().map((result: any[]): any[] => result.filter(filterFunction)); - } - -} - -export class ObjectModelService extends ModelService { - - getParameter(key: string): Observable { - return this.store.select(this.modelName, key); - } - - setParameter(key: string, value: any): void { - this.setParameters({ - [key]: value - }); - } - - setParameters(params: any): void { - this.store.dispatch({ - type: `${storeActions['OBJECT.SET']}_${this.modelName}`, - payload: params - }); - } - -} - -export function getCollectionReducer(modelName: string, defaultState: any = []): any { - return (state: any = defaultState, action: Action) => { - switch (action.type) { - case `${storeActions['ARRAY.ADD']}_${modelName}`: - return [...state, ...action.payload]; - case `${storeActions['ARRAY.ADD.START']}_${modelName}`: - return [...action.payload, ...state]; - case `${storeActions['ARRAY.ADD.UNIQUE']}_${modelName}`: - return utils.pushUniqueValues(state.slice(), action.payload); - case `${storeActions['ARRAY.DELETE.OBJECT']}_${modelName}`: - return state.filter(instance => instance.id !== action.payload.id); - case `${storeActions['ARRAY.DELETE.PRIMITIVE']}_${modelName}`: - return state.filter(item => item !== action.payload); - case `${storeActions['ARRAY.CLEAR']}_${modelName}`: - return []; - case `${storeActions['ARRAY.MAP']}_${modelName}`: - return state.map(action.payload.modifier); - default: - return state; - } - }; -} - -export function getObjectReducer(modelName: string, defaultState: any = {}) { - return (state: any = defaultState, action: Action): any => { - switch (action.type) { - case `${storeActions['OBJECT.SET']}_${modelName}`: - return Object.assign({}, state, action.payload); - default: - return state; - } - }; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/user-config.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/user-config.ts deleted file mode 100644 index f52761c1712..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/user-config.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
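
A minimal sketch of how the deleted reducer factory and collection service agree on action types. The 'serviceLogs' model name comes from the AppStore interface above; the sample payload is illustrative.

// The action type is composed as `${operation}_${modelName}`, so the reducer produced by
// getCollectionReducer('serviceLogs') reacts to 'ADD_serviceLogs', 'CLEAR_serviceLogs', etc.
import {getCollectionReducer} from '@app/classes/models/store';

const serviceLogsReducer = getCollectionReducer('serviceLogs', []);

const addAction = {type: 'ADD_serviceLogs', payload: [{id: 'log-0'}]};
const clearAction = {type: 'CLEAR_serviceLogs'};

let state = serviceLogsReducer([], addAction);  // [{id: 'log-0'}]
state = serviceLogsReducer(state, clearAction); // []
// A CollectionModelService created with the modelName 'serviceLogs' dispatches exactly
// these action shapes from addInstances() and clear().
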
- */ - -export interface UserConfig { - id: string; - userName: string; - filtername: string; - values: string; - shareNameList: string[]; - rowType: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/object.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/object.ts deleted file mode 100644 index 2cb39b19e69..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/object.ts +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {LogLevel} from '@app/classes/string'; - -export type HomogeneousObject = {[key: string]: T}; - -export interface LogLevelObject { - name: LogLevel; - label: string; - color: string; -} - -/** - * This is an interface for the service and audit log fields. - */ -export interface LogField { - group?: string; // eg.: HDFS, Ambari, etc this prop is only used in Audit logs - label: string; - name: string; - filterable: boolean; // it can be used in a filter query - visible: boolean; // visible by default in the log list -} - -/** - * This is an interface for the service and audit log fields. - */ -export interface AuditFieldsDefinitionSet { - defaults: LogField[], - overrides: { - [key: string]: LogField[] - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-graph-query-params.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-graph-query-params.spec.ts deleted file mode 100644 index 8951dff85e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-graph-query-params.spec.ts +++ /dev/null @@ -1,203 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {AuditLogsGraphQueryParams} from './audit-logs-graph-query-params'; - -describe('AuditLogsGraphQueryParams', () => { - - describe('constructor', () => { - const cases = [ - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:00:00.100Z' - }, - unit: '+100MILLISECOND', - title: 'less than 1s' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:00:01Z' - }, - unit: '+100MILLISECOND', - title: '1s' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:00:20Z' - }, - unit: '+500MILLISECOND', - title: 'between 1s and 30s' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:00:20Z' - }, - unit: '+500MILLISECOND', - title: '30s' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:00:40Z' - }, - unit: '+2SECOND', - title: 'between 30s and 1m' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:01:00Z' - }, - unit: '+2SECOND', - title: '1m' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:20:00Z' - }, - unit: '+1MINUTE', - title: 'between 1m and 30m' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T00:30:00Z' - }, - unit: '+2MINUTE', - title: '30m' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T01:00:00Z' - }, - unit: '+2MINUTE', - title: 'between 30m and 2h' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T02:00:00Z' - }, - unit: '+5MINUTE', - title: '2h' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T04:00:00Z' - }, - unit: '+5MINUTE', - title: 'between 2h and 6h' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T06:00:00Z' - }, - unit: '+10MINUTE', - title: '6h' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T08:00:00Z' - }, - unit: '+10MINUTE', - title: 'between 6h and 10h' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T10:00:00Z' - }, - unit: '+10MINUTE', - title: '10h' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-01T22:00:00Z' - }, - unit: '+1HOUR', - title: 'between 10h and 1d' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-02T00:00:00Z' - }, - unit: '+1HOUR', - title: '1d' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-10T00:00:00Z' - }, - unit: '+8HOUR', - title: 'between 1d and 15d' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-01-16T00:00:00Z' - }, - unit: '+1DAY', - title: '15d' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-03-31T00:00:00Z' - }, - unit: '+1DAY', - title: 'between 15d and 3M' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-04-01T00:00:00Z' - }, - unit: '+1DAY', - title: '3M' - }, - { - options: { - from: '2017-01-01T00:00:00Z', - to: '2017-05-01T00:00:00Z' - }, - unit: '+1MONTH', - title: 'over 3M' - } - ]; - - cases.forEach(test => { - it(test.title, () => { - const paramsObject = new AuditLogsGraphQueryParams(test.options); - expect(paramsObject.unit).toEqual(test.unit); - }); - }); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-graph-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-graph-query-params.ts deleted file mode 100644 index 7e63acfc8d3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-graph-query-params.ts +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {QueryParams} from '@app/classes/queries/query-params'; - -export class AuditLogsGraphQueryParams extends QueryParams { - constructor(options: AuditLogsGraphQueryParams) { - let unit; - const diffTimeStamp = new Date(options.to).valueOf() - new Date(options.from).valueOf(); - switch (true) { - case diffTimeStamp <= 1000: - unit = '+100MILLISECOND'; - break; - case diffTimeStamp <= 30000: - unit = '+500MILLISECOND'; - break; - case diffTimeStamp <= 60000: - unit = '+2SECOND'; - break; - case diffTimeStamp < 1800000: - unit = '+1MINUTE'; - break; - case diffTimeStamp < 7200000: - unit = '+2MINUTE'; - break; - case diffTimeStamp < 21600000: - unit = '+5MINUTE'; - break; - case diffTimeStamp <= 36000000: - unit = '+10MINUTE'; - break; - case diffTimeStamp <= 86400000: - unit = '+1HOUR'; - break; - case diffTimeStamp < 1296000000: - unit = '+8HOUR'; - break; - case diffTimeStamp <= 7776000000: - unit = '+1DAY'; - break; - default: - unit = '+1MONTH'; - break; - } - options.unit = unit; - super(options); - } - - from: string; - to: string; - unit?: string; - includeQuery?: string; - excludeQuery?: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts deleted file mode 100644 index e9c0149f38a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
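
A usage sketch of the removed query-params class, using one of the ranges exercised by the spec above: a six-hour window resolves to a '+10MINUTE' histogram gap. The dates are illustrative.

// Assumed usage: the constructor derives the gap unit from the requested time span.
import {AuditLogsGraphQueryParams} from '@app/classes/queries/audit-logs-graph-query-params';

const params = new AuditLogsGraphQueryParams({
  from: '2017-01-01T00:00:00Z',
  to: '2017-01-01T06:00:00Z'
});
const gap = params.unit; // '+10MINUTE' (matches the '6h' case in the spec above)
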
- */ - -import {LogsListQueryParams} from '@app/classes/queries/logs-query-params'; - -export class AuditLogsListQueryParams extends LogsListQueryParams { - userList?: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-top-resources-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-top-resources-query-params.ts deleted file mode 100644 index d5e9cc5c979..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-top-resources-query-params.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {LogsQueryParams} from '@app/classes/queries/logs-query-params'; - -export class AuditLogsTopResourcesQueryParams extends LogsQueryParams { - field: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/logs-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/logs-query-params.ts deleted file mode 100644 index 9e7f5d01e66..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/logs-query-params.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {SortingType} from '@app/classes/string'; -import {QueryParams} from '@app/classes/queries/query-params'; - -const defaultListParams = { - page: '0', - pageSize: '10' -}; - -export class LogsQueryParams extends QueryParams { - clusters?: string; - mustBe?: string; - mustNot?: string; - includeQuery?: string; - excludeQuery?: string; - from?: string; - to?: string; -} - -export class LogsListQueryParams extends LogsQueryParams { - constructor(options: LogsListQueryParams) { - let finalParams = Object.assign({}, defaultListParams, options); - const page = parseInt(finalParams.page), - pageSize = parseInt(finalParams.pageSize); - finalParams.startIndex = isNaN(page) || isNaN(pageSize) ? 
'' : (page * pageSize).toString(); - super(finalParams); - } - page: string; - pageSize: string; - startIndex: string; - sortBy?: string; - sortType?: SortingType; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/query-params.ts deleted file mode 100644 index 83c326117c5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/query-params.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export class QueryParams { - constructor(options: QueryParams) { - Object.assign(this, options); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.ts deleted file mode 100644 index ff83a8bfb9b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {AuditLogsGraphQueryParams} from '@app/classes/queries/audit-logs-graph-query-params'; - -export class ServiceLogsHistogramQueryParams extends AuditLogsGraphQueryParams { - clusters?: string; - level?: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-query-params.ts deleted file mode 100644 index ad8844908c0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-query-params.ts +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
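
A usage sketch of the removed list-params class: the zero-based page number and the page size are multiplied into the startIndex offset, falling back to '' when either value is not numeric. The values are illustrative, and the cast only stands in for the fields the constructor computes itself.

// Assumed usage (illustrative values): page '2' with pageSize '25' starts at row 50.
import {LogsListQueryParams} from '@app/classes/queries/logs-query-params';

const params = new LogsListQueryParams({page: '2', pageSize: '25'} as LogsListQueryParams);
const startIndex = params.startIndex; // '50' -> rows 50..74 of the result set
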
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {LogsListQueryParams} from '@app/classes/queries/logs-query-params'; - -export class ServiceLogsQueryParams extends LogsListQueryParams { - level?: string; - file_name?: string; - bundle_id?: string; - hostList?: string; - find?: string; - sourceLogId?: string; - keywordType?: string; - token?: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts deleted file mode 100644 index 071a443abb0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {QueryParams} from '@app/classes/queries/query-params'; -import {ScrollType} from '@app/classes/string'; - -const defaultParams = { - numberRows: '10', - scrollType: '' -}; - -export class ServiceLogsTruncatedQueryParams extends QueryParams { - constructor(options: ServiceLogsTruncatedQueryParams) { - const finalParams = Object.assign({}, defaultParams, options); - super(finalParams); - } - id: string; - host_name: string; - component_name: string; - numberRows: string; - scrollType: ScrollType; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-injector.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-injector.ts deleted file mode 100644 index 6db65cdfff3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-injector.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injector} from '@angular/core'; - -export class ServiceInjector { - static injector: Injector; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.ts deleted file mode 100644 index 15c05fb23fb..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface ServiceLogContextEntry { - id: string; - time: number; - level: string; - message: string; - fileName: string | null; - lineNumber: number | null; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/settings.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/settings.ts deleted file mode 100644 index dcef45783f8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/settings.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {HomogeneousObject} from '@app/classes/object'; - -export type LevelOverridesConfig = HomogeneousObject<{ - defaults: boolean; - overrides: boolean; -}> - -export type LogIndexFilterComponentConfig = LevelOverridesConfig & { - name: string; - label: string; - hosts: string; - expiryTime: string | null; - hasOverrides?: boolean; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts deleted file mode 100644 index db1311faf7d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export type LogsType = 'auditLogs' | 'serviceLogs'; - -export type TimeRangeType = 'CURRENT' | 'LAST' | 'PAST'; - -export type SortingType = 'asc' | 'desc'; - -export type ScrollType = 'before' | 'after' | ''; - -export type LogLevel = 'FATAL' | 'ERROR' | 'WARN' | 'INFO' | 'DEBUG' | 'TRACE' | 'UNKNOWN'; - -export type DataAvailability = 'NOT_AVAILABLE' | 'LOADING' | 'AVAILABLE' | 'ERROR'; -export enum DataAvailabilityValues { - NOT_AVAILABLE = 'NOT_AVAILABLE', - LOADING = 'LOADING', - AVAILABLE = 'AVAILABLE', - ERROR = 'ERROR' -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.html deleted file mode 100644 index 1b134c32be0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.html +++ /dev/null @@ -1,25 +0,0 @@ - - -
-
- -
- -
-
-
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.less deleted file mode 100644 index 05a7bb40b6e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.less +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -.panel-body { - position: relative; - - .accordion-toggle { - position: absolute; - - // TODO get rid of magic numbers, base on actual design - right: 10px; - top: 15px; - - .toggle-icon { - .clickable-item; - - &:before { - content: '\F077'; - } - - &.collapsed:before { - content: '\F078'; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.spec.ts deleted file mode 100644 index 3706d51c8e1..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.spec.ts +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; - -import {AccordionPanelComponent} from './accordion-panel.component'; - -describe('AccordionPanelComponent', () => { - let component: AccordionPanelComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [AccordionPanelComponent] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(AccordionPanelComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.ts deleted file mode 100644 index 131edcda072..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/accordion-panel/accordion-panel.component.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input, ContentChild, TemplateRef} from '@angular/core'; - -@Component({ - selector: 'accordion-panel', - templateUrl: './accordion-panel.component.html', - styleUrls: ['./accordion-panel.component.less'] -}) -export class AccordionPanelComponent { - - @Input() - toggleId: string; - - @ContentChild(TemplateRef) - template; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.html deleted file mode 100644 index 89703166132..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.html +++ /dev/null @@ -1,60 +0,0 @@ - - - - - - - - - - - - - -
- -
-
-
{{'logIndexFilter.caption' | translate}}
-
- -
-
-
- - -
-
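The log index filter dialog referenced by the caption above is shown or hidden based on a `logIndexFilterSettings` query parameter: `ActionMenuComponent`, removed later in this diff, maps the route's query params to a boolean stream with a regular-expression check and `distinctUntilChanged`. A minimal standalone sketch of that mapping, with a plain `BehaviorSubject` standing in for Angular's `ActivatedRoute.queryParams` (the subject and the sample values below are hypothetical, not part of the original sources):

```typescript
// Sketch only: reproduces the isLogIndexFilterDisplayed$ mapping from
// ActionMenuComponent with a BehaviorSubject instead of ActivatedRoute.queryParams.
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/distinctUntilChanged';

// Hypothetical query-param stream.
const queryParams$ = new BehaviorSubject<{ [key: string]: string }>({});

const isLogIndexFilterDisplayed$ = queryParams$
  .map((params): boolean => /^(show|yes|true|1)$/.test(params['logIndexFilterSettings']))
  .distinctUntilChanged();

isLogIndexFilterDisplayed$.subscribe((isDisplayed: boolean) => console.log(isDisplayed));
// logs false immediately (initial BehaviorSubject value has no logIndexFilterSettings)
queryParams$.next({ logIndexFilterSettings: 'show' }); // logs true
queryParams$.next({ logIndexFilterSettings: 'true' }); // no emission: value is still true
queryParams$.next({});                                 // logs false
```

Because the stream is piped through `distinctUntilChanged`, successive parameter values that map to the same boolean (for example `show` followed by `true`) produce no extra emissions.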
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.less deleted file mode 100644 index a8c6e05d61f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.less +++ /dev/null @@ -1,120 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/variables'; - -:host { - display: block; - menu-button { - margin: 0 1em; - /deep/ .stop-icon { - color: @exclude-color; - } - &.history-menu { - /deep/ ul { - li:not(.selection-all) { - margin: 0; - overflow: hidden; - position: relative; - transition: background-color 300ms ease-in, opacity 300ms ease-in, height 100ms 400ms ease-in; - &:before { - border-left: 1px solid darken(@unknown-color, 25%); - bottom: 0; - content: ""; - display: block; - left: 12px; - position: absolute; - top: 0; - } - &:after { - background: #fff; - border: 1px solid darken(@unknown-color, 25%); - border-radius: 100%; - content: ""; - height: 12px; - left: 7px; - position: absolute; - top: 6px; - transition: background-color 300ms; - width: 12px; - } - - .list-item-label.label-container { - border-radius: 3px; - display: flex; - margin: 0 3px 0 25px; - padding: 3px 25px 3px 1em; - .item-label-text { - flex-grow: 1; - padding-right: 1em; - } - /deep/ history-item-controls { - float: none; - justify-self: right; - } - } - - &.active > a, &:hover { - color: #262626; - text-decoration: none; - background-color: transparent; - .list-item-label.label-container { - background-color: #f5f5f5; - } - } - } - li:not(.selection-all):first-child { - &:before { - top: 50%; - } - } - li:not(.selection-all):last-child { - &:before { - bottom: 50%; - } - } - li:not(.selection-all):hover { - &:after { - background: @unknown-color; - } - } - } - } - } - /deep/ .modal-body { - min-height: 25vh; - padding-top: 0; - } - - /deep/ modal-dialog.log-index-filter .modal-header { - padding-top: 0; - padding-bottom: 15px; - header { - float: right; - } - dropdown-button button { - padding-left: 0; - padding-right: 0; - } - } - /deep/ modal-dialog.log-index-filter .modal-header { - min-height: 4rem; - header { - margin-left: auto; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.spec.ts deleted file mode 100644 index 4d84bb7dee7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.spec.ts +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule, ReactiveFormsModule} from '@angular/forms'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {HistoryManagerService} from '@app/services/history-manager.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UserSettingsService} from '@app/services/user-settings.service'; -import {UtilsService} from '@app/services/utils.service'; -import {ModalDialogComponent} from '@app/modules/shared/components/modal-dialog/modal-dialog.component'; -import {TimerSecondsPipe} from '@app/pipes/timer-seconds.pipe'; -import {ComponentLabelPipe} from '@app/pipes/component-label'; - -import {ActionMenuComponent} from './action-menu.component'; -import { LogIndexFilterComponent } from '@app/components/log-index-filter/log-index-filter.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from 
'@modules/shared/services/notification.service'; - -import { DataAvailabilityStatesStore, dataAvailabilityStates } from '@app/modules/app-load/stores/data-availability-state.store'; - -describe('ActionMenuComponent', () => { - let component: ActionMenuComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - FormsModule, - ReactiveFormsModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs, - dataAvailabilityStates - }) - ], - declarations: [ - LogIndexFilterComponent, - ActionMenuComponent, - ModalDialogComponent, - TimerSecondsPipe, - ComponentLabelPipe - ], - providers: [ - ...MockHttpRequestModules, - HistoryManagerService, - LogsContainerService, - UserSettingsService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService, - DataAvailabilityStatesStore - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ActionMenuComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.ts deleted file mode 100644 index a293e95e989..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/action-menu/action-menu.component.ts +++ /dev/null @@ -1,171 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { Component, OnInit, OnDestroy } from '@angular/core'; -import { FormGroup } from '@angular/forms'; -import { ActivatedRoute, Router } from '@angular/router'; - -import { Observable } from 'rxjs/Observable'; -import { BehaviorSubject } from 'rxjs/BehaviorSubject'; -import { Subscription } from 'rxjs/Subscription'; - -import { LogsContainerService } from '@app/services/logs-container.service'; -import { HistoryManagerService } from '@app/services/history-manager.service'; -import { UserSettingsService } from '@app/services/user-settings.service'; -import { ListItem } from '@app/classes/list-item'; -import { ClustersService } from '@app/services/storage/clusters.service'; -import { UtilsService } from '@app/services/utils.service'; - -@Component({ - selector: 'action-menu', - templateUrl: './action-menu.component.html', - styleUrls: ['./action-menu.component.less'] -}) -export class ActionMenuComponent implements OnInit, OnDestroy { - - isLogIndexFilterDisplayed$: Observable = this.route.queryParams - .map((params) => { - return params; - }) - .map((params): boolean => /^(show|yes|true|1)$/.test(params.logIndexFilterSettings)) - .distinctUntilChanged(); - - settingsForm: FormGroup = this.settings.settingsFormGroup; - - isModalSubmitDisabled = true; - - clustersListItems$: Observable = this.clustersService.getAll() - .map((clusterNames: string[]): ListItem[] => clusterNames.map(this.utilsService.getListItemFromString)) - .map((clusters: ListItem[]) => { - if (clusters.length && !clusters.some((item: ListItem) => item.isChecked)) { - clusters[0].isChecked = true; - } - return clusters; - }); - - selectedClusterName$: BehaviorSubject = new BehaviorSubject(''); - - subscriptions: Subscription[] = []; - - constructor( - private logsContainer: LogsContainerService, - private historyManager: HistoryManagerService, - private settings: UserSettingsService, - private route: ActivatedRoute, - private router: Router, - private clustersService: ClustersService, - private utilsService: UtilsService - ) { - } - - ngOnInit() { - this.subscriptions.push( - this.selectedClusterName$.subscribe( - (clusterName: string) => this.setModalSubmitDisabled(!(!!clusterName)) - ) - ); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - get undoItems(): ListItem[] { - return this.historyManager.undoItems; - } - - get redoItems(): ListItem[] { - return this.historyManager.redoItems; - } - - get historyItems(): ListItem[] { - return this.historyManager.activeHistory; - } - - get captureSeconds(): number { - return this.logsContainer.captureSeconds; - } - - setModalSubmitDisabled(isDisabled: boolean): void { - this.isModalSubmitDisabled = isDisabled; - } - - undoLatest(): void { - if (this.undoItems.length) { - this.historyManager.undo(this.undoItems[0]); - } - } - - redoLatest(): void { - if (this.redoItems.length) { - this.historyManager.redo(this.redoItems[0]); - } - } - - undo(item: ListItem): void { - this.historyManager.undo(item); - } - - redo(item: ListItem): void { - this.historyManager.redo(item); - } - - refresh(): void { - this.logsContainer.loadLogs(); - } - - onSelectCluster(cluster: string) { - this.selectedClusterName$.next(cluster); - } - - openLogIndexFilter(): void { - this.router.navigate(['.'], { - queryParamsHandling: 'merge', - queryParams: {logIndexFilterSettings: 'show'}, - relativeTo: this.route.root.firstChild - }); - } - - closeLogIndexFilter(): void { - this.route.queryParams.first().subscribe((queryParams) => { 
- const {logIndexFilterSettings, ...params} = queryParams; - this.router.navigate(['.'], { - queryParams: params, - relativeTo: this.route.root.firstChild - }); - }); - } - - saveLogIndexFilter(): void { - this.closeLogIndexFilter(); - this.settings.saveIndexFilterConfig(); - } - - startCapture(): void { - this.logsContainer.startCaptureTimer(); - } - - stopCapture(): void { - this.logsContainer.stopCaptureTimer(); - } - - cancelCapture(): void { - this.logsContainer.cancelCapture(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.html deleted file mode 100644 index c9f83131c19..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.html +++ /dev/null @@ -1,27 +0,0 @@ - - -
- -
- - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.less deleted file mode 100644 index b9eb9077655..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.less +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../modules/shared/main'; -:host { - background-color: @main-background-color; // TODO implement actual color - display: flex; - flex-direction: column; - line-height: @default-line-height; - min-height: 100vh; - - .navbar { - align-items: center; - background-color: @navbar-background-color; - border-radius: 0; - color: #fff; - margin-bottom: 0; - .stretch-flex; - h1 { - align-items: center; - align-self: stretch; - background: @navbar-logo-background-color url('/resources/assets/images/ambari-logo.png') no-repeat; - background-position: .5em center; - background-size: 25px; - display: flex; - font-size: 20px; - margin: 0 2em 0 0; - padding: 0 2em; - &.full-flex-width { - flex-basis: 100%; - } - } - breadcrumbs { - font-size: 20px; - .stretch-flex; - } - - /deep/ top-menu { - margin-left: auto; - margin-right: 5em; - } - &:not(.authorized) { - h1 { - margin: 0; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts deleted file mode 100644 index 2e8d75a32ef..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {TestBed, async} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {TranslationModules} from '@app/test-config.spec'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {HttpClientService} from '@app/services/http-client.service'; - -import {AppComponent} from './app.component'; -import {BreadcrumbsComponent} from '@app/components/breadrumbs/breadcrumbs.component'; -import {RouterTestingModule} from '@angular/router/testing'; -import {TypeaheadModule} from 'ngx-bootstrap'; - -describe('AppComponent', () => { - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => {} - }; - } - }; - TestBed.configureTestingModule({ - declarations: [AppComponent, BreadcrumbsComponent], - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - appState - }), - ...TranslationModules, - TypeaheadModule.forRoot() - ], - providers: [ - AppStateService, - { - provide: HttpClientService, - useValue: httpClient - } - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }).compileComponents(); - })); - - it('should create the app', () => { - const fixture = TestBed.createComponent(AppComponent); - const app = fixture.debugElement.componentInstance; - expect(app).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.ts deleted file mode 100644 index 09a82c2028c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.ts +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component} from '@angular/core'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {Observable} from 'rxjs/Observable'; -import {Options} from 'angular2-notifications/src/options.type'; -import {notificationIcons} from '@modules/shared/services/notification.service'; -import { DataAvailability, DataAvailabilityValues } from '@app/classes/string'; - -@Component({ - selector: 'app-root', - templateUrl: './app.component.html', - styleUrls: ['./app.component.less', '../modules/shared/notifications.less'] -}) -export class AppComponent { - - isAuthorized$: Observable = this.appState.getParameter('isAuthorized'); - isBaseDataAvailable$: Observable = this.appState.getParameter('baseDataSetState') - .map((dataSetState: DataAvailability) => dataSetState === DataAvailabilityValues.AVAILABLE); - - notificationServiceOptions: Options = { - timeOut: 2000, - showProgressBar: true, - pauseOnHover: true, - preventLastDuplicates: 'visible', - theClass: 'app-notification', - icons: notificationIcons, - position: ['top', 'left'] - }; - - constructor( - private appState: AppStateService - ) {} - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.html deleted file mode 100644 index 6ebb92e28e4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.html +++ /dev/null @@ -1,36 +0,0 @@ - - - - - -
- - - - - - - -
-
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.spec.ts deleted file mode 100644 index 51d1fdae0a7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.spec.ts +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {TabsComponent} from '@app/components/tabs/tabs.component'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {AuditLogsEntriesComponent} from './audit-logs-entries.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import 
{NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('AuditLogsEntriesComponent', () => { - let component: AuditLogsEntriesComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - AuditLogsEntriesComponent, - TabsComponent - ], - imports: [ - RouterTestingModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }), - ], - providers: [ - ...MockHttpRequestModules, - LogsContainerService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AuditLogsGraphDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(AuditLogsEntriesComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.ts deleted file mode 100644 index c0ab63d9bd7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-entries/audit-logs-entries.component.ts +++ /dev/null @@ -1,133 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, Input} from '@angular/core'; -import {FormGroup} from '@angular/forms'; -import {GraphEmittedEvent} from '@app/classes/graph'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject} from '@app/classes/object'; -import {AuditLog} from '@app/classes/models/audit-log'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {LogsContainerService} from '@app/services/logs-container.service'; - -@Component({ - selector: 'audit-logs-entries', - templateUrl: './audit-logs-entries.component.html' -}) -export class AuditLogsEntriesComponent { - - constructor(private logsContainer: LogsContainerService) { - } - - @Input() - logs: AuditLog[] = []; - - @Input() - columns: ListItem[] = []; - - @Input() - filtersForm: FormGroup; - - @Input() - totalCount: number = 0; - - tabs: LogTypeTab[] = [ - { - id: 'summary', - isActive: true, - label: 'common.summary' - }, - { - id: 'logs', - isActive: false, - label: 'common.logs' - } - ]; - - /** - * Id of currently active tab (Summary or Logs) - * @type {string} - */ - activeTab: string = 'summary'; - - /** - * 'left' CSS property value for context menu dropdown - * @type {number} - */ - contextMenuLeft: number = 0; - - /** - * 'top' CSS property value for context menu dropdown - * @type {number} - */ - contextMenuTop: number = 0; - - readonly usersGraphTitleParams = { - number: this.logsContainer.topUsersCount - }; - - readonly resourcesGraphTitleParams = { - number: this.logsContainer.topResourcesCount - }; - - private readonly resourceFilterParameterName: string = 'resource'; - - /** - * Text for filtering be resource type (set from Y axis tick of Resources chart) - * @type {string} - */ - private selectedResource: string = ''; - - get topResourcesGraphData(): HomogeneousObject> { - return this.logsContainer.topResourcesGraphData; - } - - get topUsersGraphData(): HomogeneousObject> { - return this.logsContainer.topUsersGraphData; - } - - get isContextMenuDisplayed(): boolean { - return Boolean(this.selectedResource); - } - - get contextMenuItems(): ListItem[] { - return this.logsContainer.queryContextMenuItems; - } - - setActiveTab(tab: LogTypeTab): void { - this.activeTab = tab.id; - } - - showContextMenu(event: GraphEmittedEvent): void { - this.contextMenuLeft = event.nativeEvent.clientX; - this.contextMenuTop = event.nativeEvent.clientY; - this.selectedResource = event.tick; - } - - updateQuery(event: ListItem): void { - this.logsContainer.queryParameterAdd.next({ - name: this.resourceFilterParameterName, - value: this.selectedResource, - isExclude: event.value - }); - } - - onContextMenuDismiss(): void { - this.selectedResource = ''; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.html deleted file mode 100644 index f9707261a32..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.html +++ /dev/null @@ -1,55 +0,0 @@ - - - -
- -
-
-
- - - - - - - - - - - - - - - - - - - - - - -
{{getColumnByName('evtTime').label | translate}}{{column.label | translate}}
{{log.evtTime | amTz: timeZone | amDateFormat: timeFormat}}{{log[column.value]}}
- -
-
-
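The header and cell bindings above resolve their columns through `getColumnByName`, which `AuditLogsTableComponent` (removed further down in this diff) implements as a lookup over its `{label, value}` column descriptors; the accompanying spec exercises both the hit and the miss case. A self-contained sketch of the same lookup, reusing the spec's sample columns (the local `ListItem` interface here is a simplified stand-in for the app's `ListItem` class):

```typescript
// Sketch of the column lookup used by the audit logs table: columns are matched
// on their `value` field, and an unknown name yields undefined (as in the spec).
interface ListItem {
  label: string;
  value: string;
}

const columns: ListItem[] = [
  { label: 'l0', value: 'v0' },
  { label: 'l1', value: 'v1' }
];

function getColumnByName(name: string): ListItem | undefined {
  return columns.find((column: ListItem): boolean => column.value === name);
}

console.log(getColumnByName('v1')); // { label: 'l1', value: 'v1' }
console.log(getColumnByName('l1')); // undefined, since labels are not matched, only values
```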
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.less deleted file mode 100644 index d9b0a104469..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.less +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -th { - text-transform: uppercase; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.spec.ts deleted file mode 100644 index f65180d1649..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.spec.ts +++ /dev/null @@ -1,164 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule, ReactiveFormsModule} from '@angular/forms'; -import {StoreModule} from '@ngrx/store'; -import {MomentModule} from 'angular2-moment'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; -import {PaginationComponent} from '@app/components/pagination/pagination.component'; -import {DropdownListComponent} from '@modules/shared/components/dropdown-list/dropdown-list.component'; - -import {AuditLogsTableComponent} from './audit-logs-table.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('AuditLogsTableComponent', () => { - let component: AuditLogsTableComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - AuditLogsTableComponent, - PaginationComponent, - DropdownListComponent - ], - imports: [ - RouterTestingModule, - FormsModule, - ReactiveFormsModule, - MomentModule, - MomentTimezoneModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - appState, - appSettings, - tabs, - clusters, - components, - hosts - }) - ], - providers: [ - ...MockHttpRequestModules, - LogsContainerService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - 
ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - AppStateService, - AppSettingsService, - TabsService, - ClustersService, - ComponentsService, - HostsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(AuditLogsTableComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - describe('#getColumnByName()', () => { - const cases = [ - { - name: 'v1', - result: { - label: 'l1', - value: 'v1' - }, - title: 'item is present' - }, - { - name: 'l1', - result: undefined, - title: 'item is absent' - } - ]; - - beforeEach(() => { - component.columns = [ - { - label: 'l0', - value: 'v0' - }, - { - label: 'l1', - value: 'v1' - } - ]; - }); - - cases.forEach(test => { - it(test.title, () => { - expect(component.getColumnByName(test.name)).toEqual(test.result); - }); - }); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.ts deleted file mode 100644 index fa5b1c55a3f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/audit-logs-table/audit-logs-table.component.ts +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component} from '@angular/core'; -import {ListItem} from '@app/classes/list-item'; -import {LogsTableComponent} from '@app/classes/components/logs-table/logs-table-component'; -import {LogsContainerService} from '@app/services/logs-container.service'; - -@Component({ - selector: 'audit-logs-table', - templateUrl: './audit-logs-table.component.html', - styleUrls: ['./audit-logs-table.component.less'] -}) -export class AuditLogsTableComponent extends LogsTableComponent { - - constructor(private logsContainer: LogsContainerService) { - super(); - } - - readonly customProcessedColumns: string[] = ['evtTime']; - - readonly timeFormat: string = 'YYYY-MM-DD HH:mm:ss,SSS'; - - private readonly logsType: string = 'auditLogs'; - - get filters(): any { - return this.logsContainer.filters; - } - - get timeZone(): string { - return this.logsContainer.timeZone; - } - - getColumnByName(name: string): ListItem | undefined { - return this.columns.find((column: ListItem): boolean => column.value === name); - } - - updateSelectedColumns(columns: string[]): void { - this.logsContainer.updateSelectedColumns(columns, this.logsType); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.html deleted file mode 100644 index 05ed10b9bed..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.html +++ /dev/null @@ -1,29 +0,0 @@ - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.less deleted file mode 100644 index 7e98fcee029..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.less +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - :host { - .breadcrumbs { - align-items: center; - display: flex; - color: #fff; - font-size: 20px; - a, a:visited, a:active, a:hover { - color: #fff; - text-decoration: none; - } - .breadcrumb-separator { - color: #999; - } - .breadcrumb-current { - color: #ccc; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.spec.ts deleted file mode 100644 index 2fc09a3e681..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.spec.ts +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; - -import { BreadcrumbsComponent } from './breadcrumbs.component'; -import {TranslationModules} from '@app/test-config.spec'; -import {FormsModule} from '@angular/forms'; -import {RouterTestingModule} from '@angular/router/testing'; - -describe('BreadcrumbsComponent', () => { - let component: BreadcrumbsComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - ...TranslationModules, - FormsModule - ], - declarations: [ BreadcrumbsComponent ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(BreadcrumbsComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.ts deleted file mode 100644 index a7eb0f96366..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/breadrumbs/breadcrumbs.component.ts +++ /dev/null @@ -1,103 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {Component, Input, OnDestroy, OnInit} from '@angular/core'; -import {Subscription} from 'rxjs/Subscription'; -import {ActivatedRouteSnapshot, NavigationEnd, Router, RoutesRecognized} from '@angular/router'; -import {Title} from '@angular/platform-browser'; -import {TranslateService} from '@ngx-translate/core'; -import {Observable} from 'rxjs/Observable'; - -export interface BreadCrumb { - text: string; - path: string[]; -} - -@Component({ - selector: 'breadcrumbs', - templateUrl: './breadcrumbs.component.html', - styleUrls: ['./breadcrumbs.component.less'] -}) -export class BreadcrumbsComponent implements OnInit, OnDestroy { - - private subscriptions: Subscription[] = []; - - private crumbs: BreadCrumb[]; - - @Input() - addRootFirst: boolean = true; - - constructor( - private router: Router, - private titleService: Title, - private translateService: TranslateService - ) { } - - ngOnInit() { - this.subscriptions.push( - this.router.events.filter((event) => event instanceof NavigationEnd).subscribe(this.onNavigationEnd) - ); - this.onNavigationEnd(); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - getCrumbsFromRouterStateSnapshot(routerStateSnapshot: ActivatedRouteSnapshot): BreadCrumb[] { - let level = routerStateSnapshot; - const path: string[] = []; - const breadcrumbs: BreadCrumb[] = []; - while (level) { - if (level.url.length) { - path.push( - (level.parent ? '' : '/') // start with trailing slash if this is the root - + level.url.reduce((url, segment) => url += ('/' + segment.path), '') // build up the url by its segments - ); - if (level.data.breadcrumbs) { - let crumbs = level.data.breadcrumbs; - if (!Array.isArray(crumbs)) { - crumbs = [crumbs]; - } - crumbs.forEach(breadcrumbTitle => breadcrumbs.push({ - text: breadcrumbTitle, - path: path - })); - } - } - level = level.firstChild; - } - return breadcrumbs; - } - - setPageTite(pageTitle) { - Observable.combineLatest( - this.translateService.get('common.title'), - pageTitle ? this.translateService.get(pageTitle) : Observable.of('') - ).first().subscribe(([commonTitle, pageTite]) => { - this.titleService.setTitle(pageTitle ? `${commonTitle} - ${pageTite}` : commonTitle); - }); - } - - onNavigationEnd = (): void => { - this.crumbs = this.getCrumbsFromRouterStateSnapshot(this.router.routerState.snapshot.root); - if (this.crumbs.length) { - this.setPageTite(this.crumbs[this.crumbs.length - 1].text); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.html deleted file mode 100644 index a56ff29fb23..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.html +++ /dev/null @@ -1,19 +0,0 @@ - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.less deleted file mode 100644 index 6bd86273d04..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.less +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -filter-dropdown { - display: inline-block; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.spec.ts deleted file mode 100644 index 8a6cbc62547..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.spec.ts +++ /dev/null @@ -1,128 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; - -import { ClusterFilterComponent } from './cluster-filter.component'; -import {FormsModule, ReactiveFormsModule} from '@angular/forms'; -import { - getCommonTestingBedConfiguration, MockHttpRequestModules, - TranslationModules -} from '@app/test-config.spec'; -import {FilterDropdownComponent} from '@modules/shared/components/filter-dropdown/filter-dropdown.component'; -import {DropdownListComponent} from '@modules/shared/components/dropdown-list/dropdown-list.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {StoreModule} from '@ngrx/store'; -import {auditLogs, AuditLogsService} from '@app/services/storage/audit-logs.service'; -import {serviceLogsTruncated, ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service'; -import {components, ComponentsService} from '@app/services/storage/components.service'; -import {UtilsService} from '@app/services/utils.service'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {tabs, TabsService} from '@app/services/storage/tabs.service'; -import {serviceLogs, ServiceLogsService} from '@app/services/storage/service-logs.service'; -import {hosts, HostsService} from '@app/services/storage/hosts.service'; -import {MomentModule} from 'angular2-moment'; -import {auditLogsGraphData, AuditLogsGraphDataService} from '@app/services/storage/audit-logs-graph-data.service'; -import {serviceLogsHistogramData, ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service'; -import {clusters, ClustersService} from '@app/services/storage/clusters.service'; -import {auditLogsFields, AuditLogsFieldsService} from '@app/services/storage/audit-logs-fields.service'; -import {appSettings, AppSettingsService} from '@app/services/storage/app-settings.service'; -import {appState, AppStateService} from '@app/services/storage/app-state.service'; -import {serviceLogsFields, ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import { DataAvailabilityStatesStore, dataAvailabilityStates } from '@app/modules/app-load/stores/data-availability-state.store'; - -describe('ClusterFilterComponent', () => { - let component: ClusterFilterComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule(getCommonTestingBedConfiguration({ - declarations: [ - FilterDropdownComponent, - DropdownListComponent, - ClusterFilterComponent - ], - imports: [ - RouterTestingModule, - FormsModule, - ReactiveFormsModule, - MomentModule, - MomentTimezoneModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - appState, - appSettings, - tabs, - clusters, - components, - hosts, - dataAvailabilityStates - }) - ], - providers: [ - 
...MockHttpRequestModules, - LogsContainerService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - AppStateService, - AppSettingsService, - TabsService, - ClustersService, - ComponentsService, - HostsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService, - DataAvailabilityStatesStore - ] - })) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ClusterFilterComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.ts deleted file mode 100644 index 9921d417567..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/cluster-filter/cluster-filter.component.ts +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {Component, OnDestroy, OnInit, ViewChild} from '@angular/core'; -import 'rxjs/add/operator/switchMap'; -import {Observable} from 'rxjs/Observable'; -import {ActivatedRouteSnapshot, Router, NavigationEnd} from '@angular/router'; -import {Subscription} from 'rxjs/Subscription'; - -import {ClustersService} from '@app/services/storage/clusters.service'; -import {UtilsService} from '@app/services/utils.service'; -import {ListItem} from '@app/classes/list-item'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {BehaviorSubject} from 'rxjs/BehaviorSubject'; -import {FilterDropdownComponent} from '@modules/shared/components/filter-dropdown/filter-dropdown.component'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {DataAvailabilityValues} from '@app/classes/string'; -import { DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; -import { DataStateStoreKeys } from '@app/modules/app-load/services/app-load.service'; - -@Component({ - selector: 'cluster-filter', - templateUrl: './cluster-filter.component.html', - styleUrls: ['./cluster-filter.component.less'] -}) -export class ClusterFilterComponent implements OnInit, OnDestroy { - - @ViewChild('filterDropdown', { - read: FilterDropdownComponent - }) - filterDropdown: FilterDropdownComponent; - - private clusterSelectionStoreKey: BehaviorSubject = new BehaviorSubject(''); - - private clustersAsListItems$: Observable = this.clusterSelectionStoreKey.distinctUntilChanged() - .switchMap((selectionStoreKey: string) => Observable.combineLatest( - this.clusterSelectionStoreService.getParameter(selectionStoreKey), - this.clusterStoreService.getAll() - ).map(([selections, clusters]) => { - const selectedClusters = selections ? (Array.isArray(selections) ? 
selections : [selections]) : selections; - return clusters.map((cluster) => Object.assign(this.utilsService.getListItemFromString(cluster), { - isChecked: selectedClusters && selectedClusters.indexOf(cluster) > -1 - }) - ); - }) - ).startWith([]); - - private readonly defaultUseMultiSelection = true; - private useMultiSelection: BehaviorSubject = new BehaviorSubject(false); - - private subscriptions: Subscription[] = []; - - constructor( - private clusterStoreService: ClustersService, - private utilsService: UtilsService, - private router: Router, - private clusterSelectionStoreService: ClusterSelectionService, - private routingUtilsService: RoutingUtilsService, - private dataAvaibilityStateStore: DataAvailabilityStatesStore - ) { } - - ngOnInit() { - this.subscriptions.push( - this.router.events.filter(routes => routes instanceof NavigationEnd).subscribe(this.onNavigationEnd) - ); - this.actualizeDropdownSelectionByActivatedRouteSnapshot(this.router.routerState.root.snapshot); - } - - ngOnDestroy() { - if (this.subscriptions) { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - } - - private getClusterSelectionStoreKeyFromActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot): string { - return this.routingUtilsService.getDataFromActivatedRouteSnapshot(routeSnapshot, 'clusterSelectionStoreKey') - || (routeSnapshot.firstChild && routeSnapshot.firstChild.url[0].path); - } - - private setClusterSelectionStoreKeyFromActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot): void { - const clusterSelectionStoreKey: string = this.getClusterSelectionStoreKeyFromActivatedRouteSnapshot(routeSnapshot); - if (clusterSelectionStoreKey !== this.clusterSelectionStoreKey.getValue()) { - this.clusterSelectionStoreKey.next(clusterSelectionStoreKey); - } - } - - private setUseMultiSelectionFromActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot): void { - const multiClusterFilter: boolean | null = this.routingUtilsService.getDataFromActivatedRouteSnapshot( - routeSnapshot, 'multiClusterFilter' - ); - if (this.useMultiSelection.getValue() !== multiClusterFilter) { - this.useMultiSelection.next( - typeof multiClusterFilter === 'boolean' ? multiClusterFilter : this.defaultUseMultiSelection - ); - } - } - - private setDropdownSelectionByActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot): void { - const clusterParamKey: string = this.routingUtilsService.getDataFromActivatedRouteSnapshot(routeSnapshot, 'clusterParamKey'); - let clusterSelection = this.routingUtilsService.getParamFromActivatedRouteSnapshot(routeSnapshot, clusterParamKey || 'cluster'); - if (clusterSelection) { - clusterSelection = this.useMultiSelection.getValue() ? 
clusterSelection.split(/[,;]/) : clusterSelection; - if (Array.isArray(clusterSelection)) { - clusterSelection = clusterSelection.map( - (clusterName: string) => Object.assign(this.utilsService.getListItemFromString(clusterName), { - isChecked: true - }) - ); - } else { - clusterSelection = Object.assign(this.utilsService.getListItemFromString(clusterSelection), { - isChecked: true - }); - } - this.dataAvaibilityStateStore.getParameter(DataStateStoreKeys.CLUSTERS_DATA_KEY) - .filter((state: DataAvailabilityValues) => state === DataAvailabilityValues.AVAILABLE) - .first() - .subscribe(() => { - this.filterDropdown.updateSelection(clusterSelection); - }); - } else { - this.filterDropdown.updateSelection(null); - } - } - - private actualizeDropdownSelectionByActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot): void { - this.setClusterSelectionStoreKeyFromActivatedRouteSnapshot(routeSnapshot); - this.setUseMultiSelectionFromActivatedRouteSnapshot(routeSnapshot); - this.setDropdownSelectionByActivatedRouteSnapshot(routeSnapshot); - } - - private onNavigationEnd = (): void => { - this.actualizeDropdownSelectionByActivatedRouteSnapshot(this.router.routerState.root.snapshot); - } - - onDropDownSelectionChanged = (values): void => { - this.setSelectionInClusterSelectionStore(values); - } - - private setSelectionInClusterSelectionStore = (values): void => { - this.clusterSelectionStoreService.getParameter(this.clusterSelectionStoreKey.getValue()).first() - .subscribe(currentCluster => { - if (!this.utilsService.isEqual(currentCluster, values)) { - this.clusterSelectionStoreService.setParameter(this.clusterSelectionStoreKey.getValue(), values); - } - }); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.html deleted file mode 100644 index ce1e87221d5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.html +++ /dev/null @@ -1,28 +0,0 @@ - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.less deleted file mode 100644 index 79d25f39321..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.less +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
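The ClusterFilterComponent deleted above drives its dropdown entirely from route data and route parameters. A hypothetical route definition showing the keys it reads (the path, page component and values are assumptions for illustration):

    import { Component } from '@angular/core';
    import { Routes } from '@angular/router';

    // Placeholder routed page so the sketch is self-contained.
    @Component({ template: '<cluster-filter></cluster-filter>' })
    class AuditLogsPageComponent {}

    // Hypothetical route showing the data keys read through RoutingUtilsService:
    // clusterSelectionStoreKey - key under which the selection is kept in ClusterSelectionService,
    // multiClusterFilter       - whether the dropdown is multi-select ("c1,c2" or "c1;c2" in the URL parameter),
    // clusterParamKey          - name of the route parameter carrying the preselected cluster(s); defaults to 'cluster'.
    const routes: Routes = [
      {
        path: 'auditLogs/:cluster',
        component: AuditLogsPageComponent,
        data: {
          clusterSelectionStoreKey: 'auditLogs',
          multiClusterFilter: true,
          clusterParamKey: 'cluster'
        }
      }
    ];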
- */ - -@import '../../modules/shared/mixins'; - -.panel-collapsible { - position: relative; - .panel-heading { - .clickable-item; - background-color: @panel-heading; - border: 0 none; - color: @base-font-color; - display: flex; - flex-direction: row; - font-size: 1.25rem; - a, a:hover, a:visited { - color: @base-font-color; - text-decoration: none; - } - } - .panel-body { - padding: 5px; - } - &.panel-collapsed { - .panel-body { - height: 0; - overflow: hidden; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.spec.ts deleted file mode 100644 index f92f30ac476..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.spec.ts +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {DebugElement} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {By} from '@angular/platform-browser'; -import {TranslationModules} from '@app/test-config.spec'; -import {HttpClientService} from '@app/services/http-client.service'; - -import {CollapsiblePanelComponent} from './collapsible-panel.component'; - -describe('CollapsiblePanelComponent', () => { - let component: CollapsiblePanelComponent; - let fixture: ComponentFixture; - let de: DebugElement; - let el: HTMLElement; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - } - } - }; - TestBed.configureTestingModule({ - declarations: [CollapsiblePanelComponent], - imports: TranslationModules, - providers: [ - { - provide: HttpClientService, - useValue: httpClient - } - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(CollapsiblePanelComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - de = fixture.debugElement.query(By.css('div.panel')); - el = de.nativeElement; - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - it('should call toggleCollapsed', () => { - let mockEvent: MouseEvent = document.createEvent('MouseEvent'); - mockEvent.initEvent('click', true, true); - spyOn(component,'toggleCollapsed'); - component.handleCollapseBtnClick(mockEvent); - expect(component.toggleCollapsed).toHaveBeenCalled(); - }); - - it('should prevent default action on event after toggle button click',() => { - let mockEvent: MouseEvent = document.createEvent('MouseEvent'); - mockEvent.initEvent('click', true, true); - spyOn(mockEvent,'preventDefault'); - component.handleCollapseBtnClick(mockEvent); - 
expect(mockEvent.preventDefault).toHaveBeenCalled(); - }); - - it('should negate the isCollapsed property', () => { - let valueBefore = component.isCollapsed; - component.toggleCollapsed(); - fixture.detectChanges(); - expect(component.isCollapsed).toEqual(!valueBefore); - }); - - it('should add `panel-collapsed` css class to the element when the isCollapsed is true', () => { - component.isCollapsed = true; - fixture.detectChanges(); - expect(el.className).toContain('panel-collapsed'); - }); - - it('should not have `panel-collapsed` css class on the element when the isCollapsed is false', () => { - component.isCollapsed = false; - fixture.detectChanges(); - expect(el.className).not.toContain('panel-collapsed'); - }); - - it('should display the openTitle if presented and the isCollapsed property is false', () => { - let title = 'Open title'; - let headingEl = el.querySelector('.panel-heading'); - component.openTitle = title; - component.isCollapsed = false; - fixture.detectChanges(); - expect(headingEl.textContent).toContain(title); - }); - - it('should display the collapsedTitle if it presented and the isCollapsed property is true', () => { - let title = 'Collapsed title'; - let headingEl = el.querySelector('.panel-heading'); - component.collapsedTitle = title; - component.isCollapsed = true; - fixture.detectChanges(); - expect(headingEl.textContent).toContain(title); - }); - - it('should display the title if openTitle is not presented and the isCollapsed property is false', () => { - let title = 'Title'; - let headingEl = el.querySelector('.panel-heading'); - component.openTitle = ''; - component.commonTitle = title; - component.isCollapsed = false; - fixture.detectChanges(); - expect(headingEl.textContent).toContain(title); - }); - - it('should display the title if collapsedTitle is not presented and the isCollapsed property is true', () => { - let title = 'Title'; - let headingEl = el.querySelector('.panel-heading'); - component.collapsedTitle = ''; - component.commonTitle = title; - component.isCollapsed = true; - fixture.detectChanges(); - expect(headingEl.textContent).toContain(title); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.ts deleted file mode 100644 index 5e86bfaca6b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/collapsible-panel/collapsible-panel.component.ts +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, Input} from '@angular/core'; - -enum Side { - LEFT = "left", - RIGHT = "right" -} - -/** - * The goal of this component to have a simple BS panel with a collapse link in the panel heading. So that adding - * components/content into the body of the panel we can hide and show the its content. - * @class CollapsiblePanelComponent - */ -@Component({ - selector: 'collapsible-panel', - templateUrl: './collapsible-panel.component.html', - styleUrls: ['./collapsible-panel.component.less'] -}) -export class CollapsiblePanelComponent { - - /** - * This is for the common title of the panel. If the openTitle or the collapsedTitle not set this will be displayed. - * @type {string} - */ - @Input() - commonTitle: string = ''; - - /** - * The panel's title for the opened state - * @type {string} - */ - @Input() - openTitle?: string; - - /** - * The panel's title fo the closed/collapsed state - * @type {string} - */ - @Input() - collapsedTitle?: string; - - /** - * This property indicates the position of the caret. It can be 'left' or 'right' - * @type {Side} - */ - @Input() - caretSide: Side = Side.LEFT; - - /** - * The flag to indicate the collapsed state. - * @type {boolean} - */ - @Input() - isCollapsed: boolean = false; - - /** - * The goal is to handle the click event of the collapse link/button. It will simply call the inside logic to toggle - * the collapsed state. The goal is to separate the functions by responsibility. - * @param {MouseEvent} ev - */ - handleCollapseBtnClick(ev: MouseEvent): void { - this.toggleCollapsed(); - ev.preventDefault(); - } - - /** - * The goal is to simply negate the current collapse state. - */ - toggleCollapsed(): void { - this.isCollapsed = !this.isCollapsed; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.html deleted file mode 100644 index b17cda6102e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.html +++ /dev/null @@ -1,19 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.less deleted file mode 100644 index aea7274b188..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.less +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
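As the class comment above describes, CollapsiblePanelComponent is a Bootstrap-style panel whose body content can be hidden and shown via the heading link. A minimal host sketch using its inputs (the host component is hypothetical, and the deleted template is assumed to project its content into the panel body as the class comment suggests):

    import { Component } from '@angular/core';

    // Hypothetical host; the bound attributes correspond one-to-one to the @Input() fields above.
    @Component({
      selector: 'history-panel-demo',
      template: `
        <collapsible-panel
          commonTitle="Filter history"
          openTitle="Hide filter history"
          collapsedTitle="Show filter history"
          caretSide="right"
          [isCollapsed]="true">
          <div>panel body content goes here</div>
        </collapsible-panel>
      `
    })
    export class HistoryPanelDemoComponent {}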
- */ - -ul.dropdown-menu { - position: fixed; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.spec.ts deleted file mode 100644 index afca6031681..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.spec.ts +++ /dev/null @@ -1,133 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule} from '@angular/forms'; -import {StoreModule} from '@ngrx/store'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; -import {UtilsService} from '@app/services/utils.service'; -import {DropdownListComponent} from '@modules/shared/components/dropdown-list/dropdown-list.component'; - -import {ContextMenuComponent} from './context-menu.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} 
from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('ContextMenuComponent', () => { - let component: ContextMenuComponent; - let fixture: ComponentFixture; - - const httpClient = { - get: () => { - return { - subscribe: () => { - } - }; - } - }; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - ContextMenuComponent, - DropdownListComponent - ], - imports: [ - RouterTestingModule, - ...TranslationModules, - StoreModule.provideStore({ - hosts, - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - serviceLogsTruncated, - tabs - }), - FormsModule - ], - providers: [ - ...MockHttpRequestModules, - ComponentGeneratorService, - LogsContainerService, - HostsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - ServiceLogsTruncatedService, - TabsService, - AuthService, - UtilsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ContextMenuComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.ts deleted file mode 100644 index 05414ec1b2e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/context-menu/context-menu.component.ts +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, OnChanges, SimpleChanges, Input, Output, EventEmitter, HostBinding} from '@angular/core'; -import {ListItem} from '@app/classes/list-item'; - -@Component({ - selector: 'context-menu', - templateUrl: './context-menu.component.html', - styleUrls: ['./context-menu.component.less'] -}) -export class ContextMenuComponent implements OnChanges { - - ngOnChanges (changes: SimpleChanges): void { - if (changes.isDisplayed && changes.isDisplayed.currentValue) { - document.body.addEventListener('click', this.dismissContextMenu); - } - } - - @Input() - @HostBinding('class.open') - isDisplayed: boolean = false; - - @Input() - contextMenuItems: ListItem[] = []; - - /** - * 'left' CSS property value - * @type {number} - */ - @Input() - leftPosition: number = 0; - - /** - * 'top' CSS property value - * @type {number} - */ - @Input() - topPosition: number = 0; - - @Output() - contextMenuDismiss: EventEmitter = new EventEmitter(); - - @Output() - itemSelect: EventEmitter = new EventEmitter(); - - private dismissContextMenu = (): void => { - this.isDisplayed = false; - this.contextMenuDismiss.emit(); - document.body.removeEventListener('click', this.dismissContextMenu); - }; - - selectItem(event: ListItem): void { - this.itemSelect.emit(event); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.html deleted file mode 100644 index fcebe56ec25..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.html +++ /dev/null @@ -1,25 +0,0 @@ - - -
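The ContextMenuComponent deleted above is positioned through its leftPosition/topPosition inputs and dismisses itself on the next body click. A hypothetical host wiring it to a contextmenu event could look like this (the component names and the empty item list are placeholders; real items would normally be built by the application, for example via UtilsService.getListItemFromString as seen elsewhere in this change):

    import { Component } from '@angular/core';
    import { ListItem } from '@app/classes/list-item';

    // Hypothetical host component for illustration only.
    @Component({
      selector: 'log-row-demo',
      template: `
        <div (contextmenu)="openMenu($event)">right-click for actions</div>
        <context-menu
          [isDisplayed]="menuVisible"
          [contextMenuItems]="menuItems"
          [leftPosition]="left"
          [topPosition]="top"
          (itemSelect)="onItemSelect($event)"
          (contextMenuDismiss)="menuVisible = false">
        </context-menu>
      `
    })
    export class LogRowDemoComponent {
      menuVisible = false;
      left = 0;
      top = 0;
      menuItems: ListItem[] = [];

      openMenu(event: MouseEvent): void {
        event.preventDefault();       // keep the browser's own menu from opening
        this.left = event.clientX;    // consumed as the 'left' CSS property
        this.top = event.clientY;     // consumed as the 'top' CSS property
        this.menuVisible = true;      // triggers the body click listener via ngOnChanges
      }

      onItemSelect(item: ListItem): void {
        // react to the selected entry here
      }
    }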
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.spec.ts deleted file mode 100644 index dfd971142f8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.spec.ts +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import * as moment from 'moment-timezone'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; - -import {DatePickerComponent} from './date-picker.component'; - -describe('DatePickerComponent', () => { - let component: DatePickerComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [DatePickerComponent], - imports: [ - StoreModule.provideStore({ - appSettings - }) - ], - providers: [AppSettingsService] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(DatePickerComponent); - component = fixture.componentInstance; - component.time = moment(); - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.ts deleted file mode 100644 index 93ebe375c3b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/date-picker/date-picker.component.ts +++ /dev/null @@ -1,99 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { - Component, OnInit, OnChanges, OnDestroy, SimpleChanges, Input, Output, EventEmitter, ViewChild, ElementRef -} from '@angular/core'; -import * as $ from 'jquery'; -import * as moment from 'moment'; -import '@vendor/js/bootstrap-datetimepicker.min'; -import {AppSettingsService} from '@app/services/storage/app-settings.service'; - -@Component({ - selector: 'date-picker', - templateUrl: './date-picker.component.html' -}) -export class DatePickerComponent implements OnInit, OnChanges, OnDestroy { - - constructor(private appSettings: AppSettingsService) { - } - - ngOnInit(): void { - this.appSettings.getParameter('timeZone').subscribe((value: string): void => { - this.destroyDatePicker(); - this.timeZone = value; - if (this.datePickerElement) { - this.createDatePicker(); - } - }); - this.createDatePicker(); - } - - ngOnChanges(changes: SimpleChanges): void { - if (changes.hasOwnProperty('time') && this.datePickerElement) { - this.setTime(changes.time.currentValue); - } - } - - ngOnDestroy(): void { - this.destroyDatePicker(); - } - - /** - * Value of time input field passed from parent component - * @type {Moment|Date|string} - */ - @Input() - time: moment.Moment | Date | string; - - @Output() - timeChange: EventEmitter = new EventEmitter(); - - @ViewChild('datepicker') - datePicker: ElementRef; - - private datePickerElement: any; - - private timeZone: string; - - private createDatePicker(): void { - this.datePickerElement = $(this.datePicker.nativeElement); - this.datePickerElement.datetimepicker({ - timeZone: this.timeZone - }); - this.setTime(this.time); - this.datePickerElement.on('dp.change', event => this.timeChange.emit(event.date)); - } - - private destroyDatePicker(): void { - const datePicker = this.datePickerElement; - if (datePicker) { - datePicker.data('DateTimePicker').destroy(); - } - } - - /** - * Set value to time input field - * @param {Moment|Date|string} time - */ - private setTime(time: moment.Moment | Date | string): void { - const timeMoment = moment.isMoment(time) ? time : moment(time); - this.datePickerElement.data('DateTimePicker').date(timeMoment); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts deleted file mode 100644 index 87c490c196e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts +++ /dev/null @@ -1,114 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
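The DatePickerComponent deleted above wraps the bootstrap-datetimepicker plugin and re-emits changes through timeChange. Because the output is named timeChange, matching the time input, a parent can use Angular's two-way binding shorthand; a minimal hypothetical host:

    import { Component } from '@angular/core';
    import * as moment from 'moment';

    // Hypothetical host; [(time)] works because the output name follows the <input>Change convention.
    @Component({
      selector: 'time-range-demo',
      template: `<date-picker [(time)]="startTime"></date-picker>`
    })
    export class TimeRangeDemoComponent {
      startTime: moment.Moment = moment();
    }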
- */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {UtilsService} from '@app/services/utils.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; - -import {FilterButtonComponent} from './filter-button.component'; - -describe('FilterButtonComponent', () => { - let component: FilterButtonComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - } - } - }; - TestBed.configureTestingModule({ - declarations: [FilterButtonComponent], - imports: [ - StoreModule.provideStore({ - appSettings, - clusters, - components, - appState, - hosts, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - AppSettingsService, - ClustersService, - ComponentsService, - AppStateService, - HostsService, - AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - UtilsService, - { - provide: HttpClientService, - useValue: httpClient - }, - LogsContainerService, - AuthService - ], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(FilterButtonComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.ts deleted file mode 100644 index af14925bd28..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.ts +++ 
/dev/null @@ -1,107 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, forwardRef} from '@angular/core'; -import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms'; -import {ListItem} from '@app/classes/list-item'; -import {UtilsService} from '@app/services/utils.service'; -import {MenuButtonComponent} from '@app/components/menu-button/menu-button.component'; - -@Component({ - selector: 'filter-button', - templateUrl: '../menu-button/menu-button.component.html', - styleUrls: ['../menu-button/menu-button.component.less'], - providers: [ - { - provide: NG_VALUE_ACCESSOR, - useExisting: forwardRef(() => FilterButtonComponent), - multi: true - } - ] -}) -export class FilterButtonComponent extends MenuButtonComponent implements ControlValueAccessor { - - private selectedItems: ListItem[] = []; - - private onChange: (fn: any) => void; - - constructor(private utils: UtilsService) { - super(); - } - - get selection(): ListItem[] { - return this.selectedItems; - } - - set selection(items: ListItem[]) { - this.selectedItems = items; - if (this.onChange) { - this.onChange(items); - } - } - - updateSelection(updates: ListItem | ListItem[]): void { - if (updates && (!Array.isArray(updates) || updates.length)) { - const items: ListItem[] = Array.isArray(updates) ? 
updates : [updates]; - if (this.isMultipleChoice) { - items.forEach((item: ListItem) => { - if (this.subItems && this.subItems.length) { - const itemToUpdate: ListItem = this.subItems.find((option: ListItem) => this.utils.isEqual(option.value, item.value)); - if (itemToUpdate) { - itemToUpdate.isChecked = item.isChecked; - } - } - }); - } else { - const selectedItem: ListItem = items.find((item: ListItem) => item.isChecked); - this.subItems.forEach((item: ListItem) => { - item.isChecked = !!selectedItem && this.utils.isEqual(item.value, selectedItem.value); - }); - } - } else { - this.subItems.forEach((item: ListItem) => item.isChecked = false); - } - const checkedItems = this.subItems.filter((option: ListItem): boolean => option.isChecked); - this.selection = checkedItems; - this.selectItem.emit(checkedItems.map((option: ListItem): any => option.value)); - if (this.dropdownList) { - this.dropdownList.doItemsCheck(); - } - } - - writeValue(items: ListItem[]) { - let listItems: ListItem[] = []; - if (items && items.length) { - listItems = items.map((item: ListItem) => { - return { - ...item, - isChecked: true - }; - }); - } - this.updateSelection(listItems); - } - - registerOnChange(callback: any): void { - this.onChange = callback; - } - - registerOnTouched() { - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html deleted file mode 100644 index 7385305ae91..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html +++ /dev/null @@ -1,57 +0,0 @@ - - -
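FilterButtonComponent, deleted a little above, registers itself as an NG_VALUE_ACCESSOR provider, so it can be bound like any other form control; its value is the array of checked ListItem options. A minimal reactive-forms sketch under that assumption (the host component and form shape are illustrative; the available options come from the inherited MenuButtonComponent inputs, which are outside this section):

    import { Component } from '@angular/core';
    import { FormControl, FormGroup } from '@angular/forms';
    import { ListItem } from '@app/classes/list-item';

    // Hypothetical host: writeValue() marks the matching options as checked and the control value
    // is the array of checked ListItem objects.
    @Component({
      selector: 'filters-demo',
      template: `
        <form [formGroup]="filtersForm">
          <filter-button formControlName="levels"></filter-button>
        </form>
      `
    })
    export class FiltersDemoComponent {
      filtersForm: FormGroup = new FormGroup({
        levels: new FormControl([] as ListItem[])
      });
    }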
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less deleted file mode 100644 index c748c374921..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - display: block; - padding: @filters-panel-padding; - background-color: @filters-panel-background-color; - - .filter-input-container { - display: flex; - align-items: flex-start; - justify-content: flex-start; - - .search-button { - border: 1px solid @submit-color; - height: auto; - - &:last-child { - border-top-left-radius: 0; - border-bottom-left-radius: 0; - } - } - - .filter-input { - border: @input-border; - - &:not(:last-child) { - border-right-width: 0; - } - - &:first-child { - border-top-left-radius: @button-border-radius; - border-bottom-left-radius: @button-border-radius; - } - - &:last-child { - border-top-right-radius: @button-border-radius; - border-bottom-right-radius: @button-border-radius; - } - } - - search-box.filter-input:not(:last-child) { - border-right-width: @input-border-width; - margin-right: -1 * (@input-border-width); - } - - time-range-picker { - /deep/ .dropdown-menu { - left: @col-padding; - } - } - } - - .filter-buttons { - .default-flex; - } - - /deep/ .dropdown-menu { - .log-level-item { - .log-colors; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts deleted file mode 100644 index 3b853771418..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts +++ /dev/null @@ -1,128 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormGroup, FormControl} from '@angular/forms'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {UtilsService} from '@app/services/utils.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; - -import {FiltersPanelComponent} from './filters-panel.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('FiltersPanelComponent', () => { - let component: FiltersPanelComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - }; - } - }; - TestBed.configureTestingModule({ - declarations: [ - FiltersPanelComponent - ], - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - appSettings, - clusters, - components, - hosts, - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appState, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - AppSettingsService, - ClustersService, - ComponentsService, - HostsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppStateService, - ServiceLogsTruncatedService, - TabsService, - LogsContainerService, - UtilsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - 
LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(FiltersPanelComponent); - component = fixture.componentInstance; - component.filtersForm = new FormGroup({ - control: new FormControl() - }); - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts deleted file mode 100644 index df863a344f5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, OnDestroy, Input, ViewContainerRef, OnInit} from '@angular/core'; -import {FormGroup} from '@angular/forms'; -import {Observable} from 'rxjs/Observable'; -import {Subject} from 'rxjs/Subject'; -import 'rxjs/add/observable/from'; -import 'rxjs/add/operator/defaultIfEmpty'; -import {FilterCondition, SearchBoxParameter, SearchBoxParameterTriggered} from '@app/classes/filtering'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject} from '@app/classes/object'; -import {LogsType} from '@app/classes/string'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {Subscription} from 'rxjs/Subscription'; - -@Component({ - selector: 'filters-panel', - templateUrl: './filters-panel.component.html', - styleUrls: ['./filters-panel.component.less'] -}) -export class FiltersPanelComponent implements OnDestroy, OnInit { - - @Input() - filtersForm: FormGroup; - - private subscriptions: Subscription[] = []; - - searchBoxItems$: Observable; - - searchBoxValueUpdate: Subject = new Subject(); - - private isServiceLogsFileView$: Observable = this.appState.getParameter('isServiceLogsFileView'); - - get containerEl(): Element { - return this.viewContainerRef.element.nativeElement; - } - - get filters(): HomogeneousObject { - return this.logsContainerService.filters; - } - - /** - * Object with options for search box parameter values - * @returns HomogeneousObject - */ - get options(): HomogeneousObject { - return Object.keys(this.filters).filter((key: string): boolean => { - const condition = this.filters[key]; - return Boolean(condition.fieldName && condition.options); - }).reduce((currentValue, currentKey) => { - const condition = this.filters[currentKey]; - 
return Object.assign(currentValue, { - [condition.fieldName]: condition.options - }); - }, {}); - } - - get queryParameterNameChange(): Subject { - return this.logsContainerService.queryParameterNameChange; - } - - get queryParameterAdd(): Subject { - return this.logsContainerService.queryParameterAdd; - } - - constructor(private logsContainerService: LogsContainerService, public viewContainerRef: ViewContainerRef, - private utils: UtilsService, private appState: AppStateService) { - } - - ngOnInit() { - this.subscriptions.push(this.appState.getParameter('activeLogsType').subscribe(this.onLogsTypeChange)); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - private onLogsTypeChange = (currentLogsType: LogsType): void => { - const logsType = this.logsContainerService.logsTypeMap[currentLogsType]; - const fieldsModel: any = logsType && logsType.fieldsModel; - let subType: string; - let fields: Observable; - switch (currentLogsType) { - case 'auditLogs': - fields = fieldsModel.getParameter(subType ? 'overrides' : 'defaults'); - if (subType) { - fields = fields.map(items => items && items[subType]); - } - break; - case 'serviceLogs': - fields = fieldsModel.getAll(); - break; - default: - fields = Observable.from([]); - break; - } - this.searchBoxItems$ = fields.defaultIfEmpty([]).map(items => items ? items.filter(field => field.filterable) : []) - .map(this.utils.logFieldToListItemMapper); - } - - isFilterConditionDisplayed(key: string): boolean { - return this.logsContainerService.isFilterConditionDisplayed(key); - } - - updateSearchBoxValue(): void { - this.searchBoxValueUpdate.next(); - } - - proceedWithExclude(item: string): void { - this.queryParameterNameChange.next({ - item: { - value: item - }, - isExclude: true - }); - } - - private onClearBtnClick = (): void => { - const defaults = this.logsContainerService.isServiceLogsFileView ? { - components: this.logsContainerService.filtersForm.controls['components'].value, - hosts: this.logsContainerService.filtersForm.controls['hosts'].value - } : {}; - this.logsContainerService.resetFiltersForms(defaults); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.html deleted file mode 100644 index 76cdd2f4243..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.html +++ /dev/null @@ -1,19 +0,0 @@ - - - -{{label}} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.less deleted file mode 100644 index dc20dca970b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.less +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -:host { - padding-right: 1em; - - .color { - border-radius: 100%; - display: inline-block; - height: .8em; - width: .8em; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.spec.ts deleted file mode 100644 index f8a4bebe6f2..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.spec.ts +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; - -import {GraphLegendItemComponent} from './graph-legend-item.component'; - -describe('GraphLegendItemComponent', () => { - let component: GraphLegendItemComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [GraphLegendItemComponent] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(GraphLegendItemComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.ts deleted file mode 100644 index 127eb8db989..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend-item/graph-legend-item.component.ts +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input} from '@angular/core'; - -@Component({ - selector: 'graph-legend-item', - templateUrl: './graph-legend-item.component.html', - styleUrls: ['./graph-legend-item.component.less'] -}) -export class GraphLegendItemComponent { - - /** - * Color of the corresponding graph item. Should be string in any CSS allowable format. - * @type {string} - */ - @Input() - color: string; - - @Input() - label: string; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.html deleted file mode 100644 index e756af6d478..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.html +++ /dev/null @@ -1,19 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.spec.ts deleted file mode 100644 index e297e142d68..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.spec.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; -import {GraphLegendItemComponent} from '@app/components/graph-legend-item/graph-legend-item.component'; - -import {GraphLegendComponent} from './graph-legend.component'; - -describe('GraphLegendComponent', () => { - let component: GraphLegendComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - GraphLegendComponent, - GraphLegendItemComponent - ], - imports: TranslationModules, - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(GraphLegendComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.ts deleted file mode 100644 index e273d4ebe59..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-legend/graph-legend.component.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input} from '@angular/core'; - -@Component({ - selector: 'graph-legend', - templateUrl: './graph-legend.component.html' -}) -export class GraphLegendComponent { - - @Input() - items = []; - - @Input() - labelClass: string = 'initial-color'; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.html deleted file mode 100644 index 1711ffccdcc..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.html +++ /dev/null @@ -1,22 +0,0 @@ - - -
{{title}}
-
- - {{item.value}} -
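The graph-tooltip template above lost its HTML tags in extraction; only the bindings survive. For orientation, a minimal sketch of how a hypothetical host view could feed the deleted graph-tooltip component. The demo component name and the sample data values are assumptions; only the title, data and labelClass inputs come from the component class later in this diff.

import {Component} from '@angular/core';

// Hypothetical host component. GraphTooltipComponent itself would have to be
// declared in the same NgModule for the <graph-tooltip> selector to resolve.
@Component({
  selector: 'graph-tooltip-demo',
  template: `
    <graph-tooltip
      [title]="tooltipTitle"
      [data]="tooltipData"
      labelClass="initial-color">
    </graph-tooltip>
  `
})
export class GraphTooltipDemoComponent {
  tooltipTitle = '2018-05-01 12:00';
  // Item shape is assumed from the {{item.value}} binding above and from the
  // color/label inputs of the graph-legend-item component rendered per data item.
  tooltipData = [
    {color: '#ff0000', label: 'FATAL', value: 3},
    {color: '#ffaa00', label: 'WARN', value: 17}
  ];
}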
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.less deleted file mode 100644 index 8979592d221..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.less +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - background: #fff; - border-radius: 4px; - border: @input-border; - display: block; - font-size: .8em; - margin: 0 1.5em; - min-height: 2em; - min-width: 5em; - padding: .5em; - position: absolute; - - &:empty { - display: none; - } - - &::before { - .caret-mixin(6px, left, #fff); - left: -6px; - position: absolute; - top: calc(50% - 2px); - } - - &.tooltip-left { - &::before { - display: none; - } - - &::after { - .caret-mixin(6px, right, #fff); - right: -6px; - position: absolute; - top: calc(50% - 2px); - } - } - - .title { - padding: 0 0 .1em 0; - text-align: center; - } - - .data-item { - display: flex; - justify-content: space-between; - - graph-legend-item { - flex-grow: 3; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.spec.ts deleted file mode 100644 index 14fa60ea6b1..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.spec.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; -import {GraphLegendItemComponent} from '@app/components/graph-legend-item/graph-legend-item.component'; - -import {GraphTooltipComponent} from './graph-tooltip.component'; - -describe('GraphTooltipComponent', () => { - let component: GraphTooltipComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - GraphTooltipComponent, - GraphLegendItemComponent - ], - imports: TranslationModules, - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(GraphTooltipComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.ts deleted file mode 100644 index 9d26a2e2b48..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/graph-tooltip/graph-tooltip.component.ts +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, Input} from '@angular/core'; - -@Component({ - selector: 'graph-tooltip', - templateUrl: './graph-tooltip.component.html', - styleUrls: ['./graph-tooltip.component.less'] -}) -export class GraphTooltipComponent { - - @Input() - title: string | number = ''; - - @Input() - data = []; - - @Input() - labelClass: string = 'initial-color'; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.html deleted file mode 100644 index e6978f1aa66..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.less deleted file mode 100644 index dfb99975712..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.less +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -:host { - float: right; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.spec.ts deleted file mode 100644 index 4dbaa2db582..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.spec.ts +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; - -import {HistoryItemControlsComponent} from './history-item-controls.component'; - -describe('HistoryItemControlsComponent', () => { - let component: HistoryItemControlsComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [HistoryItemControlsComponent] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(HistoryItemControlsComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.ts deleted file mode 100644 index 1975d9ac1ec..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/history-item-controls/history-item-controls.component.ts +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component} from '@angular/core'; - -@Component({ - selector: 'history-item-controls', - templateUrl: './history-item-controls.component.html', - styleUrls: ['./history-item-controls.component.less'] -}) -export class HistoryItemControlsComponent { - // TODO implement View details and Save filter actions -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.html deleted file mode 100644 index 015013f168d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.html +++ /dev/null @@ -1,22 +0,0 @@ - - -
- - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.less deleted file mode 100644 index acfedb72fe6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.less +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/variables'; - -:host { - padding-top: @graph-padding; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.spec.ts deleted file mode 100644 index 2c599165d8c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.spec.ts +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Injector} from '@angular/core'; -import {async, ComponentFixture, TestBed, inject} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; -import {ServiceInjector} from '@app/classes/service-injector'; -import {GraphLegendComponent} from '@app/components/graph-legend/graph-legend.component'; -import {GraphLegendItemComponent} from '@app/components/graph-legend-item/graph-legend-item.component'; -import {GraphTooltipComponent} from '@app/components/graph-tooltip/graph-tooltip.component'; -import {UtilsService} from '@app/services/utils.service'; - -import {HorizontalHistogramComponent} from './horizontal-histogram.component'; - -describe('HorizontalHistogramComponent', () => { - let component: HorizontalHistogramComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - HorizontalHistogramComponent, - GraphLegendComponent, - GraphLegendItemComponent, - GraphTooltipComponent - ], - imports: [ - ...TranslationModules - ], - providers: [ - UtilsService - ] - }) - .compileComponents(); - })); - - beforeEach(inject([Injector], (injector: Injector) => { - ServiceInjector.injector = injector; - fixture = TestBed.createComponent(HorizontalHistogramComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - })); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.ts deleted file mode 100644 index 8cc3149f0f4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/horizontal-histogram/horizontal-histogram.component.ts +++ /dev/null @@ -1,114 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, Input} from '@angular/core'; -import * as d3 from 'd3'; -import {GraphComponent} from '@app/classes/components/graph/graph.component'; -import {HomogeneousObject} from '@app/classes/object'; - -@Component({ - selector: 'horizontal-histogram', - templateUrl: './horizontal-histogram.component.html', - styleUrls: ['../../classes/components/graph/graph.component.less', './horizontal-histogram.component.less'] -}) -export class HorizontalHistogramComponent extends GraphComponent { - - /** - * Thickness of horizontal bar of the graph - * @type {number} - */ - @Input() - barSize: number = 5; - - rowsCount: number; - - readonly reverseYRange: boolean = true; - - protected populate(): void { - const barSize = this.barSize, - data = this.data, - yValues = Object.keys(data), - keys = Object.keys(this.labels), - rowsCount = yValues.reduce((currentCount: number, currentKey: string): number => { - return currentCount + Object.keys(this.data[currentKey]).length; - }, 0), - formattedData = yValues.reduce((currentData, currentKey: string) => { - const currentValues = data[currentKey], - currentObjects = keys.map((key: string): HomogeneousObject => { - return { - [key]: currentValues[key] || 0 - }; - }); - return [...currentData, Object.assign({ - tick: currentKey - }, ...currentObjects)]; - }, []), - layers = d3.stack().keys(keys)(formattedData), - formattedLayers = d3.transpose(layers); - - this.rowsCount = rowsCount; - - this.setXScaleDomain(); - this.setYScaleDomain(); - - // drawing the axis - this.drawXAxis(); - this.drawYAxis(rowsCount); - - let i = 0; - - // populate the data and drawing the bars - this.svg.selectAll().data(formattedLayers).enter().append('g').attr('class', 'value') - .selectAll().data(item => item).enter().append('rect') - .attr('x', item => this.xScale(0) + 1).attr('y', item => { - if (item [0] !== item[1]) { - return this.yScale(i++) - this.barSize / 2; - } - }).attr('height', item => item[0] === item[1] ? '0' : barSize.toString()) - .attr('width', item => this.xScale(item[1]) - this.xScale(item[0])) - .style('fill', (item, index) => this.orderedColors[index]) - .on('mouseover', this.handleMouseOver) - .on('mousemove', this.handleMouseMove) - .on('mouseout', this.handleMouseOut); - } - - protected setXScaleDomain(): void { - const keys = Object.keys(this.data), - maxValues = keys.map((currentKey: string): number => this.utils.getMaxNumberInObject(this.data[currentKey]), 0), - maximum = Math.max(...maxValues); - this.xScale.domain([0, maximum]); - } - - protected setYScaleDomain(): void { - this.yScale.domain([0, this.rowsCount]); - } - - protected yAxisTickFormatter = (tick: any, index: number): string | undefined => { - const data = this.data, - keys = Object.keys(data); - let currentIndex = 0; - for (let i = 0; i < keys.length && i <= index; i++) { - const currentKey = keys[i]; - if (currentIndex === index) { - return currentKey; - } else { - currentIndex += Object.keys(data[currentKey]).length; - } - } - }; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html deleted file mode 100644 index b5eced00b07..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - -
- -
-
- -
-
- -
-
-
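The log-context template above is likewise stripped of its markup. As a reference point, a minimal sketch of opening the deleted log-context component from a hypothetical parent view; the wrapper component and the sample activeLog values are assumptions, while the id, hostName and componentName inputs match the component class further down in this diff.

import {Component} from '@angular/core';

// Hypothetical parent view. The real inputs come from the service log entry
// whose surrounding lines are loaded via LogsContainerService.loadLogContext.
@Component({
  selector: 'log-context-demo',
  template: `
    <log-context
      [id]="activeLog.id"
      [hostName]="activeLog.hostName"
      [componentName]="activeLog.componentName">
    </log-context>
  `
})
export class LogContextDemoComponent {
  // Placeholder values only.
  activeLog = {
    id: 'sample-log-id',
    hostName: 'c6401.ambari.apache.org',
    componentName: 'hdfs_namenode'
  };
}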
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less deleted file mode 100644 index 4055730f57a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/variables'; -:host { - /deep/ .modal-body { - display: flex; - flex-direction: column; - } - .logs { - flex-grow: 1; - overflow-y: auto; - margin: 1em 0; - } - .btn.btn-load-more { - width: 100%; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts deleted file mode 100644 index 82201ba23e6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts +++ /dev/null @@ -1,118 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {ModalComponent} from '@modules/shared/components/modal/modal.component'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {LogContextComponent} from './log-context.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -describe('LogContextComponent', () => { - let component: LogContextComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - LogContextComponent, - ModalComponent - ], - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - LogsContainerService, - UtilsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { 
- fixture = TestBed.createComponent(LogContextComponent); - component = fixture.componentInstance; - component.scrollToCurrentEntry = () => {}; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts deleted file mode 100644 index 338a154c4a0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input, ElementRef} from '@angular/core'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/map'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {ServiceLog} from '@app/classes/models/service-log'; -import {ServiceLogContextEntry} from '@app/classes/service-log-context-entry'; - -@Component({ - selector: 'log-context', - templateUrl: './log-context.component.html', - styleUrls: ['./log-context.component.less'] -}) -export class LogContextComponent { - - @Input() - id: string; - - @Input() - hostName: string; - - @Input() - componentName: string; - - readonly currentLogClassName: string = 'alert-warning'; // TODO implement custom class name with actual styles - - firstEntryId: string; - - lastEntryId: string; - - logs: Observable = this.serviceLogsTruncatedStorage.getAll() - .map((logs: ServiceLog[]): ServiceLogContextEntry[] => { - if (logs.length) { - this.firstEntryId = logs[0].id; - this.lastEntryId = logs[logs.length - 1].id; - } - return logs.map((log: ServiceLog): ServiceLogContextEntry => { - return { - id: log.id, - time: log.logtime, - level: log.level, - message: log.log_message, - fileName: log.file, - lineNumber: log.line_number - }; - }); - }); - - constructor( - private element: ElementRef, - private logsContainer: LogsContainerService, - private serviceLogsTruncatedStorage: ServiceLogsTruncatedService, - private appState: AppStateService) {} - - closeLogContext(): void { - this.appState.setParameters({ - isServiceLogContextView: false, - activeLog: null - }); - this.serviceLogsTruncatedStorage.clear(); - this.firstEntryId = ''; - this.lastEntryId = ''; - } - - scrollToCurrentEntry() { - this.element.nativeElement.getElementsByClassName(this.currentLogClassName).item(0).scrollIntoView(); - } - - loadBefore(): void { - this.logsContainer.loadLogContext(this.firstEntryId, this.hostName, 
this.componentName, 'before'); - } - - loadAfter(): void { - this.logsContainer.loadLogContext(this.lastEntryId, this.hostName, this.componentName, 'after'); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html deleted file mode 100644 index 7d4c296898b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html +++ /dev/null @@ -1,20 +0,0 @@ - - -
{{time | amTz: timeZone |amDateFormat: timeFormat}} {{level}} {{fileName}}:{{lineNumber}} - {{message}}
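For context, a minimal sketch of how a parent template might bind one service log entry to the deleted log-file-entry component shown above. The demo wrapper and the sample log object are assumptions; the input names and the ServiceLog field names (logtime, level, file, line_number, log_message) follow the mapping used by LogContextComponent earlier in this diff.

import {Component} from '@angular/core';

// Hypothetical parent component rendering a single log-file-entry row.
@Component({
  selector: 'log-file-entry-demo',
  template: `
    <log-file-entry
      [time]="log.logtime"
      [level]="log.level"
      [fileName]="log.file"
      [lineNumber]="log.line_number"
      [message]="log.log_message">
    </log-file-entry>
  `
})
export class LogFileEntryDemoComponent {
  // Sample entry; field names mirror the ServiceLog model used elsewhere.
  log = {
    logtime: '2018-05-01T12:00:00,000',
    level: 'ERROR',
    file: 'NameNode.java',
    line_number: '42',
    log_message: 'Sample log message'
  };
}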
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less deleted file mode 100644 index a8ebf5467aa..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - display: block; - - .log { - font-family: monospace; - white-space: pre-wrap; - - .log-level { - .log-colors; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts deleted file mode 100644 index 0ae7e678a94..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {MomentModule} from 'angular2-moment'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; - -import {LogFileEntryComponent} from './log-file-entry.component'; - -describe('LogFileEntryComponent', () => { - let component: LogFileEntryComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [LogFileEntryComponent], - imports: [ - StoreModule.provideStore({ - appSettings - }), - MomentModule, - MomentTimezoneModule - ], - providers: [ - AppSettingsService - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LogFileEntryComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts deleted file mode 100644 index c0a7393b081..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input} from '@angular/core'; -import {AppSettingsService} from '@app/services/storage/app-settings.service'; - -@Component({ - selector: 'log-file-entry', - templateUrl: './log-file-entry.component.html', - styleUrls: ['./log-file-entry.component.less'] -}) -export class LogFileEntryComponent { - - constructor(private appSettings: AppSettingsService) { - appSettings.getParameter('timeZone').subscribe((value: string) => this.timeZone = value); - } - - @Input() - time: string = ''; - - @Input() - level: string = ''; - - @Input() - fileName?: string; - - @Input() - lineNumber?: string; - - @Input() - message: string = ''; - - readonly timeFormat: string = 'YYYY-MM-DD HH:mm:ss,SSS'; - - timeZone: string; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.html deleted file mode 100644 index f5dc84b2a8d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.html +++ /dev/null @@ -1,84 +0,0 @@ - -
-
- - - - - - -
{{'filter.components' | translate}} - - - {{'logIndexFilter.override' | translate}}
-
- -
- - - - - - - - - - - - - - - -
- - - - - - - -
- - - - - - -
-
-
{{'logIndexFilter.hostname' | translate}}
- -
-
-
{{'logIndexFilter.expiryDate' | translate}}
- -
-
-
-
-
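Because the log-index-filter component registers itself as an NG_VALUE_ACCESSOR (see the component class further down), its deleted template above is consumed as an ordinary form control. A minimal sketch of that wiring, assuming a hypothetical parent reactive form; only the formControlName-style binding and the activeClusterName input are taken from the code in this diff.

import {Component} from '@angular/core';
import {FormControl, FormGroup} from '@angular/forms';

// Hypothetical parent form. Since LogIndexFilterComponent provides NG_VALUE_ACCESSOR,
// it can be bound like any other control (ReactiveFormsModule must be imported
// in the hosting NgModule); the value written back through ControlValueAccessor
// is the per-cluster log index filter configuration.
@Component({
  selector: 'log-index-filter-demo',
  template: `
    <form [formGroup]="settingsForm">
      <log-index-filter
        formControlName="logIndexFilter"
        [activeClusterName]="'cl1'">
      </log-index-filter>
    </form>
  `
})
export class LogIndexFilterDemoComponent {
  settingsForm = new FormGroup({
    logIndexFilter: new FormControl(null)
  });
}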
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.less deleted file mode 100644 index a5c3957b44e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.less +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; -@import '../../modules/shared/variables'; -:host { - div.log-index-filter-content { - table { - &.table-header { - background-color: #fff; - margin-bottom: 0; - position: sticky; - top: -1px; - z-index: 10; - th { - padding: 8px 0; - } - } - .component-column { - width: 25%; - overflow: hidden; - text-overflow: ellipsis; - } - - .checkbox-column { - width: 7%; - padding: 4px 0; - /deep/ graph-legend-item { - padding-right: 1px; - @media (max-width: 1510px) { - .item-label { - display: block; - } - } - } - } - - tr.component-default-row { - background-color: lighten(@grey-color, 10%); - } - - .override-column { - width: 26%; - padding-right: 0; - overflow: hidden; - text-overflow: ellipsis; - } - - th.override-column { - padding-left: 20px; - } - - .overrides-toggle { - .clickable-item; - } - - input[type=checkbox] + label { - font-size: @table-font-size; - top: 0; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.spec.ts deleted file mode 100644 index 3b042aeb471..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.spec.ts +++ /dev/null @@ -1,132 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule} from '@angular/forms'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UserSettingsService} from '@app/services/user-settings.service'; -import {UtilsService} from '@app/services/utils.service'; -import {DropdownButtonComponent} from '@modules/shared/components/dropdown-button/dropdown-button.component'; -import {DropdownListComponent} from '@modules/shared/components/dropdown-list/dropdown-list.component'; - -import {LogIndexFilterComponent} from './log-index-filter.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {ComponentLabelPipe} from '@app/pipes/component-label'; - -import { dataAvailabilityStates, DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; - -describe('LogIndexFilterComponent', () => { - let component: LogIndexFilterComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - FormsModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs, - dataAvailabilityStates - }) - ], - declarations: [ - LogIndexFilterComponent, - DropdownButtonComponent, - DropdownListComponent, - 
ComponentLabelPipe - ], - providers: [ - ...MockHttpRequestModules, - ComponentGeneratorService, - LogsContainerService, - UserSettingsService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService, - DataAvailabilityStatesStore - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LogIndexFilterComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.ts deleted file mode 100644 index 65c22a4d815..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-index-filter/log-index-filter.component.ts +++ /dev/null @@ -1,212 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { Component, OnInit, Input, Output, EventEmitter, forwardRef, OnDestroy, OnChanges, SimpleChanges } from '@angular/core'; -import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms'; -import { Observable } from 'rxjs/Observable'; -import 'rxjs/add/operator/map'; -import { BehaviorSubject } from 'rxjs/BehaviorSubject'; -import { Subscription } from 'rxjs/Subscription'; -import { Moment } from 'moment'; -import { ListItem } from '@app/classes/list-item'; -import { HomogeneousObject, LogLevelObject } from '@app/classes/object'; -import { LogIndexFilterComponentConfig } from '@app/classes/settings'; -import { LogLevel } from '@app/classes/string'; -import { LogsContainerService } from '@app/services/logs-container.service'; -import { UserSettingsService } from '@app/services/user-settings.service'; -import { UtilsService } from '@app/services/utils.service'; -import { ClustersService } from '@app/services/storage/clusters.service'; -import { HostsService } from '@app/services/storage/hosts.service'; -import { DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; -import { DataAvailabilityValues, DataAvailability } from '@app/classes/string'; - -@Component({ - selector: 'log-index-filter', - templateUrl: './log-index-filter.component.html', - styleUrls: ['./log-index-filter.component.less'], - providers: [ - { - provide: NG_VALUE_ACCESSOR, - useExisting: forwardRef(() => LogIndexFilterComponent), - multi: true - } - ] -}) -export class LogIndexFilterComponent implements OnInit, OnDestroy, OnChanges, ControlValueAccessor { - - @Output() - changeIsSubmitDisabled: EventEmitter = new EventEmitter(); - - private onChange: (fn: any) => void; - - readonly columns: LogLevelObject[] = this.logsContainer.logLevels; - - readonly levelNames: LogLevel[] = this.columns.map((level: LogLevelObject): LogLevel => level.name); - - clusters: Observable = this.clustersStorage.getAll(); - - hosts: Observable = this.hostsStorage.getAll(); - - clustersListItems: Observable = this.clusters.map((clusterNames: string[]): ListItem[] => { - return clusterNames.map(this.utils.getListItemFromString); - }); - - configsAvailabilityState$: Observable = this.dataAvailablilityStore.getParameter('logIndexFilter'); - configsAreLoading$: Observable = this.configsAvailabilityState$.distinctUntilChanged().map( - (state: DataAvailability) => state === DataAvailabilityValues.LOADING - ); - - @Input() - activeClusterName = ''; - - private subscriptions: Subscription[] = []; - - /** - * Configs for all clusters - */ - private configs: HomogeneousObject; - - activeClusterConfigs$: BehaviorSubject = new BehaviorSubject(null); - - constructor( - private logsContainer: LogsContainerService, - private settingsService: UserSettingsService, - private utils: UtilsService, - private clustersStorage: ClustersService, - private hostsStorage: HostsService, - private dataAvailablilityStore: DataAvailabilityStatesStore - ) { - } - - ngOnInit() { - this.changeIsSubmitDisabled.emit(true); - this.subscriptions.push( - this.clusters.subscribe((clusters: string[]) => this.settingsService.loadIndexFilterConfig(clusters)) - ); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - ngOnChanges(changes: SimpleChanges) { - if (changes.activeClusterName && this.configs) { - this.setCurrentConfig(); - } - } - - /** - * Configs for selected cluster - * @returns {LogIndexFilterComponentConfig[]} - */ - get activeClusterConfigs(): 
LogIndexFilterComponentConfig[] { - return this.configs[this.activeClusterName] || []; - } - - private setCurrentConfig() { - this.activeClusterConfigs$.next((this.configs && this.configs[this.activeClusterName]) || []); - } - - /** - * Select or unselect checkboxes for all log levels for given component - * @param {string} componentName - * @param {boolean} isChecked - * @param {boolean} isOverride - indicates whether levels for override are processed - */ - processAllLevelsForComponent(componentName: string, isChecked: boolean, isOverride: boolean = false): void { - const componentConfig = this.getComponentConfigs(componentName), - key = isOverride ? 'overrides' : 'defaults'; - this.levelNames.forEach((levelName: LogLevel) => componentConfig[levelName][key] = isChecked); - this.updateValue(); - } - - /** - * Select or unselect checkboxes for all components for given log level - * @param {LogLevel} levelName - * @param {boolean} isChecked - */ - processAllComponentsForLevel(levelName: LogLevel, isChecked: boolean): void { - this.activeClusterConfigs.forEach((component: LogIndexFilterComponentConfig): void => { - component[levelName].defaults = isChecked; - component[levelName].overrides = isChecked; - }); - this.updateValue(); - } - - /** - * Indicates whether all log levels for given component are checked - * @param {string} componentName - * @param {string} isOverride - indicates whether levels for override are overviewed - * @returns {boolean} - */ - isAllLevelsCheckedForComponent(componentName: string, isOverride: boolean = false): boolean { - const componentConfig = this.getComponentConfigs(componentName), - key = isOverride ? 'overrides' : 'defaults'; - return this.levelNames.every((levelName: LogLevel): boolean => componentConfig[levelName][key]); - } - - /** - * Indicates whether all components for given log level are checked - * @param {LogLevel} levelName - * @returns {boolean} - */ - isAllComponentsCheckedForLevel(levelName: LogLevel): boolean { - return this.activeClusterConfigs.every((component: LogIndexFilterComponentConfig): boolean => { - return component[levelName].defaults; - }); - } - - setActiveCluster(clusterName: string): void { - this.activeClusterName = clusterName; - this.changeIsSubmitDisabled.emit(false); - } - - getCheckBoxId(componentName: string, levelName: string, isOverride: boolean = false): string { - return `component_${componentName}_level_${levelName}${isOverride ? 
'_override' : ''}`; - } - - setExpiryTime(time: Moment, componentConfig): void { - componentConfig.expiryTime = time.toISOString(); - } - - private getComponentConfigs(componentName: string) { - return this.activeClusterConfigs.find((component: LogIndexFilterComponentConfig): boolean => { - return component.name === componentName; - }); - } - - writeValue(filters: HomogeneousObject): void { - this.configs = filters; - this.updateValue(); - } - - registerOnChange(callback: any): void { - this.onChange = callback; - } - - registerOnTouched(): void { - } - - updateValue(): void { - this.setCurrentConfig(); - if (this.onChange) { - this.onChange(this.configs); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.html deleted file mode 100644 index d72c9d33447..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.html +++ /dev/null @@ -1,18 +0,0 @@ - - -{{logEntry.level}} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.spec.ts deleted file mode 100644 index c13d373956f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.spec.ts +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {DebugElement} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; - -import {LogLevelComponent} from './log-level.component'; -import {By} from '@angular/platform-browser'; - -describe('LogLevelComponent', () => { - let component: LogLevelComponent; - let fixture: ComponentFixture; - let de: DebugElement; - let el: HTMLElement; - let logLevelMap = { - warn: 'fa-exclamation-triangle', - fatal: 'fa-exclamation-circle', - error: 'fa-exclamation-circle', - info: 'fa-info-circle', - debug: 'fa-bug', - trace: 'fa-random', - unknown: 'fa-question-circle' - }; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ LogLevelComponent ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LogLevelComponent); - component = fixture.componentInstance; - component.logEntry = {level: 'unknown'}; - fixture.detectChanges(); - de = fixture.debugElement.query(By.css('i.fa')); - el = de.nativeElement; - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); - - Object.keys(logLevelMap).forEach((level) => { - describe(level, () => { - beforeEach(() => { - component.logEntry = {level: level}; - fixture.detectChanges(); - }); - it(`should return with the ${logLevelMap[level]} css class for ${level} log level`, () => { - expect(component.cssClass).toEqual(logLevelMap[level]); - }); - it(`should set the ${logLevelMap[level]} css class on the icon element`, () => { - expect(el.classList).toContain(logLevelMap[level]); - }); - }); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.ts deleted file mode 100644 index 61ca49516aa..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-level/log-level.component.ts +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import {Component, Input} from '@angular/core'; - -/** - * This is a simple UI component to display the log message. The goal is to be able to show one line and be collapsile - * to show the full log message with new lines. 
- * @class LogMessageComponent - */ -@Component({ - selector: 'log-level', - templateUrl: './log-level.component.html', - styleUrls: [] -}) -export class LogLevelComponent { - - static classMap: object = { - warn: 'fa-exclamation-triangle', - fatal: 'fa-exclamation-circle', - error: 'fa-exclamation-circle', - info: 'fa-info-circle', - debug: 'fa-bug', - trace: 'fa-random', - unknown: 'fa-question-circle' - }; - - /** - * This is the log entry object - * @type {object} - */ - @Input() - logEntry: any; - - get cssClass() { - return LogLevelComponent.classMap[((this.logEntry && this.logEntry.level) || 'unknown').toLowerCase()]; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.html deleted file mode 100644 index 70c7ed7b50e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.html +++ /dev/null @@ -1,24 +0,0 @@ - -
- [log-message.component.html: template markup not preserved in this extract; the only surviving binding is {{ message }}]
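Note: since the markup above did not survive, the following is a hedged sketch only of how a template could be wired to the LogMessageComponent class removed later in this diff. The element structure, class names and the sketch component name are assumptions inferred from that class (onCaretClick, #content, isOpen, addCaret) and its LESS selectors; this is not the deleted file's actual content.

// Hedged sketch, not the deleted template: structure and class names are assumed
// from the LogMessageComponent class and LESS selectors elsewhere in this diff.
// Assumes an Angular setup where CommonModule (*ngIf) is available.
import { Component } from '@angular/core';

@Component({
  selector: 'log-message-sketch',
  template: `
    <div class="log-message-container"
         [class.log-message-container-open]="isOpen"
         [class.log-message-container-collapsible]="addCaret">
      <button *ngIf="addCaret" (click)="onCaretClick($event)">
        <span class="fa fa-caret-down caret"></span>
      </button>
      <div #content class="log-message-content">{{ message }}</div>
    </div>
  `
})
export class LogMessageSketchComponent {
  isOpen = false;   // toggled by the caret button
  addCaret = false; // true when the message is multi-line or overflows
  message = '';     // the log text to render

  onCaretClick(ev: MouseEvent): void {
    ev.preventDefault();
    this.isOpen = !this.isOpen;
  }
}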
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.less deleted file mode 100644 index 3f86da36de0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.less +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -@import '../../modules/shared/variables'; -:host { - .log-message-container { - display: block; - margin: 0; - padding: 0; - - .caret { - margin-top: -3px; - transition: transform 250ms; - transform: rotate(-90deg); - } - &.log-message-container-open { - .caret { - transform: rotate(0deg); - } - } - - .log-message-content { - max-height: calc(20em/14); // from Bootstrap - overflow: hidden; - padding-left: 1em; - position: relative; - word-wrap: break-word; - } - &.log-message-container-collapsible { - .log-message-content { - overflow: hidden; - padding-left: 0; - text-overflow: ellipsis; - white-space: nowrap; - } - } - &.log-message-container-open .log-message-content { - max-height: none; - white-space: pre-wrap; - } - - button, button:active { - background: none transparent; - border: none transparent; - color: @base-font-color; - cursor: pointer; - float: left; - height: 1em; - outline: none; - padding: 0 .15em; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.spec.ts deleted file mode 100644 index eafb1aa62c6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.spec.ts +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {NgStringPipesModule} from 'angular-pipes'; - -import {LogMessageComponent} from './log-message.component'; - -describe('LogMessageComponent', () => { - let component: LogMessageComponent; - let fixture: ComponentFixture; - const messages = { - noNewLine: 'There is no newline here.', - withNewLine: `This is the first line. - This is the second one.` - }; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [NgStringPipesModule], - declarations: [ LogMessageComponent ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LogMessageComponent); - component = fixture.componentInstance; - component.message = messages.withNewLine; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); - - it('event handler should call the toggleOpen method', () => { - const mockEvent: MouseEvent = document.createEvent('MouseEvent'); - mockEvent.initEvent('click', true, true); - spyOn(component,'toggleOpen'); - component.onCaretClick(mockEvent); - expect(component.toggleOpen).toHaveBeenCalled(); - }); - - it('event handler should prevent the default behaviour of the action', () => { - const mockEvent: MouseEvent = document.createEvent('MouseEvent'); - mockEvent.initEvent('click', true, true); - spyOn(mockEvent,'preventDefault'); - component.onCaretClick(mockEvent); - expect(mockEvent.preventDefault).toHaveBeenCalled(); - }); - - it('calling the toggleOpen method should negate the isOpen property', () => { - const currentState = component.isOpen; - component.toggleOpen(); - expect(component.isOpen).toEqual(!currentState); - }); - - it('should set the addCaret prop to TRUE if the message prop has new line character.', () => { - component.message = messages.withNewLine; - component.reCalculateOnChange(); - component.checkAddCaret(); - expect(component['addCaret']).toEqual(true); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.ts deleted file mode 100644 index 10d1cca8e37..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-message/log-message.component.ts +++ /dev/null @@ -1,168 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { - Component, - Input, - AfterViewInit, - ElementRef, - ViewChild, - OnChanges, - OnInit, - OnDestroy, - SimpleChanges, - ChangeDetectorRef -} from '@angular/core'; -import {Subject} from 'rxjs/Subject'; -import {Subscription} from 'rxjs/Subscription'; -import 'rxjs/add/operator/auditTime'; - -/** - * This is a simple UI component to display the log message. The goal is to be able to show one line and be collapsile - * to show the full log message with new lines. - * @class LogMessageComponent - */ -@Component({ - selector: 'log-message', - templateUrl: './log-message.component.html', - styleUrls: ['./log-message.component.less'] -}) -export class LogMessageComponent implements AfterViewInit, OnChanges, OnInit, OnDestroy { - - /** - * This is the element reference to the message log container element. So that we can calculate if the caret should be - * displayed or not. - * @type ElementRef - */ - @ViewChild('content') content: ElementRef; - - /** - * This is the flag property to indicate if the content container is open or not. - * @type {boolean} - */ - @Input() - isOpen = false; - - /** - * This is a helper property to handle the changes on the parent component. The goal of this input is to be able to - * react when the parent component (currently the log-list component) has changed (its size) in a way that the - * LogMessageComponent should check if the caret should be visible or not. - */ - @Input() - refreshOn$: Subject; - - /** - * This will be shown as log message in the component - */ - @Input() - message: string; - - /** - * This is a private flag to check if it should display the caret or not, it depends on the size of the size of - * the content container element. Handled by the @checkAddCaret method - * @type {boolean} - */ - addCaret = false; - - private scrollWidth: number; - - /** - * This is a regexp tester to check if the log message is multiline text or single line. Doing by checking the new - * line characters. - * @type {RegExp} - */ - private readonly multiLineTestRegexp = /\r?\n|\r/; - - /** - * This is a primary check if the message content does contain new line (/n) characters. If so than we display the - * caret to give a possibility to the user to see the message as it is (pre-wrapped). - * @type {boolean} - */ - isMultiLineMessage = false; - - /** - * The array to collect all the subscriptions created by the instance in order to unsubscribe when the component - * destroyed - */ - protected subscriptions: Subscription[] = []; - - constructor(private cdRef: ChangeDetectorRef) {} - - /** - * This change handler's goal is to check if we should add the caret or not. Mainly it is because currently we have - * the LogListComponent where columns can be added or removed and we have to recheck the visibility of the caret every - * changes of the displayed columns. - * @param {SimpleChanges} changes - */ - ngOnChanges(changes: SimpleChanges): void { - if (changes.message !== undefined) { - this.message = this.message.trim(); - this.reCalculateOnChange(); - this.checkAddCaret(); - } - } - - ngOnInit() { - if (this.refreshOn$) { - this.subscriptions.push(this.refreshOn$.subscribe(this.checkAddCaret)); - } - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - /** - * The goal is to perform a initial caret display check when the component has been initialized. 
- */ - ngAfterViewInit(): void { - this.reCalculateOnChange(); - this.checkAddCaret(); - } - - reCalculateOnChange() { - this.isMultiLineMessage = this.multiLineTestRegexp.test(this.message); - this.scrollWidth = this.content.nativeElement.scrollWidth; - } - - /** - * The goal is to perform a height check on the content container element. It is based on the comparison of the - * scrollHeight and the clientHeight. - */ - checkAddCaret = (): void => { - this.addCaret = this.isMultiLineMessage || (this.scrollWidth > this.content.nativeElement.clientWidth); - this.cdRef.detectChanges(); - } - - /** - * This is the click event handler of the caret button element. It will only toggle the isOpen property so that the - * component element css classes will follow its state. - * @param ev {MouseEvent} - */ - onCaretClick(ev: MouseEvent) { - ev.preventDefault(); - this.toggleOpen(); - } - - /** - * This is a simple property toggle method of the @isOpen property. - * The goal is to separate this logic from the event handling and give a way to call it from anywhere. - */ - toggleOpen(): void { - this.isOpen = !this.isOpen; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.html deleted file mode 100644 index 3db75c691b6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.html +++ /dev/null @@ -1,33 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.less deleted file mode 100644 index 19d800d7ed4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.less +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/variables'; - -.login-form { - margin-top: @block-margin-top; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts deleted file mode 100644 index 3ec55fda7a8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule} from '@angular/forms'; -import {TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {AuthService} from '@app/services/auth.service'; - -import {LoginFormComponent} from './login-form.component'; -import {RouterTestingModule} from '@angular/router/testing'; -import {NotificationsService} from 'angular2-notifications'; -import {NotificationService} from '@app/modules/shared/services/notification.service'; - -describe('LoginFormComponent', () => { - let component: LoginFormComponent; - let fixture: ComponentFixture; - - const authMock = { - isError: false, - isAuthorized: false - }; - - const AuthServiceMock = { - login: () => { - return { - subscribe: (observer: (resp) => void, error: (resp) => void) => { - authMock.isAuthorized ? observer(authMock.isAuthorized) : error(authMock.isAuthorized); - } - }; - } - }; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [LoginFormComponent], - imports: [ - RouterTestingModule, - FormsModule, - ...TranslationModules, - StoreModule.provideStore({ - appState - }) - ], - providers: [ - AppStateService, - { - provide: AuthService, - useValue: AuthServiceMock - }, - NotificationsService, - NotificationService - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LoginFormComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - describe('#login()', () => { - const cases = [ - { - isError: true, - isLoginAlertDisplayed: true, - isAuthorized: false, - title: 'login failure' - }, - { - isError: false, - isLoginAlertDisplayed: false, - isAuthorized: true, - title: 'login success' - } - ]; - - cases.forEach(test => { - describe(test.title, () => { - beforeEach(() => { - authMock.isError = test.isError; - authMock.isAuthorized = test.isAuthorized; - component.login(); - }); - - it('isLoginAlertDisplayed', () => { - expect(component.isLoginAlertDisplayed).toEqual(test.isLoginAlertDisplayed); - }); - - }); - }); - - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.ts deleted file mode 100644 index 2f28411c101..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.ts +++ /dev/null @@ -1,86 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, ViewChild, OnInit, OnDestroy} from '@angular/core'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/finally'; -import {Subscription} from 'rxjs/Subscription'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {AuthService} from '@app/services/auth.service'; -import {TranslateService} from '@ngx-translate/core'; -import {FormGroup} from '@angular/forms'; - -@Component({ - selector: 'login-form', - templateUrl: './login-form.component.html', - styleUrls: ['./login-form.component.less'] -}) -export class LoginFormComponent implements OnInit, OnDestroy { - - username: string; - - password: string; - - isLoginAlertDisplayed: boolean; - - isLoginInProgress$: Observable = this.appState.getParameter('isLoginInProgress'); - - errorMessage: string; - - @ViewChild('loginForm') - loginForm: FormGroup; - - subscriptions: Subscription[] = []; - - constructor( - private authService: AuthService, - private appState: AppStateService, - private translateService: TranslateService - ) {} - - ngOnInit(): void { - this.subscriptions.push( - this.loginForm.valueChanges.subscribe(this.onLoginFormChange) - ); - } - - ngOnDestroy(): void { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - onLoginFormChange = (event) => { - this.isLoginAlertDisplayed = false; - } - - private onLoginSuccess = (result: Boolean): void => { - this.isLoginAlertDisplayed = false; - this.errorMessage = ''; - } - - private onLoginError = (resp: Boolean): void => { - this.translateService.get('authorization.error.401').first().subscribe((message: string) => { - this.errorMessage = message; - this.isLoginAlertDisplayed = true; - }); - } - - login() { - this.authService.login(this.username, this.password).subscribe(this.onLoginSuccess, this.onLoginError); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html deleted file mode 100644 index c319ca9174f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html +++ /dev/null @@ -1,84 +0,0 @@ - - -
- [logs-container.component.html: template markup not preserved in this extract; surviving bindings: {{'filter.youAreInSnapshotView' | translate}}, {{(!totalEventsFoundMessageParams.totalCount ? 'logs.noEventFound' : (totalEventsFoundMessageParams.totalCount === 1 ? 'logs.oneEventFound' : 'logs.totalEventFound')) | translate: totalEventsFoundMessageParams}}, {{'filter.refreshingLogListIn' | translate}}, {{autoRefreshRemainingSeconds}}, {{'filter.capture.sec' | translate}}]
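The LogsContainerComponent removed further down in this diff keeps the URL parameters and the filters form in sync in both directions and guards against feedback loops with in-progress flags (paramsSyncInProgress, filtersFormSyncInProgress). The snippet below is a minimal, hedged sketch of that guard pattern only; the subjects and the syncFormToParams helper are illustrative names, not the component's actual API.

// Minimal sketch of the loop-guard pattern: while one direction (form -> URL) is
// syncing, a flag is raised so the opposite subscription (URL -> form) skips the
// resulting emission. Names are illustrative only; RxJS 5-style imports match
// the rest of this codebase.
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { Subject } from 'rxjs/Subject';
import 'rxjs/add/operator/filter';

const paramsSyncInProgress = new BehaviorSubject<boolean>(false);
const paramsChanges = new Subject<{ [key: string]: any }>();

// URL -> form: ignored while the form itself caused the params change
paramsChanges
  .filter(() => !paramsSyncInProgress.getValue())
  .subscribe((params: { [key: string]: any }) => {
    // here the component would reset the filters form from the params
    console.log('reset filters form from params', params);
  });

// form -> URL: raise the flag, push the new params, then lower the flag
function syncFormToParams(formValue: { [key: string]: any }): void {
  paramsSyncInProgress.next(true);
  paramsChanges.next(formValue); // stands in for router.navigate([...])
  paramsSyncInProgress.next(false);
}

syncFormToParams({ clusters: 'cl1' }); // skipped by the URL -> form subscription above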
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.less deleted file mode 100644 index ef61abe6240..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.less +++ /dev/null @@ -1,99 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; -@import '../../modules/shared/variables'; - -:host { - display: block; - - .tabs-container, .auto-refresh-message { - background-color: @filters-panel-background-color; - } - .tabs-container { - border-bottom: 1px solid @table-border-color; - .tabs-menu-container { - .flex-vertical-align; - height: 62px; - action-menu { - margin-left: auto; - } - /deep/ tabs ul.nav.nav-tabs { - margin: 0; - } - } - } - - .fixed-filterbar { - filters-panel { - background-color: fadeout(@filters-panel-background-color, 5%); - box-shadow: 0 2px 2px rgba(0,0,0,.1); - left: 0; - margin: 0; - position: fixed; - top: 0; - width: 100%; - z-index: 1; - } - } - - .events-count { - margin-top: @block-margin-top; - } - - /deep/ collapsible-panel.service-logs-histogram { - .panel-heading { - header { - margin-left: auto; - } - } - } - - /deep/ modal-dialog.capture-dialog { - .modal-dialog { - max-width: 350px; - } - .modal-body { - display: flex; - flex-direction: column; - /deep/ circle-progress-bar { - display: inline-block; - align-self: center; - label { - font-size: 3rem; - font-weight: normal; - .unit { - color: @fluid-gray-2; - font-size: 1.2rem; - } - } - } - } - } - - .panel-capture-view { - padding: 1rem; - i { - color: @fluid-gray-1; - &.fa-play { - padding-right: 1rem; - } - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts deleted file mode 100644 index 78245e47e20..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts +++ /dev/null @@ -1,125 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {StoreModule} from '@ngrx/store'; -import {TooltipModule} from 'ngx-bootstrap'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {UtilsService} from '@app/services/utils.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {TabsComponent} from '@app/components/tabs/tabs.component'; - -import {LogsContainerComponent} from './logs-container.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('LogsContainerComponent', () => { - let component: LogsContainerComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - LogsContainerComponent, - TabsComponent - ], - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - appSettings, - appState, - clusters, - components, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - tabs, - hosts, - serviceLogsTruncated - }), - ...TranslationModules, - TooltipModule.forRoot(), - ], - providers: [ - ...MockHttpRequestModules, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - 
AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - UtilsService, - LogsContainerService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LogsContainerComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - it('totalEventsFoundMessageParams should provide total count number', () => { - expect(Object.keys(component.totalEventsFoundMessageParams)).toContain('totalCount'); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts deleted file mode 100644 index 6b983fcac01..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts +++ /dev/null @@ -1,384 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, OnInit, ElementRef, ViewChild, HostListener, Input, OnDestroy, ChangeDetectorRef} from '@angular/core'; -import {FormGroup} from '@angular/forms'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/debounceTime'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service'; -import {AuditLogsGraphDataService} from '@app/services/storage/audit-logs-graph-data.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {TabsService} from '@app/services/storage/tabs.service'; -import {AuditLog} from '@app/classes/models/audit-log'; -import {ServiceLog} from '@app/classes/models/service-log'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {BarGraph} from '@app/classes/models/bar-graph'; -import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject, LogLevelObject} from '@app/classes/object'; -import {LogsType, LogLevel} from '@app/classes/string'; -import {FiltersPanelComponent} from '@app/components/filters-panel/filters-panel.component'; -import {Subscription} from 'rxjs/Subscription'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {ActivatedRoute, Router} from '@angular/router'; -import {BehaviorSubject} from 'rxjs/BehaviorSubject'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; - -@Component({ - selector: 'logs-container', - templateUrl: './logs-container.component.html', - styleUrls: ['./logs-container.component.less'] -}) -export class LogsContainerComponent implements OnInit, OnDestroy { - - private isFilterPanelFixedPostioned = false; - - tabs: Observable = this.tabsStorage.getAll().map((tabs: LogTypeTab[]) => { - return tabs.map((tab: LogTypeTab) => { - const params = this.logsFilteringUtilsService.getParamsFromActiveFilter( - tab.activeFilters, tab.appState.activeLogsType - ); - return Object.assign({}, tab, {params}); - }); - }); - - logsType: LogsType; - - serviceLogsHistogramData: HomogeneousObject>; - - auditLogsGraphData: HomogeneousObject>; - - serviceLogsHistogramColors: HomogeneousObject = this.logsContainerService.logLevels.reduce(( - currentObject: HomogeneousObject, level: LogLevelObject - ): HomogeneousObject => { - return Object.assign({}, currentObject, { - [level.name]: level.color - }); - }, {}); - - isServiceLogContextView = false; - - private activeTabId$: BehaviorSubject = new BehaviorSubject( - this.router.routerState.snapshot.root.firstChild && this.router.routerState.snapshot.root.firstChild.params.activeTab - ); - - @ViewChild('container') containerRef: ElementRef; - @ViewChild('filtersPanel') filtersPanelRef: FiltersPanelComponent; - - @Input() - routerPath: string[] = ['/logs']; - - private subscriptions: Subscription[] = []; - private paramsSyncInProgress: BehaviorSubject = new BehaviorSubject(false); - - isServiceLogsFileView$: Observable = this.appState.getParameter('isServiceLogsFileView'); - - constructor( - private appState: AppStateService, - private tabsStorage: TabsService, - private logsContainerService: LogsContainerService, - private logsFilteringUtilsService: LogsFilteringUtilsService, - private serviceLogsHistogramStorage: ServiceLogsHistogramDataService, - private auditLogsGraphStorage: AuditLogsGraphDataService, - private router: Router, - private 
activatedRoute: ActivatedRoute, - private logsStateService: LogsStateService - ) {} - - ngOnInit() { - this.logsContainerService.loadColumnsNames(); - // set te logsType when the activeLogsType state has changed - this.subscriptions.push( - this.appState.getParameter('activeLogsType').subscribe((value: LogsType) => this.logsType = value) - ); - // set the hhistogramm data - this.subscriptions.push( - this.serviceLogsHistogramStorage.getAll().subscribe((data: BarGraph[]): void => { - this.serviceLogsHistogramData = this.logsContainerService.getGraphData(data, this.logsContainerService.logLevels.map(( - level: LogLevelObject - ): LogLevel => { - return level.name; - })); - }) - ); - // audit graph data set - this.subscriptions.push( - this.auditLogsGraphStorage.getAll().subscribe((data: BarGraph[]): void => { - this.auditLogsGraphData = this.logsContainerService.getGraphData(data); - }) - ); - // service log context flag subscription - this.subscriptions.push( - this.appState.getParameter('isServiceLogContextView').subscribe((value: boolean): void => { - this.isServiceLogContextView = value; - }) - ); - - this.activatedRoute.params.first().map(params => params.activeTab).subscribe((tabId) => { - this.logsContainerService.setActiveTabById(tabId); - }); - - //// SYNC BETWEEN PARAMS AND FORM - // sync to filters form when the query params changed (only when there is no other way sync) - this.subscriptions.push( - this.activatedRoute.params.filter(() => !this.paramsSyncInProgress.getValue()) - .subscribe(this.onParamsChange) - ); - // Sync from form to params on form values change - this.subscriptions.push( - this.filtersForm.valueChanges - .filter(() => !this.logsContainerService.filtersFormSyncInProgress.getValue()) - .subscribe(this.onFiltersFormChange) - ); - //// SYNC BETWEEN PARAMS AND FORM END - - //// TAB CHANGE - // when the activeTabId$ behaviour subject change, this depends on the params' changes - this.subscriptions.push( - this.activeTabId$.distinctUntilChanged().subscribe(this.onActiveTabIdChange) - ); - - // set the position of the filter panel depending on the scroll height: so it is fixed when it would be out from the screen - this.subscriptions.push( - Observable.fromEvent(window, 'scroll').debounceTime(10).subscribe(this.setFixedPositionValue) - ); - - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - get filtersForm(): FormGroup { - return this.logsContainerService.filtersForm; - }; - - get totalCount(): number { - return this.logsContainerService.totalCount; - } - - get autoRefreshRemainingSeconds(): number { - return this.logsContainerService.autoRefreshRemainingSeconds; - } - get autoRefreshInterval(): number { - return this.logsContainerService.autoRefreshInterval; - } - get captureTimeRangeCache(): ListItem { - return this.logsContainerService.captureTimeRangeCache; - } - - get autoRefreshMessageParams(): object { - return { - remainingSeconds: this.autoRefreshRemainingSeconds - }; - } - - /** - * The goal is to provide the single source for the parameters of 'xyz events found' message. 
- * @returns {Object} - */ - get totalEventsFoundMessageParams(): {totalCount: number} { - return { - totalCount: this.totalCount - }; - } - - get isServiceLogsFileView(): boolean { - return this.logsContainerService.isServiceLogsFileView; - } - - get activeLog(): ActiveServiceLogEntry | null { - return this.logsContainerService.activeLog; - } - - get auditLogs(): Observable { - return this.logsContainerService.auditLogs; - } - - get auditLogsColumns(): Observable { - return this.logsContainerService.auditLogsColumns; - } - - get serviceLogs(): Observable { - return this.logsContainerService.serviceLogs; - } - - get serviceLogsColumns(): Observable { - return this.logsContainerService.serviceLogsColumns; - } - - // - // SECTION: TABS - // - - /** - * Set the active params in the store corresponding to the URL param (activeTab) - * @param {string} tabId The 'activeTab' segment of the URL (eg.: #/logs/serviceLogs where the serviceLogs is the activeTab parameter) - */ - private onActiveTabIdChange = (tabId: string): void => { - this.logsContainerService.setActiveTabById(tabId); - } - - // - // SECTION END: TABS - // - - // - // SECTION: FILTER SYNCHRONIZATION - // - - /** - * Turn on the 'query params in sync' flag, so that the query to form sync don't run. - * So when we actualize the query params to reflect the filters form values we have to turn of the back sync (query params change to form) - */ - private paramsSyncStart = (): void => { - this.paramsSyncInProgress.next(true); - } - /** - * Turn off the 'query params in sync' flag - */ - private paramsSyncStop = (): void => { - this.paramsSyncInProgress.next(false); - } - - /** - * The goal is to make the app always bookmarkable. - * @param filters - */ - private syncFiltersToParams(filters): void { - const params = this.logsFilteringUtilsService.getParamsFromActiveFilter( - filters, this.logsContainerService.activeLogsType - ); - this.paramsSyncStart(); // turn on the 'sync in progress' flag - this.router.navigate([params], { relativeTo: this.activatedRoute }) - .then(this.paramsSyncStop, this.paramsSyncStop) // turn off the 'sync in progress' flag - .catch(this.paramsSyncStop); // turn off the 'sync in progress' flag - } - - /** - * This will call the LogsContainerService to reset the filter form with the given values. - * It will add default values where it is missing from the object. - * @param values {[key: string]: any} The new values for the filter form - */ - private resetFiltersForm(values: {[key: string]: any}): void { - if (Object.keys(values).length) { - this.logsContainerService.resetFiltersForms({ - ...this.logsFilteringUtilsService.defaultFilterSelections, - ...values - }); - } - } - - /** - * It will request the LogsContainerService to store the given filters to the given tab - * in order to apply these filters when there is no filter params in the URL. - * @param filters {[key: string]: any} The values for the filters form - * @param tabId string The tab where it should be stored (in activeFilters property) - */ - private syncFilterToTabStore(filters: {[key: string]: any}, tabId: string): void { - this.logsContainerService.syncFiltersToTabFilters(filters, tabId); - } - - /** - * Handle the filters' form changes and sync it to the query parameters - * @param values The new filter values. 
This is the raw value of the form group - */ - private onFiltersFormChange = (filters): void => { - this.syncFiltersToParams(filters); - } - - private onParamsChange = (params: {[key: string]: any}) => { - const {activeTab, ...filtersParams} = params; - this.tabsStorage.findInCollection((tab: LogTypeTab) => tab.id === params.activeTab) - .first() - .subscribe((tab) => { - if (tab) { - const filtersFromParams: {[key: string]: any} = this.logsFilteringUtilsService.getFilterFromParams( - filtersParams, - tab.appState.activeLogsType - ); - // we dont't have to reset the form with the new values when there is tab changes - // because the onActiveTabIdChange will call the setActiveTabById on LogsContainerService - // which will reset the form to the tab's activeFilters prop. - // If we do reset wvery time then the form will be reseted twice with every tab changes... not a big deal anyway - if (this.activeTabId$.getValue() === activeTab) { - this.resetFiltersForm(filtersFromParams); - } - this.syncFilterToTabStore(filtersFromParams, activeTab); - this.activeTabId$.next(activeTab); - } - }); - } - - // - // SECTION END: FILTER SYNCHRONIZATION - // - - /** - * The goal is to set the fixed position of the filter panel when it is scrolled to the top. So that the panel - * can be always visible for the user. - */ - private setFixedPositionValue = (): void => { - const el: Element = this.containerRef.nativeElement; - const top: number = el.getBoundingClientRect().top; - const valueBefore: boolean = this.isFilterPanelFixedPostioned; - if (valueBefore !== (top <= 0)) { - const fpEl: Element = this.filtersPanelRef.containerEl; - this.isFilterPanelFixedPostioned = top <= 0; - const filtersPanelHeight: number = fpEl.getBoundingClientRect().height; - const containerPaddingTop: number = parseFloat(window.getComputedStyle(el).paddingTop); - const htmlEl: HTMLElement = this.containerRef.nativeElement; - if (this.isFilterPanelFixedPostioned) { - htmlEl.style.paddingTop = (containerPaddingTop + filtersPanelHeight) + 'px'; - } else { - htmlEl.style.paddingTop = (containerPaddingTop - filtersPanelHeight) + 'px'; - } - } - } - - setCustomTimeRange(startTime: number, endTime: number): void { - this.logsContainerService.setCustomTimeRange(startTime, endTime); - } - - onSwitchTab(activeTab: LogTypeTab): void { - this.logsContainerService.switchTab(activeTab); - } - - onCloseTab(activeTab: LogTypeTab, newActiveTab: LogTypeTab): void { - const activateNewTab: boolean = activeTab.isActive; - this.tabsStorage.deleteObjectInstance(activeTab); - if (activateNewTab && newActiveTab) { - this.router.navigate(['/logs', ...this.logsFilteringUtilsService.getNavigationForTab(newActiveTab)]); - } - } - // - // CAPTURE FEATURES - // - cancelCapture(): void { - this.logsContainerService.cancelCapture(); - } - - clearCaptureTimeRangeCache(): void { - if (this.captureTimeRangeCache) { - this.filtersForm.controls.timeRange.setValue(this.captureTimeRangeCache); - this.logsContainerService.captureTimeRangeCache = null; - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html deleted file mode 100644 index 8ff5cc815a9..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html +++ /dev/null @@ -1,18 +0,0 @@ - - - diff --git 
a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts deleted file mode 100644 index 18adec7aba7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {HttpModule} from '@angular/http'; -import {StoreModule} from '@ngrx/store'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; - -import {MainContainerComponent} from './main-container.component'; - -describe('MainContainerComponent', () => { - let component: MainContainerComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [MainContainerComponent], - imports: [ - HttpModule, - StoreModule.provideStore({ - appState, - auditLogsFields, - serviceLogsFields - }) - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA], - providers: [ - AppStateService, - AuditLogsFieldsService, - ServiceLogsFieldsService - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(MainContainerComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts deleted file mode 100644 index cd0f1be4671..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, OnDestroy, OnInit} from '@angular/core'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {Subscription} from 'rxjs/Subscription'; - -@Component({ - selector: 'main-container', - templateUrl: './main-container.component.html' -}) -export class MainContainerComponent implements OnInit, OnDestroy{ - - private subscriptions: Subscription[] = []; - - constructor(private appState: AppStateService) {} - - ngOnInit() { - this.subscriptions.push( - this.appState.getParameter('isAuthorized').subscribe((value: boolean) => this.isAuthorized = value) - ); - this.subscriptions.push( - this.appState.getParameter('isInitialLoading').subscribe((value: boolean) => this.isInitialLoading = value) - ); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - isAuthorized: boolean = false; - - isInitialLoading: boolean = false; - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.html deleted file mode 100644 index 7061defaa78..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.html +++ /dev/null @@ -1,32 +0,0 @@ - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.less deleted file mode 100644 index f5f2079407f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.less +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -@import '../../modules/shared/mixins'; - -:host { - cursor: pointer; - display: inline-block; - position: relative; - &.disabled { - cursor: auto; - } - a { - text-align: center; - text-decoration: none; - .icon { - display: inline-block; - position: relative; - &.fa-caret-down { - padding: 0 .25em; - } - } - .menu-button-label { - display: block; - } - } - - .badge { - background: @badge-bg; - font-size: 1rem; - min-width: 1em; - padding: @badge-padding; - position: absolute; - top: 0; - right: -1em; - } - - .disabled { - * { - color: @unknown-color; - cursor: auto; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts deleted file mode 100644 index 4e77db597a3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts +++ /dev/null @@ -1,188 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; - -import {MenuButtonComponent} from './menu-button.component'; - -describe('MenuButtonComponent', () => { - let component: MenuButtonComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - } - } - }; - TestBed.configureTestingModule({ - declarations: [MenuButtonComponent], - imports: [ - StoreModule.provideStore({ - appSettings, - appState, - clusters, - components, - hosts, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - { - provide: HttpClientService, - useValue: httpClient - }, - LogsContainerService, - AuthService - ], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(MenuButtonComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - describe('#hasSubItems', () => { - const cases = [ - { - subItems: null, - hasSubItems: false, - title: 'no sub-items' - }, - { - subItems: [], - hasSubItems: false, - title: 'empty sub-items array' - }, - { - subItems: [ - { - value: null - } - ], - hasSubItems: true, - title: 'sub-items present' - } - ]; - - cases.forEach((test) => { - it(test.title, () => { - component.subItems = test.subItems; - expect(component.hasSubItems).toEqual(test.hasSubItems); - }); - 
}); - }); - - describe('#hasCaret', () => { - const cases = [ - { - subItems: null, - hideCaret: false, - hasCaret: false, - title: 'no sub-items' - }, - { - subItems: [], - hideCaret: false, - hasCaret: false, - title: 'empty sub-items array' - }, - { - subItems: [ - { - value: null - } - ], - hideCaret: false, - hasCaret: true, - title: 'sub-items present, caret not hidden' - }, - { - subItems: [ - { - value: null - } - ], - hideCaret: true, - hasCaret: true, - title: 'sub-items present, caret hidden' - } - ]; - - cases.forEach((test) => { - it(test.title, () => { - component.subItems = test.subItems; - component.hideCaret = test.hideCaret; - expect(component.hasSubItems).toEqual(test.hasCaret); - }); - }); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.ts deleted file mode 100644 index 788494cf983..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.ts +++ /dev/null @@ -1,234 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input, Output, ViewChild, ElementRef, EventEmitter} from '@angular/core'; -import {ListItem} from '@app/classes/list-item'; -import {DropdownListComponent} from '@modules/shared/components/dropdown-list/dropdown-list.component'; - -@Component({ - selector: 'menu-button', - templateUrl: './menu-button.component.html', - styleUrls: ['./menu-button.component.less'] -}) -export class MenuButtonComponent { - - @ViewChild('dropdown') - dropdown: ElementRef; - - @ViewChild('dropdownList') - dropdownList: DropdownListComponent; - - @Input() - label?: string; - - @Input() - iconClass: string; - - @Input() - labelClass?: string; - - @Input() - subItems?: ListItem[]; - - @Input() - isMultipleChoice: boolean = false; - - @Input() - hideCaret: boolean = false; - - @Input() - isRightAlign: boolean = false; - - @Input() - additionalLabelComponentSetter?: string; - - @Input() - badge: string; - - @Input() - caretClass: string = 'fa-caret-down'; - - @Input() - useDropDownLocalFilter: boolean = false; - - /** - * The minimum time to handle a mousedown as a longclick. Default is 500 ms (0.5sec) - * @default 500 - * @type {number} - */ - @Input() - minLongClickDelay: number = 500; - - /** - * The maximum milliseconds to wait for longclick ends. The default is 0 which means no upper limit. 
- * @default 0 - * @type {number} - */ - @Input() - maxLongClickDelay: number = 0; - - @Input() - isDisabled: boolean = false; - - @Input() - listClass: string = ''; - - @Output() - buttonClick: EventEmitter = new EventEmitter(); - - @Output() - selectItem: EventEmitter = new EventEmitter(); - - /** - * This is a private property to indicate the mousedown timestamp, so that we can check it when teh click event - * has been triggered. - */ - private mouseDownTimestamp: number; - - /** - * Indicates if the dropdown list is open or not. So that we use internal state to display or hide the dropdown. - * @type {boolean} - */ - private dropdownIsOpen: boolean = false; - - get hasSubItems(): boolean { - return Boolean(this.subItems && this.subItems.length); - } - - get hasCaret(): boolean { - return this.hasSubItems && !this.hideCaret; - } - - /** - * Handling the click event on the component element. - * Two goal: - * - check if we have a 'longclick' event and open the dropdown (if any) when longclick event happened - * - trigger the action or the dropdown open depending on the target element (caret will open the dropdown otherwise - * trigger the action. - * @param {MouseEvent} event - */ - onMouseClick(event: MouseEvent): void { - if (!this.isDisabled) { - const el = event.target; - const now = Date.now(); - const mdt = this.mouseDownTimestamp; // mousedown time - const isLongClick = mdt && mdt + this.minLongClickDelay <= now && ( - !this.maxLongClickDelay || mdt + this.maxLongClickDelay >= now - ); - const openDropdown = this.hasSubItems && ( - el.classList.contains(this.caretClass) || isLongClick || !this.buttonClick.observers.length - ); - if (openDropdown && this.dropdown) { - if (this.toggleDropdown()) { - this.listenToClickOut(); - } - } else if (this.buttonClick.observers.length) { - this.buttonClick.emit(); - } - this.mouseDownTimestamp = 0; - } - event.preventDefault(); - } - - /** - * Listening the click event on the document so that we can hide our dropdown list if the event source is not the - * component. - */ - private listenToClickOut = (): void => { - if (this.dropdownIsOpen) { - document.addEventListener('click', this.onDocumentMouseClick); - } - } - - /** - * Handling the click event on the document to hide the dropdown list if it needs. - * @param {MouseEvent} event - */ - private onDocumentMouseClick = (event: MouseEvent): void => { - const el = event.target; - if (!this.dropdown.nativeElement.contains(el)) { - this.closeDropdown(); - this.removeDocumentClickListener(); - } - } - - /** - * Handling the mousedown event, so that we can check the long clicks and open the dropdown if any. - * @param {MouseEvent} event - */ - onMouseDown = (event: MouseEvent): void => { - if (this.hasSubItems) { - const el = event.target; - if (!el.classList.contains(this.caretClass)) { - this.mouseDownTimestamp = Date.now(); - } - } - } - - /** - * The goal is to have one and only one place where we open the dropdown. So that later if we need to change the way - * how we do, it will be easier. - */ - private openDropdown(): void { - this.dropdownIsOpen = true; - } - - /** - * The goal is to have one and only one place where we close the dropdown. So that later if we need to change the way - * how we do, it will be easier. - */ - private closeDropdown(): void { - this.dropdownIsOpen = false; - } - - /** - * Just a simple helper method to make the dropdown toggle more easy. 
- * @returns {boolean} It will return the open state of the dropdown; - */ - private toggleDropdown(): boolean { - this[this.dropdownIsOpen ? 'closeDropdown' : 'openDropdown'](); - return this.dropdownIsOpen; - } - - /** - * The goal is to simply remove the click event listeners from the document. - */ - private removeDocumentClickListener(): void { - document.removeEventListener('click', this.onDocumentMouseClick); - } - - /** - * The main goal if this function is tho handle the item change event on the child dropdown list. - * Should update the value and close the dropdown if it is not multiple choice type. - * @param {ListItem} item The selected item(s) from the dropdown list. - */ - onDropdownItemChange(item: ListItem | ListItem[]) { - this.updateSelection(item); - if (!this.isMultipleChoice) { - this.closeDropdown(); - } - } - - updateSelection(item: ListItem | ListItem[]) { - this.selectItem.emit(item); - if (this.dropdownList) { - this.dropdownList.doItemsCheck(); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.html deleted file mode 100644 index 96c86191cb4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.html +++ /dev/null @@ -1,19 +0,0 @@ - - -
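For reference, a minimal standalone sketch of the long-click test used by the MenuButtonComponent removed above; the constant names and the 500 ms / 0 defaults come from the component, while the helper function itself is illustrative only and not part of the original sources:

// Illustrative sketch: a press counts as a long click when at least minLongClickDelay ms
// have elapsed since mousedown and, if maxLongClickDelay is non-zero, no more than
// maxLongClickDelay ms. This mirrors the predicate computed inline in onMouseClick.
function isLongClick(
  mouseDownTimestamp: number,  // Date.now() captured in the mousedown handler, 0 if none
  now: number,                 // Date.now() at click time
  minLongClickDelay: number,   // component default: 500 ms
  maxLongClickDelay: number    // component default: 0, meaning no upper limit
): boolean {
  return Boolean(mouseDownTimestamp)
    && mouseDownTimestamp + minLongClickDelay <= now
    && (!maxLongClickDelay || mouseDownTimestamp + maxLongClickDelay >= now);
}

// e.g. isLongClick(1000, 1600, 500, 0) === true, isLongClick(1000, 1200, 500, 0) === false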
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.less deleted file mode 100644 index b78b84774e5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.less +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@bar-height: 8px; - -:host { - display: block; - width: 100%; - - .bar-sector { - display: inline-block; - height: @bar-height; - - &:first-child { - border-top-left-radius: @bar-height / 2; - border-bottom-left-radius: @bar-height / 2; - } - - &:last-child { - border-top-right-radius: @bar-height / 2; - border-bottom-right-radius: @bar-height / 2; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.spec.ts deleted file mode 100644 index d47436e789e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.spec.ts +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; - -import {NodeBarComponent} from './node-bar.component'; - -describe('NodeBarComponent', () => { - let component: NodeBarComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [NodeBarComponent] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(NodeBarComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.ts deleted file mode 100644 index c7b3ead211f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/node-bar/node-bar.component.ts +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input} from '@angular/core'; - -@Component({ - selector: 'node-bar', - templateUrl: './node-bar.component.html', - styleUrls: ['./node-bar.component.less'] -}) -export class NodeBarComponent { - - @Input() - data: any[] = []; - - get totalCount(): number { - return this.data.reduce((currentValue, currentItem) => currentValue + Number(currentItem.value), 0); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.html deleted file mode 100644 index 370d1c76e2e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.html +++ /dev/null @@ -1,28 +0,0 @@ - - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.less deleted file mode 100644 index c633236bb8c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.less +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -.pagination-control { - .clickable-item; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.spec.ts deleted file mode 100644 index 999609c1d95..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.spec.ts +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; - -import {PaginationControlsComponent} from './pagination-controls.component'; - -describe('PaginationControlsComponent', () => { - let component: PaginationControlsComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [PaginationControlsComponent] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(PaginationControlsComponent); - component = fixture.componentInstance; - component.registerOnChange(() => {}); - component.pagesCount = 3; - component.totalCount = 30; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - it('should the hasNextPage function return true when the currentPage is less than the pagesCount', () => { - component.pagesCount = 3; - component.totalCount = 30; - fixture.detectChanges(); - expect(component.hasNextPage()).toBe(true); - }); - it('should the hasNextPage function return false when the currentPage is equal than the pagesCount', () => { - component.currentPage = 3; - fixture.detectChanges(); - expect(component.hasNextPage()).toBe(false); - }); - it('should the hasNextPage function return false when the pagesCount is 0', () => { - component.pagesCount = 0; - component.totalCount = 0; - component.currentPage = 0; - fixture.detectChanges(); - expect(component.hasNextPage()).toBe(false); - }); - - it('should the hasPreviousPage function return true when the currentPage is greater than 0 and the pagesCount is greater than 0', () => { - component.currentPage = 1; - fixture.detectChanges(); - expect(component.hasPreviousPage()).toBe(true); - }); - it('should the hasPreviousPage function return false when the currentPage is equal to 0', () => { - component.currentPage = 0; - fixture.detectChanges(); - expect(component.hasPreviousPage()).toBe(false); - }); - it('should the hasPreviousPage function return false when the pagesCount is 0', () => { - component.pagesCount = 0; - component.totalCount = 0; - fixture.detectChanges(); - expect(component.hasPreviousPage()).toBe(false); - }); - - it('should the setNextPage function increment the value/currentPage when it is less then the pagesCount', () => { - let initialPage = 0; - let pagesCount = 3; - component.pagesCount = pagesCount; - component.totalCount = 30; - component.currentPage = initialPage; - fixture.detectChanges(); - component.setNextPage(); - fixture.detectChanges(); - expect(component.currentPage).toEqual(initialPage + 1); - }); - - it('should not the setNextPage function increment the value/currentPage when it is on the last page', () => { - let pagesCount = 3; - component.pagesCount = pagesCount; - component.totalCount = 30; - component.currentPage = pagesCount - 1; - fixture.detectChanges(); - component.setNextPage(); - fixture.detectChanges(); - expect(component.currentPage).toEqual(pagesCount - 1); - }); - - it('should the setPreviousPage function decrement the value/currentPage', () => { - let initialPage = 1; - component.pagesCount = 3; - component.totalCount = 30; - component.currentPage = initialPage; - fixture.detectChanges(); - component.setPreviousPage(); - fixture.detectChanges(); - expect(component.currentPage).toEqual(initialPage - 1); - }); - - it('should not the setPreviousPage function decrement the value/currentPage when it is equal to 0', () => { - component.pagesCount = 3; - component.totalCount = 30; - component.currentPage = 0; - 
fixture.detectChanges(); - component.setPreviousPage(); - fixture.detectChanges(); - expect(component.currentPage).toEqual(0); - }); - - it('should the setFirstPage set the value/currentPage to 0', () => { - component.pagesCount = 3; - component.totalCount = 30; - component.currentPage = 1; - fixture.detectChanges(); - component.setFirstPage(); - fixture.detectChanges(); - expect(component.currentPage).toEqual(0); - }); - - - it('should the setLastPage set the value/currentPage to the value of pagesCount', () => { - component.setLastPage(); - fixture.detectChanges(); - expect(component.currentPage).toEqual(component.pagesCount - 1); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.ts deleted file mode 100644 index 5f85da7da56..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination-controls/pagination-controls.component.ts +++ /dev/null @@ -1,137 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, forwardRef, Input, Output, EventEmitter} from '@angular/core'; -import {ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms'; - -@Component({ - selector: 'pagination-controls', - templateUrl: './pagination-controls.component.html', - styleUrls: ['./pagination-controls.component.less'], - providers: [ - { - provide: NG_VALUE_ACCESSOR, - useExisting: forwardRef(() => PaginationControlsComponent), - multi: true - } - ] -}) -export class PaginationControlsComponent implements ControlValueAccessor { - - private onChange: (fn: any) => void; - - currentPage: number = 0; - - @Input() - totalCount: number; - - @Input() - pagesCount: number; - - @Output() - currentPageChange: EventEmitter = new EventEmitter(); - - get value(): number { - return this.currentPage; - } - - set value(newValue: number) { - if (this.isValidValue(newValue)) { // this is the last validation check - this.currentPage = newValue; - this.currentPageChange.emit(newValue); - if (this.onChange) { - this.onChange(newValue); - } - } else { - throw new Error(`Invalid value ${newValue}. The currentPage should be between 0 and ${this.pagesCount}.`); - } - } - - /** - * A simple check if the given value is valid for the current pagination instance - * @param {number} value The new value to test - * @returns {boolean} - */ - private isValidValue(value: number): boolean { - return value <= this.pagesCount || value >= 0; - } - - /** - * The goal is to set the value to the first page... obviously to zero. It is just to have a centralized api for that. 
- */ - setFirstPage(): void { - this.value = 0; - } - - /** - * The goal is to set the value to the last page which is the pagesCount property anyway. - */ - setLastPage(): void { - this.value = this.pagesCount - 1; - } - - /** - * The goal is to decrease the value (currentPage) property if it is possible (checking with 'hasPreviousPage'). - * @returns {number} The new value of the currentPage - */ - setPreviousPage(): number { - if (this.hasPreviousPage()) { - this.value -= 1; - } - return this.value; - } - - /** - * The goal is to increase the value (currentPage) property if it is possible (checking with 'hasNextPage'). - * @returns {number} The new value of the currentPage - */ - setNextPage(): number { - if (this.hasNextPage()){ - this.value += 1; - } - return this.value; - } - - /** - * The goal is to have a single source of true to check if we can set a next page or not. - * @returns {boolean} - */ - hasNextPage(): boolean { - return this.pagesCount > 0 && this.value < this.pagesCount - 1; - } - - /** - * The goal is to have a single source of true to check if we can set a previous page or not. - * @returns {boolean} - */ - hasPreviousPage(): boolean { - return this.pagesCount > 0 && this.value > 0; - } - - writeValue(value: number) { - this.value = value; - } - - registerOnChange(callback: any): void { - this.onChange = callback; - } - - registerOnTouched() { - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.html deleted file mode 100644 index ecfa75d9c15..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.html +++ /dev/null @@ -1,25 +0,0 @@ - - -
- - {{'pagination.numbers' | translate: numbersTranslateParams}} - -
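Note that the isValidValue check in the PaginationControlsComponent removed above OR-s its two comparisons, so it accepts every number and the "Invalid value" branch of the value setter can never fire. A bounds check matching the documented contract ("between 0 and pagesCount", with setLastPage targeting pagesCount - 1) would look roughly like the sketch below; this is illustrative only, not the project's code:

// Illustrative sketch of a strict page-index check for PaginationControlsComponent.value.
// Valid pages are 0 .. pagesCount - 1; when there are no pages, only 0 is accepted,
// which keeps the zero-page cases exercised by the deleted spec valid.
function isValidPage(value: number, pagesCount: number): boolean {
  return pagesCount > 0
    ? value >= 0 && value <= pagesCount - 1
    : value === 0;
}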
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.less deleted file mode 100644 index dfa0aa72384..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.less +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - display: flex; - - .pagination-form { - .flex-vertical-align; - justify-content: flex-end; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts deleted file mode 100644 index c8200274510..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {TranslationModules} from '@app/test-config.spec'; -import {FormControl, FormGroup} from '@angular/forms'; - -import {PaginationComponent} from './pagination.component'; - -describe('PaginationComponent', () => { - let component: PaginationComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: TranslationModules, - declarations: [PaginationComponent], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(PaginationComponent); - component = fixture.componentInstance; - component.filterInstance = { - defaultSelection: [ - { - label: '10', - value: '10' - } - ] - }; - component.filtersForm = new FormGroup({ - pageSize: new FormControl() - }); - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.ts deleted file mode 100644 index 890c2eec006..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.ts +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, OnInit, Input} from '@angular/core'; -import {FormGroup} from '@angular/forms'; -import {ListItem} from '@app/classes/list-item'; -import {FilterCondition} from '@app/classes/filtering'; - -@Component({ - selector: 'pagination', - templateUrl: './pagination.component.html', - styleUrls: ['./pagination.component.less'] -}) -export class PaginationComponent implements OnInit { - - ngOnInit() { - this.setPageSizeFromString(this.filterInstance.defaultSelection[0].value); - this.filtersForm.controls.pageSize.valueChanges.subscribe((selection: ListItem): void => { - this.setPageSizeFromString(selection[0].value); - }); - } - - @Input() - filtersForm: FormGroup; - - @Input() - filterInstance: FilterCondition; - - @Input() - currentCount?: number; - - @Input() - totalCount: number; - - private pageSize: number = 0; - - private setPageSizeFromString(value: string) { - this.pageSize = parseInt(value); - } - - private currentPage: number = 0; - - get numbersTranslateParams(): any { - const pageSize = this.pageSize, - startIndex = (this.currentPage * pageSize) + 1; - return { - startIndex, - endIndex: startIndex + Math.min(pageSize, this.currentCount) - 1, - totalCount: this.totalCount - } - } - - get pagesCount(): number { - return Math.ceil(this.totalCount / this.pageSize); - } - - setCurrentPage(pageNumber: number) { - this.currentPage = pageNumber; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html deleted file mode 100644 index 5fd87abea1b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html +++ /dev/null @@ -1,44 +0,0 @@ - - - - {{item.label | translate}} - - - - -{{activeItem.label | translate}}: -
- - -
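As a reading aid for the SearchBoxComponent removed in the hunks below, here is an illustrative sketch (not project code) of its parameter bookkeeping: each search "chip" is stored with an id so a single chip can be toggled between include/exclude or removed without touching the others, exactly as toggleParameter and removeParameter do. The SearchChip type and the example field values are assumptions for the sketch; the component's own type is SearchBoxParameterProcessed.

// Illustrative sketch of the include/exclude chip handling in SearchBoxComponent.
interface SearchChip {
  id: number;
  name: string;       // field name, e.g. 'level' (example value, not from the sources)
  value: string;      // typed value, e.g. 'ERROR' (example value, not from the sources)
  isExclude: boolean; // true when the chip should exclude matches instead of including them
}

// Flip the include/exclude flag of the chip with the given id, leaving the rest untouched.
function toggleChip(chips: SearchChip[], id: number): SearchChip[] {
  return chips.map(chip => chip.id === id ? {...chip, isExclude: !chip.isExclude} : chip);
}

// Drop the chip with the given id from the query.
function removeChip(chips: SearchChip[], id: number): SearchChip[] {
  return chips.filter(chip => chip.id !== id);
}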
- diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less deleted file mode 100644 index b8a7a4cae04..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less +++ /dev/null @@ -1,123 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -@inactive-input-width: 1px; -@label-margin: 2px; - -:host { - display: flex; - flex-wrap: wrap; - justify-content: flex-start; - align-items: center; - width: 100%; - border: @input-border; - cursor: text; - - .parameter-label { - margin: @label-margin; - border-radius: @dropdown-border-radius; - padding: @search-parameter-padding; - background-color: @grey-color; - color: @base-font-color; - font-size: 0.8em; - - .parameter-value { - font-weight: normal; - } - - .action-icon { - cursor: pointer; - } - .fa-search-plus { - color: @form-success-color; - } - .fa-search-minus { - color: @form-error-color; - } - } - - .active-parameter-label { - font-weight: bold; - margin: 0 @label-margin; - } - - .search-item-container { - position: relative; - min-width: @inactive-input-width; - height: @input-height; - flex-grow: 1; - - .search-item-input { - border: none; - box-shadow: none; - } - - .parameter-input { - width: @inactive-input-width; - } - - .value-input { - .collapsed-form-control; - } - - &.active { - min-width: @dropdown-min-width; - - .parameter-input { - width: 100%; - } - - .value-input { - .collapsed-form-control; - } - - &.value { - .parameter-input { - display: none; - } - - .value-input { - width: 100%; - } - } - } - - /deep/ typeahead-container .dropdown-menu { - .dropdown-list-default; - max-height: 80vh; - overflow-y: auto; - - > li { - .dropdown-item-default; - - > a{ - .dropdown-item-child-default; - } - } - } - } - - .clear-all-btn { - color: @unknown-color; - cursor: pointer; - height: 1em; - width: 1em; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts deleted file mode 100644 index 32348a3c7f2..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts +++ /dev/null @@ -1,143 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; - -import {TranslationModules} from '@app/test-config.spec'; -import {UtilsService} from '@app/services/utils.service'; - -import {SearchBoxComponent} from './search-box.component'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {ComponentLabelPipe} from '@app/pipes/component-label'; - -describe('SearchBoxComponent', () => { - let component: SearchBoxComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - ComponentLabelPipe, - SearchBoxComponent - ], - imports: [ - ...TranslationModules, - StoreModule.provideStore({ - components - }) - ], - providers: [ - ComponentsService, - UtilsService - ], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(SearchBoxComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - describe('#activeItemValueOptions()', () => { - const cases = [ - { - itemsOptions: null, - activeItem: { - value: 'v0' - }, - result: [], - title: 'no options available' - }, - { - itemsOptions: { - v1: [ - { - value: 'v2' - } - ] - }, - activeItem: null, - result: [], - title: 'no active item' - }, - { - itemsOptions: {}, - activeItem: { - value: 'v3' - }, - result: [], - title: 'empty itemsOptions object' - }, - { - itemsOptions: { - v4: [ - { - value: 'v5' - } - ] - }, - activeItem: { - value: 'v6' - }, - result: [], - title: 'no options available for active item' - }, - { - itemsOptions: { - v7: [ - { - value: 'v8' - }, - { - value: 'v9' - } - ] - }, - activeItem: { - value: 'v7' - }, - result: [ - { - value: 'v8' - }, - { - value: 'v9' - } - ], - title: 'options are available for active item' - } - ]; - - cases.forEach(test => { - it(test.title, () => { - component.itemsOptions = test.itemsOptions; - component.activeItem = test.activeItem; - expect(component.activeItemValueOptions).toEqual(test.result); - }); - }); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts deleted file mode 100644 index 62835bbc749..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts +++ /dev/null @@ -1,345 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, OnInit, OnDestroy, HostListener, Input, ViewChild, ElementRef, forwardRef} from '@angular/core'; -import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms'; -import {Subject} from 'rxjs/Subject'; -import {SearchBoxParameter, SearchBoxParameterProcessed, SearchBoxParameterTriggered} from '@app/classes/filtering'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject} from '@app/classes/object'; -import {UtilsService} from '@app/services/utils.service'; -import {Subscription} from 'rxjs/Subscription'; - -@Component({ - selector: 'search-box', - templateUrl: './search-box.component.html', - styleUrls: ['./search-box.component.less'], - providers: [ - { - provide: NG_VALUE_ACCESSOR, - useExisting: forwardRef(() => SearchBoxComponent), - multi: true - } - ] -}) -export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccessor { - - private currentId: number = 0; - - private isExclude: boolean = false; - - /** - * Indicates whether search box is currently active - * @type {boolean} - */ - isActive: boolean = false; - - /** - * Indicates whether search query parameter value is currently typed - * @type {boolean} - */ - isValueInput: boolean = false; - - currentValue: string; - - /** - * Indicates whether there's no autocomplete matches in preset options for search query parameter name - * @type {boolean} - */ - private noMatchingParameterName: boolean = true; - - /** - * Indicates whether there's no autocomplete matches in preset options for search query parameter value - * @type {boolean} - */ - private noMatchingParameterValue: boolean = true; - - @Input() - items: ListItem[] = []; - - @Input() - itemsOptions: HomogeneousObject = {}; - - /** - * Name of parameter to be used if there are no matching values - * @type {string} - */ - @Input() - defaultParameterName?: string; - - @Input() - parameterNameChangeSubject: Subject = new Subject(); - - @Input() - parameterAddSubject: Subject = new Subject(); - - @Input() - updateValueSubject: Subject = new Subject(); - - /** - * Indicates whether form should receive updated value immediately after user adds new search parameter, without - * explicit actions like pressing Submit button or Enter key - * @type {boolean} - */ - @Input() - updateValueImmediately: boolean = true; - - @ViewChild('parameterInput') - parameterInputRef: ElementRef; - - @ViewChild('valueInput') - valueInputRef: ElementRef; - - private parameterInput: HTMLInputElement; - - private valueInput: HTMLInputElement; - - /** - * Currently active search query parameter - * @type {ListItem | null} - */ - activeItem: ListItem | null = null; - - /** - * Search query parameters that are already specified by user - * @type {SearchBoxParameterProcessed[]} - */ - parameters: SearchBoxParameterProcessed[] = []; - - private subscriptions: Subscription[] = []; - - constructor(private utils: UtilsService) {} - - ngOnInit(): void { - 
this.parameterInput = this.parameterInputRef.nativeElement; - this.valueInput = this.valueInputRef.nativeElement; - this.subscriptions.push( - this.parameterNameChangeSubject.subscribe(this.onParameterNameChange) - ); - this.subscriptions.push( - this.parameterAddSubject.subscribe(this.onParameterAdd) - ); - this.subscriptions.push( - this.updateValueSubject.subscribe(this.updateValue) - ); - } - - ngOnDestroy(): void { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - /** - * Available options for value of currently active search query parameter - * @returns {ListItem[]} - */ - get activeItemValueOptions(): ListItem[] { - return this.itemsOptions && this.activeItem && this.itemsOptions[this.activeItem.value] ? - this.itemsOptions[this.activeItem.value] : []; - } - - private onChange: (fn: any) => void; - - @HostListener('click') - private onRootClick(): void { - if (!this.isActive) { - this.parameterInput.focus(); - } - } - - @HostListener('keydown', ['$event']) - private onRootKeyDown(event: KeyboardEvent): void { - if (this.utils.isEnterPressed(event)) { - event.preventDefault(); - } - }; - - @HostListener('blur') - private onRootBlur(): void { - this.clear(); - }; - - onParameterInputFocus(): void { - this.isActive = true; - } - - private switchToParameterInput = (): void => { - this.clear(); - this.isActive = true; - this.isValueInput = false; - setTimeout(() => this.parameterInput.focus(), 0); - } - - private getItemByValue(name: string): ListItem { - return this.items.find((field: ListItem): boolean => field.value === name); - } - - clear(): void { - this.isActive = false; - this.activeItem = null; - this.currentValue = ''; - this.parameterInput.value = ''; - this.valueInput.value = ''; - } - - onClearButtonClick = (event: MouseEvent): void => { - this.clear(); - this.parameters = []; - this.updateValueSubject.next(); - event.stopPropagation(); - event.preventDefault(); - } - - changeParameterName(options: SearchBoxParameterTriggered): void { - this.parameterNameChangeSubject.next(options); - } - - onParameterNameChange = (options: SearchBoxParameterTriggered): void => { - this.activeItem = options.item.label ? 
options.item : this.getItemByValue(options.item.value); - this.isExclude = options.isExclude; - this.isActive = true; - this.isValueInput = true; - this.currentValue = ''; - this.valueInput.focus(); - } - - onParameterValueKeyDown(event: KeyboardEvent): void { - if (this.utils.isBackSpacePressed(event) && !this.currentValue) { - this.switchToParameterInput(); - } - } - - onParameterValueKeyUp(event: KeyboardEvent): void { - if (this.utils.isEnterPressed(event) && this.currentValue && this.noMatchingParameterValue) { - this.onParameterValueChange(this.currentValue); - } - } - - onParameterValueChange(value: string): void { - this.parameters.push({ - id: this.currentId++, - name: this.activeItem.value, - label: this.activeItem.label, - value: value, - isExclude: this.isExclude - }); - if (this.updateValueImmediately) { - this.updateValueSubject.next(); - } - this.switchToParameterInput(); - } - - /** - * Adding the new parameter to search query - * @param parameter {SearchBoxParameter} - */ - onParameterAdd = (parameter: SearchBoxParameter): void => { - const item = this.getItemByValue(parameter.name); - this.parameters.push({ - id: this.currentId++, - name: parameter.name, - label: item.label, - value: parameter.value, - isExclude: parameter.isExclude - }); - if (this.updateValueImmediately) { - this.updateValueSubject.next(); - } - this.switchToParameterInput(); - } - - onParameterKeyUp(event: KeyboardEvent): void { - if (this.utils.isEnterPressed(event)) { - if (!this.currentValue && !this.updateValueImmediately) { - this.updateValueSubject.next(); - } else if (this.currentValue && this.noMatchingParameterName && this.defaultParameterName) { - this.parameterAddSubject.next({ - name: this.defaultParameterName, - value: this.currentValue, - isExclude: false - }); - } - } - } - - /** - * Toggle the parameter isExclude property value - * @param event {MouseEvent} - event that triggered this action - * @param id {number} - id of parameter - */ - toggleParameter(event: MouseEvent, id: number): void { - this.parameters = this.parameters.map((parameter: SearchBoxParameterProcessed): SearchBoxParameterProcessed => { - if (parameter.id === id) { - parameter.isExclude = !parameter.isExclude; - } - return parameter; - }); - if (this.updateValueImmediately) { - this.updateValueSubject.next(); - } - event.stopPropagation(); - } - - /** - * Removing parameter from search query - * @param event {MouseEvent} - event that triggered this action - * @param id {number} - id of parameter - */ - removeParameter(event: MouseEvent, id: number): void { - this.parameters = this.parameters.filter((parameter: SearchBoxParameterProcessed): boolean => parameter.id !== id); - if (this.updateValueImmediately) { - this.updateValueSubject.next(); - } - event.stopPropagation(); - } - - updateValue = (): void => { - this.currentValue = ''; - if (this.onChange) { - this.onChange(this.parameters.slice()); - } - } - - /** - * Update flag that indicates presence of autocomplete matches in preset options for search query parameter name - * @param hasNoMatches {boolean} - */ - setParameterNameMatchFlag(hasNoMatches: boolean): void { - this.noMatchingParameterName = hasNoMatches; - } - - /** - * Update flag that indicates presence of autocomplete matches in preset options for search query parameter value - * @param hasNoMatches {boolean} - */ - setParameterValueMatchFlag(hasNoMatches: boolean): void { - this.noMatchingParameterValue = hasNoMatches; - } - - writeValue(parameters: SearchBoxParameterProcessed[] = []): void { - 
this.parameters = parameters.slice(); - this.updateValueSubject.next(); - } - - registerOnChange(callback: any): void { - this.onChange = callback; - } - - registerOnTouched(): void { - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.html deleted file mode 100644 index 31375ed1278..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.html +++ /dev/null @@ -1,178 +0,0 @@ - - -
-
-
-
- {{'logs.noEventFound' | translate}} - {{'logs.oneEventFound' | translate}} - {{'logs.totalEventFound' | translate: totalEventsFoundMessageParams}} -
-
- - {{'logs.brokenListLayoutMessage' | translate}} -
-
- -
- - - -
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
{{getLabelForField('logtime') | translate}}{{getLabelForField('level') | translate}}{{getLabelForField('type') | translate}}{{getLabelForField(column.value) | translate}}{{getLabelForField('path') | translate}}{{getLabelForField('log_message') | translate}}
- {{log.logtime | amTz: timeZone | amDateFormat: dateFormat}} -
- - - - - - - {{log.type | componentLabel | async}} - {{log[column.value]}} - {{log.path}} - - -
- -
-
- -
- -
- {{log.logtime | amTz: timeZone | amDateFormat: dateFormat}} -
-
-
-
- -
-
- -
-
- - {{log.type}} -
-
- - {{log.path}} -
- -
- - {{log[column.value]}} -
-
-
- -
-
-
- -
-
-
-
- -
- -
-
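The summary row of this template interpolates a parameter object into a translation key ({{'logs.totalEventFound' | translate: totalEventsFoundMessageParams}}), with the component supplying totalCount through the totalEventsFoundMessageParams getter further below in this diff. As a minimal sketch of the same ngx-translate pattern outside a template — assuming the translation string uses a {{totalCount}} placeholder; the helper name is hypothetical, not part of the deleted code:

import { TranslateService } from '@ngx-translate/core';

// Resolve the "total events found" message programmatically with an interpolation parameter,
// mirroring what the translate pipe does in the template above.
export function logTotalEventsMessage(translate: TranslateService, totalCount: number): void {
  translate.get('logs.totalEventFound', { totalCount })
    .subscribe((message: string) => console.log(message));
}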
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.less deleted file mode 100644 index 0ff490bbb01..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.less +++ /dev/null @@ -1,318 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - - .service-logs-table-controls { - align-items: center; - display: flex; - flex-wrap: wrap; - justify-content: flex-end; - .total-event-info { - margin-right: auto; - } - pagination { - margin-right: auto; - } - .layout-btn-group { - display: flex; - align-items: center; - .btn { - padding: .2em; - display: flex; - align-items: center; - i { - cursor: pointer; - margin: 0 .25em; - &.active { - color: @submit-color; - } - } - } - } - } - - .panel-body { - width: 100%; - } - .log-list-table-container { - width: 100%; - overflow-x: hidden; - } - table { - min-width: 100%; - table-layout: fixed; - width: 100%; - empty-cells: hide; - /deep/ col { - overflow: hidden; - text-overflow: ellipsis; - &.log-action { - overflow: visible; - padding-left: .25em; - padding-right: 0; - width: 1em; - } - &.log-time { - width: 7em; - padding-left: 0; - text-align: right; - } - &.log-level { - text-transform: uppercase; - width: 8em; - } - &.log-type { - color: @link-color; - width: 12em; - } - &.log-path { - overflow: hidden; - text-overflow: ellipsis; - width: 20em; - } - &.log-message { - width: 100%; - } - &.col-default-fixed { - width: 8em; - } - &.log-event_count { - width: 3em !important; - } - &.col-checkpoint { - padding: 0; - width: 1px; - } - } - tfoot td { - overflow: visible; - } - } - - tr.log-date-row, tr.log-date-row:hover { - background: @list-header-background-color; - border: none transparent; - th { - border: none transparent; - } - } - tr.log-item-row td { - background: none transparent; - } - - table { - thead th { - text-transform: uppercase; - } - td { - text-overflow: ellipsis; - overflow: hidden; - word-wrap: break-word; - &.log-action { - overflow: visible; - padding-left: .25em; - padding-right: 0; - width: 1em; - /deep/ .btn, /deep/ .filter-label { - font-size: 1em; - height: auto; - line-height: 1em; - padding: 0; - } - } - &.log-time { - min-width: 7em; - padding-left: 0; - text-align: right; - } - &.log-level { - text-transform: uppercase; - min-width: 8em; - .log-colors; - } - &.log-type { - color: @link-color; - } - &.log-message { - width: 100%; - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; - &.log-message-open { - white-space: pre-wrap; - } - } - &.log-event_count { - 
width: 3em; - } - } - } - - tr:hover td.log-action { - /deep/ .btn { - display: inline-block; - } - } - - .table.table-hover > tbody > tr { - box-sizing: border-box; - border-width: 1px; - > td { - border-top: 0 none; - } - &:first-of-type { - border-top-color: transparent; - } - &:last-of-type { - border-bottom-color: transparent; - } - } - - .list-layout-warning { - align-items: center; - color: @warning-color; - display: flex; - flex: 1; - font-size: .7em; - i { - margin-right: .6em; - } - } - - /deep/ .tooltip { - font-size: .75em; - .tooltip-inner { - background-color: rgba(50, 50, 50, 1); - } - .tooltip-arrow { - border-top-color: rgba(50, 50, 50, 1); - } - } - - .layout-flex { - .log-list { - color: @base-font-color; - border-bottom: 1px solid @log-list-border-color; - font-size: @table-font-size; - .log-date-row { - background: @list-header-background-color; - padding: @log-list-row-data-padding; - } - .log-row { - border: 1px solid transparent; - border-bottom: 1px solid @log-list-border-color; - display: block; - padding-bottom: .5em; - transition: all 100ms; - &:hover { - background: @log-list-row-hover-background-color; - border-color: @log-list-row-hover-border-color; - > div.log-header .log-action /deep/ .btn { - opacity: 1; - } - } - &:first-of-type { - border-top-color: transparent; - } - &:last-of-type { - border-bottom-color: transparent; - } - div { - padding: (@log-list-row-data-padding / 2) @log-list-row-data-padding; - } - > div.log-header, > div.details { - padding: 0; - } - > div.log-header { - display: flex; - > div { - height: 2em; - text-overflow: ellipsis; - overflow: hidden; - } - .log-level { - align-items: center; - display: flex; - padding-right: 0; - text-transform: uppercase; - width: 7em; - .log-colors; - } - .log-type { - color: @link-color; - } - .log-time { - flex: 1 0 auto; - max-width: none; - min-width: 6em; - text-align: right; - } - .log-action { - display: flex; - justify-content: flex-end; - max-width: none; - overflow: visible; - padding-left: 5px; - padding-right: 0; - > * { - display: inline-block; - } - /deep/ .btn { - opacity: 0; - overflow: hidden; - transition: opacity 50ms; - } - /deep/ .btn, /deep/ .filter-label { - font-size: 1em; - height: auto; - line-height: 1em; - padding: 0; - } - } - } - .log-message { - flex: 1 1 auto; - max-width: none; - overflow: hidden; - padding: .25em 0; - width: 100%; - } - .log-path { - max-width: none; - } - label { - color: lighten(@base-font-color, 25%); - display: block; - font-size: .7em; - margin: 0; - padding: 0; - } - } - &.show-labels > .log-row > div.log-header > div { - height: 2.5em; - } - } - } - - .context-menu { - position: fixed; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.spec.ts deleted file mode 100644 index 77457813d59..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.spec.ts +++ /dev/null @@ -1,146 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule, ReactiveFormsModule} from '@angular/forms'; -import {StoreModule} from '@ngrx/store'; -import {MomentModule} from 'angular2-moment'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {TooltipModule} from 'ngx-bootstrap'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {AuthService} from '@app/services/auth.service'; -import {PaginationComponent} from '@app/components/pagination/pagination.component'; -import {DropdownListComponent} from '@modules/shared/components/dropdown-list/dropdown-list.component'; - -import {ServiceLogsTableComponent, ListLayout} from './service-logs-table.component'; -import {ComponentLabelPipe} from "@app/pipes/component-label"; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -describe('ServiceLogsTableComponent', () => { - let component: ServiceLogsTableComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - 
declarations: [ - ServiceLogsTableComponent, - PaginationComponent, - DropdownListComponent, - ComponentLabelPipe - ], - imports: [ - RouterTestingModule, - FormsModule, - ReactiveFormsModule, - MomentModule, - MomentTimezoneModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - appState, - appSettings, - tabs, - clusters, - components, - hosts - }), - TooltipModule.forRoot() - ], - providers: [ - ...MockHttpRequestModules, - LogsContainerService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - AppStateService, - AppSettingsService, - TabsService, - ClustersService, - ComponentsService, - HostsService, - ComponentGeneratorService, - AuthService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ServiceLogsTableComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - it('should change the layout to TABLE', () => { - component.setLayout(ListLayout.Table); - expect(component.layout).toEqual(ListLayout.Table); - }); - - it('should change the layout to FLEX', () => { - component.setLayout(ListLayout.Flex); - expect(component.layout).toEqual(ListLayout.Flex); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.ts deleted file mode 100644 index 757b4a0199c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/service-logs-table/service-logs-table.component.ts +++ /dev/null @@ -1,372 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { - Component, - OnInit, - OnDestroy, - AfterViewChecked, - ViewChild, - ElementRef, - Input, - ChangeDetectorRef, - SimpleChanges -} from '@angular/core'; - -import { Subject } from 'rxjs/Subject'; - import { Subscription } from 'rxjs/Subscription'; - import { Observable } from 'rxjs/Observable'; - import { auditTime } from 'rxjs/operator/auditTime'; - -import {ListItem} from '@app/classes/list-item'; -import {LogsTableComponent} from '@app/classes/components/logs-table/logs-table-component'; -import {ServiceLog} from '@app/classes/models/service-log'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -export enum ListLayout { - Table = 'TABLE', - Flex = 'FLEX' -} - -@Component({ - selector: 'service-logs-table', - templateUrl: './service-logs-table.component.html', - styleUrls: ['./service-logs-table.component.less'] -}) -export class ServiceLogsTableComponent extends LogsTableComponent implements AfterViewChecked, OnInit, OnDestroy { - - /** - * The element reference is used to check if the table is broken or not. - */ - @ViewChild('tableListEl', { - read: ElementRef - }) - private tableListElRef: ElementRef; - - /** - * The element reference is used to check if the table is broken or not. - */ - @ViewChild('tableWrapperEl', { - read: ElementRef - }) - private tableWrapperElRef: ElementRef; - - /** - * We only show the labels in flex layout when this property is TRUE. - * @type {boolean} - */ - @Input() - showLabels = false; - - /** - * The minimum width for the log message column. It is used when we check if the layout is broken or not. - * @type {number} - */ - @Input() - logMessageColumnMinWidth = 175; - - /** - * We use this property in the broken table layout check process when the log message is displayed. - * @type {string} - */ - @Input() - logMessageColumnCssSelector = 'tbody tr td.log-message'; - - /** - * Set the layout for the list. - * It can be: - * 'TABLE': good for comparison, but it is not useful when the user wants to display too many fields - * 'FLEX': flexible layout (with flex box) is good for displaying a lot of columns or for displaying the log list on a relatively - * narrow display. - * @type {ListLayout} - */ - @Input() - layout: ListLayout = ListLayout.Table; - - readonly dateFormat = 'dddd, MMMM Do'; - - readonly timeFormat = 'h:mm:ss A'; - - readonly customStyledColumns: string[] = ['level', 'type', 'logtime', 'log_message', 'path']; - - private readonly messageFilterParameterName = 'log_message'; - - private readonly logsType = 'serviceLogs'; - - private selectedText = ''; - - /** - * This is a private flag to store the table layout check result. It is used to show user notifications about - * non-visible information.
- * @type {boolean} - */ - tooManyColumnsSelected = false; - - get contextMenuItems(): ListItem[] { - return this.logsContainer.queryContextMenuItems; - } - - get timeZone(): string { - return this.logsContainer.timeZone; - } - - get filters(): any { - return this.logsContainer.filters; - } - - get logsTypeMapObject(): object { - return this.logsContainer.logsTypeMap.serviceLogs; - } - - get isContextMenuDisplayed(): boolean { - return Boolean(this.selectedText); - }; - - /** - * 'left' CSS property value for context menu dropdown - * @type {number} - */ - contextMenuLeft = 0; - - /** - * 'top' CSS property value for context menu dropdown - * @type {number} - */ - contextMenuTop = 0; - - tableRefresh$ = new Subject(); - - subscriptions: Subscription[] = []; - - constructor( - private logsContainer: LogsContainerService, - private utils: UtilsService, - private cdRef: ChangeDetectorRef, - private notificationService: NotificationService - ) { - super(); - } - - ngOnInit() { - this.subscriptions.push( - Observable.fromEvent(window, 'resize').auditTime(300).subscribe(this.onWindowResize) - ); - } - - ngAfterViewChecked() { - this.checkListLayout(); - this.cdRef.detectChanges(); - } - - ngOnChanges(changes: SimpleChanges) { - if (changes.hasOwnProperty('columns')) { - this.displayedColumns = this.columns.filter((column: ListItem): boolean => column.isChecked); - this.tableRefresh$.next(Date.now()); - } - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - private copyLog = (log: ServiceLog): void => { - if (document.queryCommandSupported('copy')) { - const text = log.log_message, - node = document.createElement('textarea'); - node.value = text; - Object.assign(node.style, { - position: 'fixed', - top: '0', - left: '0', - width: '1px', - height: '1px', - border: 'none', - outline: 'none', - boxShadow: 'none', - backgroundColor: 'transparent', - padding: '0' - }); - document.body.appendChild(node); - node.select(); - if (document.queryCommandEnabled('copy')) { - document.execCommand('copy'); - this.notificationService.addNotification({ - type: 'success', - title: 'logs.copy.title', - message: 'logs.copy.success' - }); - } else { - this.notificationService.addNotification({ - type: 'success', - title: 'logs.copy.title', - message: 'logs.copy.failed' - }); - } - document.body.removeChild(node); - } else { - this.notificationService.addNotification({ - type: 'success', - title: 'logs.copy.title', - message: 'logs.copy.notSupported' - }); - } - } - - private onWindowResize = () => { - this.tableRefresh$.next(Date.now()); - } - - private openLog = (log: ServiceLog): void => { - this.logsContainer.openServiceLog(log); - } - - private openContext = (log: ServiceLog): void => { - this.logsContainer.loadLogContext(log.id, log.host, log.type); - } - - readonly logActions = [ - { - label: 'logs.copy', - iconClass: 'fa fa-files-o', - onSelect: this.copyLog - }, - { - label: 'logs.open', - iconClass: 'fa fa-external-link', - onSelect: this.openLog - }, - { - label: 'logs.context', - iconClass: 'fa fa-crosshairs', - onSelect: this.openContext - } - ]; - - isDifferentDates(dateA, dateB): boolean { - return this.utils.isDifferentDates(dateA, dateB, this.timeZone); - } - - openMessageContextMenu(event: MouseEvent): void { - const selectedText = getSelection().toString(); - if (selectedText) { - this.contextMenuLeft = event.clientX; - this.contextMenuTop = event.clientY; - this.selectedText = selectedText; - event.preventDefault(); - } - } - - 
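  // Illustration only (not part of the deleted file): copyLog above uses the hidden-textarea plus
  // document.execCommand('copy') technique and then reports the outcome through NotificationService.
  // A stripped-down sketch of just the clipboard step could look like the hypothetical helper below;
  // the original additionally styles the textarea off-screen and distinguishes the unsupported and
  // failed cases in its notifications.
  private copyTextToClipboardSketch(text: string): boolean {
    if (!document.queryCommandSupported('copy')) {
      return false;
    }
    const node = document.createElement('textarea');
    node.value = text;
    // Keep the helper element out of view while it is selected.
    node.style.position = 'fixed';
    node.style.opacity = '0';
    document.body.appendChild(node);
    node.select();
    const copied = document.queryCommandEnabled('copy') && document.execCommand('copy');
    document.body.removeChild(node);
    return copied;
  }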
updateQuery(event: ListItem): void { - this.logsContainer.queryParameterAdd.next({ - name: this.messageFilterParameterName, - value: this.selectedText, - isExclude: event.value - }); - } - - /** - * Handle the event when the contextual menu component hides itself. - */ - onContextMenuDismiss(): void { - this.selectedText = ''; - }; - - /** - * The goal is to check whether the log message column is readable. This is done by checking whether the column is displayed - * and by comparing its current width with the minimum configured width. - * @returns {boolean} - */ - isLogMessageVisible(): boolean { - let visible: boolean = this.isColumnDisplayed('log_message'); - if (this.logs.length && visible && this.layout === ListLayout.Table) { - const tableElement: HTMLElement = this.tableListElRef.nativeElement; - const lastTdElement = (tableElement && tableElement.querySelectorAll(this.logMessageColumnCssSelector)[0]) || undefined; - const minWidth = parseFloat(window.getComputedStyle(lastTdElement).minWidth) || this.logMessageColumnMinWidth; - const lastTdElementInfo = lastTdElement.getBoundingClientRect(); - visible = lastTdElementInfo.width >= minWidth; - } - return visible; - } - - /** - * Check if the log list (table) fits its container. The goal is to decide if the layout is broken or not. - * @returns {boolean} - */ - isLogListFitToTheContainer(): boolean { - let result = this.layout === ListLayout.Flex; - if (!result) { - const tableElement: HTMLElement = this.tableListElRef.nativeElement; - const tableElementInfo = tableElement.getBoundingClientRect(); - const wrapperElement: HTMLElement = this.tableWrapperElRef.nativeElement; - const wrapperElementInfo = wrapperElement.getBoundingClientRect(); - result = wrapperElementInfo.width >= tableElementInfo.width; - } - return result; - } - - /** - * This function checks whether the log message column is readable when it is displayed; otherwise it checks whether - * the whole table fits into its container. - */ - private checkListLayout(): void { - this.tooManyColumnsSelected = this.isColumnDisplayed('log_message') ? !this.isLogMessageVisible() : !this.isLogListFitToTheContainer(); - } - - /** - * The goal is to let the user change the layout so that they can decide which view is more readable. - * @param {ListLayout} layout - */ - public setLayout(layout: ListLayout): void { - this.layout = layout; - } - - /** - * Find the label for the given field in the @columns ListItem array - * @param {string} field - * @returns {string} - */ - private getLabelForField(field: string): string { - const column: ListItem = this.columns.find(currentColumn => currentColumn.value === field); - return column && (column.label || column.value); - } - - /** - * Toggle the boolean value of the showLabels property. The goal is to show/hide the labels in the flex box layout, - * so that the user can decide whether to see the labels at the cost of some space. - */ - private toggleShowLabels(): void { - this.showLabels = !this.showLabels; - } - - updateSelectedColumns(columns: string[]): void { - this.logsContainer.updateSelectedColumns(columns, this.logsType); - } - - /** - * The goal is to provide the single source for the parameters of the 'xyz events found' message.
- * @returns {Object} - */ - get totalEventsFoundMessageParams(): {totalCount: number} { - return { - totalCount: this.logsContainer.totalCount - }; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.html deleted file mode 100644 index caff8dc0966..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.html +++ /dev/null @@ -1,39 +0,0 @@ - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.less deleted file mode 100644 index 7d6b2f80471..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.less +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -.close-icon { - .clickable-item; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.spec.ts deleted file mode 100644 index 2d0839a9da6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.spec.ts +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {TranslationModules} from '@app/test-config.spec'; - -import {TabsComponent} from './tabs.component'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import { UtilsService } from '@app/services/utils.service'; - -describe('TabsComponent', () => { - let component: TabsComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [TabsComponent], - imports: [ - RouterTestingModule, - TranslationModules - ], - providers: [ - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - UtilsService - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(TabsComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - describe('#switchTab()', () => { - let activeTab; - const tab = { - id: 'tab0', - isActive: true, - label: '', - appState: null - }; - - it('new active tab', () => { - component.tabSwitched.subscribe((switchedTab: LogTypeTab) => activeTab = switchedTab); - component.switchTab(tab); - expect(activeTab).toEqual(tab); - }); - }); - - describe('#closeTab()', () => { - const items = [ - { - id: 'serviceLogs', - isActive: false, - label: '', - appState: null - }, - { - id: 'auditLogs', - isActive: false, - label: '', - appState: null - }, - { - id: 'newTab', - isActive: false, - label: '', - appState: null - } - ], - cases = [ - { - closedTabIndex: 2, - newActiveTabIndex: 1, - title: 'last tab closed' - }, - { - closedTabIndex: 1, - newActiveTabIndex: 2, - title: 'not last tab closed' - } - ]; - - cases.forEach(test => { - let oldTab, - newTab; - describe(test.title, () => { - beforeEach(() => { - oldTab = null; - newTab = null; - component.items = items; - component.tabClosed.subscribe((tabs: LogTypeTab[]): void => { - oldTab = tabs[0]; - newTab = tabs[1]; - }); - component.closeTab(items[test.closedTabIndex]); - }); - - it('closed tab', () => { - expect(oldTab).toEqual(items[test.closedTabIndex]); - }); - - it('new active tab', () => { - expect(newTab).toEqual(items[test.newActiveTabIndex]); - }); - }); - }); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.ts deleted file mode 100644 index fc46f287584..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/tabs/tabs.component.ts +++ /dev/null @@ -1,105 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input, Output, EventEmitter} from '@angular/core'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import { LogsFilteringUtilsService } from '@app/services/logs-filtering-utils.service'; - -export enum TabsSwitchMode { - Click = 'CLICK', - RouteSegment = 'ROUTE_SEGMENT', - RouteFragment= 'ROUTE_FRAGMENT', - RouteParam = 'ROUTE_PARAM' -}; - -@Component({ - selector: 'tabs', - templateUrl: './tabs.component.html', - styleUrls: ['./tabs.component.less'] -}) -export class TabsComponent { - - @Input() - items: LogTypeTab[] = []; - - @Input() - switchMode: TabsSwitchMode = TabsSwitchMode.Click; - - @Input() - basePathForRoutingMode: string[]; - - @Input() - paramNameForRouteParamMode: string; - - @Input() - queryParams: {[key: string]: any}; - - @Input() - queryParamsHandling = 'merge'; - - @Output() - tabSwitched: EventEmitter = new EventEmitter(); - - @Output() - tabClosed: EventEmitter = new EventEmitter(); - - constructor( - private logsFilterUtilsService: LogsFilteringUtilsService - ) {} - - switchTab(tab: LogTypeTab, event?: MouseEvent): void { - if (event) { - event.preventDefault(); - } - this.items.forEach((item: LogTypeTab) => item.isActive = item.id === tab.id); - this.tabSwitched.emit(tab); - } - - closeTab(tab: LogTypeTab): void { - const tabs = this.items, - tabsCount = tabs.length, - newActiveTab = tabs[tabsCount - 1] === tab ? tabs[tabsCount - 2] : tabs[tabsCount - 1]; - this.tabClosed.emit([tab, newActiveTab]); - } - - /** - * Get a route array for router.navigate. - * @ToDo it's been called too many times. Check what is this. - * @param tab {LogTypeTab} - */ - getRouterLinkForTab(tab: LogTypeTab): (string | {[key: string]: any})[] | string { - let link: (string | {[key: string]: any})[] | string; - switch (this.switchMode) { - case TabsSwitchMode.RouteSegment: - link = [...this.basePathForRoutingMode, ...this.logsFilterUtilsService.getNavigationForTab(tab)]; - break; - case TabsSwitchMode.RouteParam: - link = [...this.basePathForRoutingMode, { - [this.paramNameForRouteParamMode]: tab.id - }]; - break; - case TabsSwitchMode.RouteFragment: - link = [...this.basePathForRoutingMode]; - break; - default: - link = '#'; - break; - } - return link; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.html deleted file mode 100644 index 3b663279dcb..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.html +++ /dev/null @@ -1,40 +0,0 @@ - - -
-
-
-
- {{chartTimeGap.value}} {{chartTimeGap.label | translate}} {{'histogram.gap' | translate}} -
- -
-
-
{{ chartLabel }}
-
-
-
-
-
-
{{firstDateTick | amTz: timeZone | amDateFormat: historyStartEndTimeFormat}}
-
{{lastDateTick | amTz: timeZone | amDateFormat: historyStartEndTimeFormat}}
-
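The header of this template prints the current chartTimeGap ({{chartTimeGap.value}} {{chartTimeGap.label | translate}} {{'histogram.gap' | translate}}). The gap itself comes from the getTimeGap helper of the shared time-graph base class, which is not part of this diff; a rough sketch consistent with the expectations in time-histogram.component.spec.ts further below (the 1 week, 1 day, 1 hour, 1 minute and 10 seconds cases) might look like this, with the unit thresholds and the pluralisation rule being assumptions:

interface TimeGap {
  value: number;
  unit: string;
  label: string;
}

// Pick the largest unit that fits at least once into the interval and derive the translation key
// from it, pluralising the key when the value is greater than one.
function getTimeGapSketch(startDate: Date, endDate: Date): TimeGap {
  const diffMs = endDate.getTime() - startDate.getTime();
  const units: [string, number][] = [
    ['week', 7 * 24 * 60 * 60 * 1000],
    ['day', 24 * 60 * 60 * 1000],
    ['hour', 60 * 60 * 1000],
    ['minute', 60 * 1000],
    ['second', 1000]
  ];
  const [unit, unitMs] = units.find(([, ms]) => diffMs >= ms) || units[units.length - 1];
  const value = Math.floor(diffMs / unitMs);
  return { value, unit, label: `histogram.gap.${unit}${value > 1 ? 's' : ''}` };
}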
- diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.less deleted file mode 100644 index a0b8551a0c5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.less +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - header { - padding: @graph-padding; - } - .chart-label { - text-align: center; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.spec.ts deleted file mode 100644 index b429cdcb12f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.spec.ts +++ /dev/null @@ -1,196 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Injector} from '@angular/core'; -import {async, ComponentFixture, TestBed, inject} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {TranslationModules} from '@app/test-config.spec'; -import {MomentModule} from 'angular2-moment'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {ServiceInjector} from '@app/classes/service-injector'; -import {TimeZoneAbbrPipe} from '@app/pipes/timezone-abbr.pipe'; -import {GraphLegendComponent} from '@app/components/graph-legend/graph-legend.component'; -import {GraphLegendItemComponent} from '@app/components/graph-legend-item/graph-legend-item.component'; -import {GraphTooltipComponent} from '@app/components/graph-tooltip/graph-tooltip.component'; - -import {ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service'; -import {TimeHistogramComponent} from './time-histogram.component'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {UtilsService} from '@app/services/utils.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {AuditLogsService} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService} from '@app/services/storage/audit-logs-fields.service'; -import {ServiceLogsService} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service'; -import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService} from '@app/services/storage/tabs.service'; -import {ClustersService} from '@app/services/storage/clusters.service'; -import {ComponentsService} from '@app/services/storage/components.service'; -import {HostsService} from '@app/services/storage/hosts.service'; -import {HomogeneousObject} from '@app/classes/object'; - -describe('TimeHistogramComponent', () => { - let component: TimeHistogramComponent; - let fixture: ComponentFixture; - let histogramData: any; - let colors: HomogeneousObject; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => {} - } - } - }; - histogramData = { - 1512476481940: { - FATAL: 0, - ERROR: 1000, - WARN: 700, - INFO: 0, - DEBUG: 0, - TRACE: 0, - UNKNOWN: 0 - }, - 1512472881940: { - FATAL: 0, - ERROR: 2000, - WARN: 900, - INFO: 0, - DEBUG: 0, - TRACE: 0, - UNKNOWN: 0 - } - }; - colors = { - FATAL: '#830A0A', - ERROR: '#E81D1D', - WARN: '#FF8916', - INFO: '#2577B5', - DEBUG: '#65E8FF', - TRACE: '#888', - UNKNOWN: '#BDBDBD' - }; - TestBed.configureTestingModule({ - declarations: [ - TimeHistogramComponent, - GraphLegendComponent, - GraphLegendItemComponent, - GraphTooltipComponent, - TimeZoneAbbrPipe - ], - imports: [ - StoreModule.provideStore({ - appSettings - }), - ...TranslationModules, - MomentModule, - MomentTimezoneModule - ], - providers: [ - AppSettingsService, - LogsContainerService, - { - provide: HttpClientService, - useValue: httpClient - }, - UtilsService, - AppStateService, - AuditLogsService, - AuditLogsFieldsService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - ClustersService, - ComponentsService, - HostsService - ] - }) - .compileComponents(); - })); - - beforeEach(inject([Injector], 
(injector: Injector) => { - ServiceInjector.injector = injector; - fixture = TestBed.createComponent(TimeHistogramComponent); - component = fixture.componentInstance; - component.colors = colors; - component.svgId = 'HistogramSvg'; - component.data = histogramData; - fixture.detectChanges(); - })); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - const getTimeGapTestCases = [{ - startDate: new Date(2017, 0, 1), - endDate: new Date(2017, 0, 8), - expected: { - unit: 'week', - value: 1, - label: 'histogram.gap.week' - } - }, { - startDate: new Date(2017, 0, 1), - endDate: new Date(2017, 0, 2), - expected: { - unit: 'day', - value: 1, - label: 'histogram.gap.day' - } - }, { - startDate: new Date(2017, 0, 1, 1), - endDate: new Date(2017, 0, 1, 2), - expected: { - unit: 'hour', - value: 1, - label: 'histogram.gap.hour' - } - }, { - startDate: new Date(2017, 0, 1, 1, 1), - endDate: new Date(2017, 0, 1, 1, 2), - expected: { - unit: 'minute', - value: 1, - label: 'histogram.gap.minute' - } - }, { - startDate: new Date(2017, 0, 1, 1, 1, 1), - endDate: new Date(2017, 0, 1, 1, 1, 11), - expected: { - unit: 'second', - value: 10, - label: 'histogram.gap.seconds' - } - }]; - - getTimeGapTestCases.forEach((test) => { - it(`should the getTimeGap return with the proper time gap obj for ${test.expected.value} ${test.expected.unit} difference`, () => { - const getTimeGap: (startDate: Date, endDate: Date) => {value: number, unit: string} = component['getTimeGap']; - const gap = getTimeGap(test.startDate, test.endDate); - expect(gap).toEqual(test.expected); - }); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.ts deleted file mode 100644 index 179011b469a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-histogram/time-histogram.component.ts +++ /dev/null @@ -1,96 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, Input} from '@angular/core'; -import * as d3 from 'd3'; -import {TimeGraphComponent} from '@app/classes/components/graph/time-graph.component'; -import {GraphScaleItem} from '@app/classes/graph'; - -@Component({ - selector: 'time-histogram', - templateUrl: './time-histogram.component.html', - styleUrls: [ - '../../classes/components/graph/graph.component.less', '../../classes/components/graph/time-graph.component.less', - './time-histogram.component.less' - ] -}) -export class TimeHistogramComponent extends TimeGraphComponent { - - @Input() - columnWidth = { - second: 40, - minute: 30, - hour: 25, - day: 20, - base: 20 - }; - - constructor() { - super(); - } - - protected setYScaleDomain(data: GraphScaleItem[]): void { - const keys = Object.keys(this.labels); - const maxYValue = d3.max(data, item => keys.reduce((sum: number, key: string): number => sum + item[key], 0)); - this.yScale.domain([0, maxYValue]); - } - - protected populate(): void { - const keys = Object.keys(this.colors); - const data = this.data; - const timeStamps = Object.keys(data); - // we create a more consumable data structure for d3 - const formattedData = timeStamps.map((timeStamp: string): GraphScaleItem => Object.assign({ - tick: Number(timeStamp) - }, data[timeStamp])); - const layers = d3.stack().keys(keys)(formattedData); - - // after we have the data we set the domain values both scales - this.setXScaleDomain(formattedData); - this.setYScaleDomain(formattedData); - - // Setting the timegap label above the chart - this.setChartTimeGapByXScale(); - - const unitD3TimeProp = this.chartTimeGap.unit.charAt(0).toUpperCase() + this.chartTimeGap.unit.slice(1); - this.xScale.nice(d3[`time${unitD3TimeProp}`], 2); - - const columnWidth = this.columnWidth[this.chartTimeGap.unit] || this.columnWidth.base; - - // drawing the axis - this.drawXAxis(); - this.drawYAxis(); - - // populate the data and drawing the bars - const layer = this.svg.selectAll().data(d3.transpose(layers)) - .enter().append('g') - .attr('class', 'value'); - layer.selectAll().data(item => item).enter().append('rect') - .attr('transform', `translate(${(columnWidth / 2) + 2}, 0)`) - .attr('x', item => this.xScale(item.data.tick) - columnWidth / 2) - .attr('y', item => this.yScale(item[1])) - .attr('height', item => this.yScale(item[0]) - this.yScale(item[1])) - .attr('width', columnWidth.toString()) - .style('fill', (item, index) => this.orderedColors[index]) - .on('mouseover', this.handleMouseOver) - .on('mousemove', this.handleMouseMove) - .on('mouseout', this.handleMouseOut); - this.setDragBehavior(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.html deleted file mode 100644 index e2ca74485c8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.html +++ /dev/null @@ -1,27 +0,0 @@ - - -
-
-
{{firstDateTick | amTz: timeZone | amDateFormat: historyStartEndTimeFormat}}
-
{{lastDateTick | amTz: timeZone | amDateFormat: historyStartEndTimeFormat}}
-
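Like the histogram template, this one renders the first and last date ticks through the amTz and amDateFormat pipes using the component's timeZone and historyStartEndTimeFormat. Outside a template the same formatting can be done directly with moment-timezone; the sketch below is an illustration only, and the zone and format strings in the usage comment are placeholders rather than values from the deleted code:

import * as moment from 'moment-timezone';

// TypeScript equivalent of {{ tick | amTz: timeZone | amDateFormat: format }}: shift the timestamp
// into the selected time zone, then format it.
function formatDateTick(tick: number, timeZone: string, format: string): string {
  return moment(tick).tz(timeZone).format(format);
}

// Example with assumed values: formatDateTick(Date.now(), 'UTC', 'MMM DD, YYYY HH:mm');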
- - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.less deleted file mode 100644 index ffa1155715a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.less +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/variables'; - -:host { - /deep/ .line { - fill: none; - stroke-width: 1.5px; - } - - /deep/ .grid-line-area { - fill: transparent; - - + .grid-line { - stroke: transparent; - } - - &.visible-grid-line-area { - cursor: pointer; - - + .grid-line { - stroke: @base-font-color; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.spec.ts deleted file mode 100644 index b58717b9082..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.spec.ts +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Injector, CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed, inject} from '@angular/core/testing'; -import {MomentModule} from 'angular2-moment'; -import {MomentTimezoneModule} from 'angular-moment-timezone'; -import {TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {UtilsService} from '@app/services/utils.service'; -import {ServiceInjector} from '@app/classes/service-injector'; -import {GraphLegendComponent} from '@app/components/graph-legend/graph-legend.component'; - -import {TimeLineGraphComponent} from './time-line-graph.component'; - -describe('TimeLineGraphComponent', () => { - let component: TimeLineGraphComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - TimeLineGraphComponent, - GraphLegendComponent - ], - imports: [ - MomentModule, - MomentTimezoneModule, - ...TranslationModules, - StoreModule.provideStore({ - appSettings - }) - ], - providers: [ - UtilsService, - AppSettingsService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(inject([Injector], (injector: Injector) => { - ServiceInjector.injector = injector; - fixture = TestBed.createComponent(TimeLineGraphComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - })); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.ts deleted file mode 100644 index 2f0b450fd10..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-line-graph/time-line-graph.component.ts +++ /dev/null @@ -1,111 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, Input} from '@angular/core'; -import * as d3 from 'd3'; -import {GraphScaleItem, GraphLinePoint, GraphLineData} from '@app/classes/graph'; -import {TimeGraphComponent} from '@app/classes/components/graph/time-graph.component'; - -@Component({ - selector: 'time-line-graph', - templateUrl: './time-line-graph.component.html', - styleUrls: [ - '../../classes/components/graph/graph.component.less', '../../classes/components/graph/time-graph.component.less', - './time-line-graph.component.less' - ] -}) -export class TimeLineGraphComponent extends TimeGraphComponent { - - @Input() - pointRadius: number = 3.5; - - protected populate(): void { - const keys = Object.keys(this.colors), - data = this.data, - timeStamps = Object.keys(data), - dataForDomain = timeStamps.map((timeStamp: string): GraphScaleItem => Object.assign({ - tick: Number(timeStamp) - }, data[timeStamp])), - dataForSvg = keys.map((key: string): GraphLineData => { - return { - points: timeStamps.map((timeStamp: string): GraphScaleItem => { - return { - tick: Number(timeStamp), - y: data[timeStamp][key] - }; - }), - key: key - }; - }), - line = d3.line().x(item => this.xScale(item.tick)).y(item => this.yScale(item.y)); - - // after we have the data we set the domain values both scales - this.setXScaleDomain(dataForDomain); - this.setYScaleDomain(); - - // drawing the axis - this.drawXAxis(); - this.drawYAxis(); - - // populate the data and drawing the lines and points - const layer = this.svg.selectAll().data(dataForSvg); - layer.enter().append('path') - .attr('class', 'line').attr('d', (item: GraphLineData) => line(item.points)) - .style('stroke', (item: GraphLineData): string => this.colors[item.key]); - layer.enter().append('g').selectAll('circle') - .data((item: GraphLineData): GraphLinePoint[] => item.points.map((point: GraphScaleItem): GraphLinePoint => { - return Object.assign({}, point, { - color: this.colors[item.key] - }); - })) - .enter().append('circle') - .attr('cx', (item: GraphLinePoint): number => this.xScale(item.tick)) - .attr('cy', (item: GraphLinePoint): number => this.yScale(item.y)) - .attr('r', this.pointRadius) - .style('fill', (item: GraphLinePoint): string => item.color); - const gridLinesParent = this.svg.selectAll().data(dataForDomain).enter().append('g').selectAll() - .data((item: GraphScaleItem): GraphScaleItem[] => [item]).enter(); - gridLinesParent.append('rect').attr('class', 'grid-line-area') - .attr('x', (item: GraphScaleItem): number => this.xScale(item.tick) - this.pointRadius).attr('y', 0) - .style('width', `${this.pointRadius * 2}px`).style('height', `${this.height}px`) - .on('mouseover', (d: GraphScaleItem, index: number, elements: HTMLElement[]): void => { - elements.forEach((element: HTMLElement) => element.classList.add('visible-grid-line-area')); - this.handleMouseOver(Object.assign([], d, { - data: d - }), index, elements); - }) - .on('mousemove', this.handleMouseMove) - .on('mouseout', (d: GraphScaleItem, index: number, elements: HTMLElement[]): void => { - elements.forEach((element: HTMLElement) => element.classList.remove('visible-grid-line-area')); - this.handleMouseOut(); - }); - gridLinesParent.append('line').attr('class', 'grid-line') - .attr('x1', (item: GraphScaleItem): number => this.xScale(item.tick)) - .attr('x2', (item: GraphScaleItem): number => this.xScale(item.tick)) - .attr('y1', 0).attr('y2', this.height); - this.setDragBehavior(); - } - - protected setYScaleDomain(): void { - const keys = Object.keys(this.data), - maxValues = 
keys.map((currentKey: string): number => this.utils.getMaxNumberInObject(this.data[currentKey]), 0), - maximum = Math.max(...maxValues); - this.yScale.domain([0, maximum]); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.html deleted file mode 100644 index e2393eca4aa..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.html +++ /dev/null @@ -1,44 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.less deleted file mode 100644 index ba1ba2034e6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.less +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -.btn.dropdown-toggle { - text-transform: none; -} - -.dropdown-menu { - padding-bottom: @col-padding; - - date-picker /deep/ .form-group { - margin-bottom: @col-padding; - } - - .time-range-name { - .clickable-item; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.spec.ts deleted file mode 100644 index f0768610222..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.spec.ts +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {TimeRangePickerComponent} from './time-range-picker.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('TimeRangePickerComponent', () => { - let component: TimeRangePickerComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [TimeRangePickerComponent], - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - appSettings, - appState, - clusters, - components, - hosts, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - LogsContainerService, - UtilsService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture 
= TestBed.createComponent(TimeRangePickerComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.ts deleted file mode 100644 index e4e146fd57e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/time-range-picker/time-range-picker.component.ts +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, forwardRef} from '@angular/core'; -import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms'; -import {Moment} from 'moment-timezone'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {ListItem} from '@app/classes/list-item'; -import {TimeUnitListItem} from '@app/classes/filtering'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; - -@Component({ - selector: 'time-range-picker', - templateUrl: './time-range-picker.component.html', - styleUrls: ['./time-range-picker.component.less'], - providers: [ - { - provide: NG_VALUE_ACCESSOR, - useExisting: forwardRef(() => TimeRangePickerComponent), - multi: true - } - ] -}) -export class TimeRangePickerComponent implements ControlValueAccessor { - - startTime: Moment; - - endTime: Moment; - - private onChange: (fn: any) => void; - - private timeRange?: TimeUnitListItem; - - constructor( - private logsContainer: LogsContainerService, - private logsFilteringUtilsService: LogsFilteringUtilsService - ) {} - - get quickRanges(): (ListItem | TimeUnitListItem[])[] { - return this.logsContainer.filters.timeRange.options; - } - - get selection(): TimeUnitListItem { - return this.timeRange; - } - - set selection(newValue: TimeUnitListItem) { - this.timeRange = newValue; - if (this.onChange) { - this.onChange(newValue); - } - this.setEndTime(this.logsFilteringUtilsService.getEndTimeMomentFromTimeUnitListItem(newValue, this.logsContainer.timeZone)); - this.setStartTime(this.logsFilteringUtilsService.getStartTimeMomentFromTimeUnitListItem( - newValue, this.endTime, this.logsContainer.timeZone - )); - } - - setStartTime(timeObject: Moment): void { - this.startTime = timeObject; - } - - setEndTime(timeObject: Moment): void { - this.endTime = timeObject; - } - - setTimeRange(value: any, label: string): void { - this.selection = {label, value}; - } - - setCustomTimeRange(): void { - this.selection = { - label: this.logsContainer.customTimeRangeKey, - value: { - type: 'CUSTOM', - start: this.startTime, - end: this.endTime - } - }; 
- } - - writeValue(selection: TimeUnitListItem): void { - this.selection = selection; - } - - registerOnChange(callback: any): void { - this.onChange = callback; - } - - registerOnTouched(): void { - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.html deleted file mode 100644 index 3cb196e5915..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.html +++ /dev/null @@ -1,26 +0,0 @@ - - - - - -
-
-
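For reference, the TimeRangePickerComponent removed above wires itself into Angular forms through the ControlValueAccessor contract: it registers under the NG_VALUE_ACCESSOR token with a forwardRef provider, caches the callback handed to registerOnChange, and calls it from its selection setter. The sketch below shows only that wiring; the component name is hypothetical and a plain string value stands in for the TimeUnitListItem selection the real picker handles.

import { Component, forwardRef } from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';

// Minimal ControlValueAccessor sketch; the value type is a simplified stand-in
// for the TimeUnitListItem handled by the removed TimeRangePickerComponent.
@Component({
  selector: 'value-picker-sketch',
  template: `<button (click)="selection = 'LAST_24H'">Last 24h</button>`,
  providers: [
    {
      provide: NG_VALUE_ACCESSOR,
      useExisting: forwardRef(() => ValuePickerSketchComponent),
      multi: true
    }
  ]
})
export class ValuePickerSketchComponent implements ControlValueAccessor {

  private value: string;

  private onChange: (value: string) => void;

  get selection(): string {
    return this.value;
  }

  // User-driven changes are pushed back to the parent form control here.
  set selection(newValue: string) {
    this.value = newValue;
    if (this.onChange) {
      this.onChange(newValue);
    }
  }

  // Angular calls this when the bound control's value changes programmatically.
  writeValue(value: string): void {
    this.value = value;
  }

  registerOnChange(callback: (value: string) => void): void {
    this.onChange = callback;
  }

  registerOnTouched(): void {
  }
}

With the provider in place, such a component can be bound through formControlName or ngModel like any native input, which is how a parent filters form would consume a picker of this kind.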
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.less deleted file mode 100644 index 73edc0d0351..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.less +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/variables'; - -.btn-link { - // TODO implement actual colors - color: @submit-color; - - &:hover { - color: @submit-hover-color; - } -} - -/deep/ #timezone-map { - .Cbox { - .quickLink { - padding-top: 4px; - } - } - - .hoverZone { - display: inline-block; - - &:after { - content: '\007C\00a0\00a0'; - visibility: hidden; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts deleted file mode 100644 index 736c7ef8972..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts +++ /dev/null @@ -1,128 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UserSettingsService} from '@app/services/user-settings.service'; -import {UtilsService} from '@app/services/utils.service'; -import {AuthService} from '@app/services/auth.service'; -import {TimeZoneAbbrPipe} from '@app/pipes/timezone-abbr.pipe'; -import {ModalComponent} from '@app/modules/shared/components/modal/modal.component'; - -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; - -import {TimeZonePickerComponent} from './timezone-picker.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -import { dataAvailabilityStates, DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; - -describe('TimeZonePickerComponent', () => { - let component: TimeZonePickerComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ - TimeZonePickerComponent, - ModalComponent, - TimeZoneAbbrPipe - ], - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - appSettings, - appState, - clusters, - components, - hosts, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - tabs, - dataAvailabilityStates - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, 
- ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - LogsContainerService, - AuthService, - UserSettingsService, - UtilsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService, - DataAvailabilityStatesStore - ], - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(TimeZonePickerComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.ts deleted file mode 100644 index 91df12b4e62..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.ts +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, OnInit} from '@angular/core'; -import * as $ from 'jquery'; -import '@vendor/js/WorldMapGenerator.min'; -import {AppSettingsService} from '@app/services/storage/app-settings.service'; -import {UserSettingsService} from '@app/services/user-settings.service'; - -@Component({ - selector: 'timezone-picker', - templateUrl: './timezone-picker.component.html', - styleUrls: ['./timezone-picker.component.less'] -}) -export class TimeZonePickerComponent implements OnInit { - - constructor(private appSettings: AppSettingsService, private settingsService: UserSettingsService) { - } - - ngOnInit() { - this.appSettings.getParameter('timeZone').subscribe((value: string) => this.timeZone = value); - } - - readonly mapElementId = 'timezone-map'; - - private readonly mapOptions = { - quickLink: [ - { - PST: 'PST', - MST: 'MST', - CST: 'CST', - EST: 'EST', - GMT: 'GMT', - LONDON: 'Europe/London', - IST: 'IST' - } - ] - }; - - private mapElement: any; - - private timeZoneSelect: JQuery; - - isTimeZonePickerDisplayed: boolean = false; - - timeZone: string; - - setTimeZonePickerDisplay(isDisplayed: boolean): void { - this.isTimeZonePickerDisplayed = isDisplayed; - } - - initMap(): void { - this.mapElement = $(`#${this.mapElementId}`); - this.mapElement.WorldMapGenerator(this.mapOptions); - this.timeZoneSelect = this.mapElement.find('select'); - this.timeZoneSelect.removeClass('btn btn-default').addClass('form-control').val(this.timeZone); - } - - setTimeZone(): void { - const timeZone = this.timeZoneSelect.val(); - this.settingsService.setTimeZone(timeZone); - this.setTimeZonePickerDisplay(false); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.html deleted file mode 100644 index 4c91a8de347..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.html +++ /dev/null @@ -1,24 +0,0 @@ - - -
- - - -
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.less deleted file mode 100644 index a5d7ac13680..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.less +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../modules/shared/mixins'; - -:host { - .default-flex; - margin-right: 0; - - .filters { - display: inline-block; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.spec.ts deleted file mode 100644 index 3ae2f972d63..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.spec.ts +++ /dev/null @@ -1,119 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {ReactiveFormsModule} from '@angular/forms'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; - -import {TopMenuComponent} from './top-menu.component'; -import {RouterTestingModule} from '@angular/router/testing'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('TopMenuComponent', () => { - let component: TopMenuComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - ReactiveFormsModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - appState, - appSettings, - tabs, - clusters, - components, - hosts - }), - ...TranslationModules - ], - declarations: [TopMenuComponent], - providers: [ - ...MockHttpRequestModules, - LogsContainerService, - AuthService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - AppStateService, - AppSettingsService, - TabsService, - ClustersService, - ComponentsService, - HostsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [CUSTOM_ELEMENTS_SCHEMA] - }) - .compileComponents(); - 
})); - - beforeEach(() => { - fixture = TestBed.createComponent(TopMenuComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.ts deleted file mode 100644 index a87ca2f4ff0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/components/top-menu/top-menu.component.ts +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component} from '@angular/core'; -import {FormGroup} from '@angular/forms'; -import {FilterCondition, TimeUnitListItem} from '@app/classes/filtering'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject} from '@app/classes/object'; -import {AuthService} from '@app/services/auth.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {Router} from '@angular/router'; - -@Component({ - selector: 'top-menu', - templateUrl: './top-menu.component.html', - styleUrls: ['./top-menu.component.less'] -}) -export class TopMenuComponent { - - constructor(private authService: AuthService, private logsContainer: LogsContainerService, private router: Router) {} - - get filtersForm(): FormGroup { - return this.logsContainer.filtersForm; - }; - - get filters(): HomogeneousObject { - return this.logsContainer.filters; - }; - - openSettings = (): void => {}; - - /** - * Request a logout action from AuthService - */ - logout = (): void => { - this.authService.logout(); - } - - navigateToShipperConfig = (): void => { - this.router.navigate(['/shipper']); - } - - readonly items = [ - { - iconClass: 'fa fa-user grey', - hideCaret: true, - isRightAlign: true, - subItems: [ - { - label: 'common.settings', - onSelect: this.openSettings, - iconClass: 'fa fa-cog' - }, - - { - label: 'topMenu.shipperConfiguration', - onSelect: this.navigateToShipperConfig, - iconClass: 'fa fa-file-code-o' - }, - { - isDivider: true - }, - { - label: 'authorization.logout', - onSelect: this.logout, - iconClass: 'fa fa-sign-out' - } - ] - } - ]; - - get clusters(): (ListItem | TimeUnitListItem[])[] { - return this.filters.clusters.options; - } - - get isClustersFilterDisplayed(): boolean { - return this.logsContainer.isFilterConditionDisplayed('clusters') && this.clusters.length > 1; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/app-load.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/app-load.module.ts deleted file mode 100644 index ade23da6be8..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/app-load.module.ts +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { NgModule, APP_INITIALIZER } from '@angular/core'; -import { HttpClientModule } from '@angular/common/http'; - -import { AppLoadService } from './services/app-load.service'; -import { DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; - -export function check_if_authorized(appLoadService: AppLoadService) { - return () => appLoadService.syncAuthorizedStateWithBackend(); -} -export function set_translation_service(appLoadService: AppLoadService) { - return () => appLoadService.setTranslationService(); -} - -@NgModule({ - imports: [ - HttpClientModule - ], - providers: [ - AppLoadService, - DataAvailabilityStatesStore, -{ provide: APP_INITIALIZER, useFactory: set_translation_service, deps: [AppLoadService], multi: true }, - { provide: APP_INITIALIZER, useFactory: check_if_authorized, deps: [AppLoadService], multi: true } - ] -}) -export class AppLoadModule { } diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/models/data-availability-state.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/models/data-availability-state.model.ts deleted file mode 100644 index d819dec57b3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/models/data-availability-state.model.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {DataAvailabilityValues} from 'app/classes/string'; - -export interface DataAvaibilityStatesModel { - clustersDataState: DataAvailabilityValues; - hostsDataState: DataAvailabilityValues; - componentsDataState: DataAvailabilityValues; - logFieldsDataState: DataAvailabilityValues; -} - -export const initialDataAvaibilityStates: DataAvaibilityStatesModel = { - clustersDataState: DataAvailabilityValues.NOT_AVAILABLE, - hostsDataState: DataAvailabilityValues.NOT_AVAILABLE, - componentsDataState: DataAvailabilityValues.NOT_AVAILABLE, - logFieldsDataState: DataAvailabilityValues.NOT_AVAILABLE -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/services/app-load.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/services/app-load.service.ts deleted file mode 100644 index 9405c9b8a5f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/services/app-load.service.ts +++ /dev/null @@ -1,256 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { Injectable } from '@angular/core'; -import {Response} from '@angular/http'; -import 'rxjs/add/operator/toPromise'; -import {TranslateService} from '@ngx-translate/core'; - -import {AppStateService} from 'app/services/storage/app-state.service'; -import {HttpClientService} from 'app/services/http-client.service'; -import {ClustersService} from 'app/services/storage/clusters.service'; -import {ServiceLogsFieldsService} from 'app/services/storage/service-logs-fields.service'; -import {AuditLogsFieldsService} from 'app/services/storage/audit-logs-fields.service'; -import {AuditFieldsDefinitionSet, LogField} from 'app/classes/object'; -import {Observable} from 'rxjs/Observable'; -import {HostsService} from 'app/services/storage/hosts.service'; -import {NodeItem} from 'app/classes/models/node-item'; -import {ComponentsService} from 'app/services/storage/components.service'; -import {DataAvailabilityValues} from 'app/classes/string'; -import { DataAvaibilityStatesModel } from '@app/modules/app-load/models/data-availability-state.model'; -import { DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; - -// @ToDo create a separate data state enrty in the store with keys of the model names -export enum DataStateStoreKeys { - CLUSTERS_DATA_KEY = 'clustersDataState', - HOSTS_DATA_KEY = 'hostsDataState', - COMPONENTS_DATA_KEY = 'componentsDataState', - LOG_FIELDS_DATA_KEY = 'logFieldsDataState', - BASE_DATA_SET_KEY = 'baseDataSetState', -}; - -export const baseDataKeys: DataStateStoreKeys[] = [ - DataStateStoreKeys.CLUSTERS_DATA_KEY, - DataStateStoreKeys.HOSTS_DATA_KEY, - DataStateStoreKeys.COMPONENTS_DATA_KEY -]; - -@Injectable() -export class AppLoadService { - - baseDataAvailibilityState$: Observable; - - constructor( - private httpClient: HttpClientService, - private appStateService: AppStateService, - private clustersStorage: ClustersService, - private serviceLogsFieldsService: ServiceLogsFieldsService, - private auditLogsFieldsService: AuditLogsFieldsService, - private translationService: TranslateService, - private hostStoreService: HostsService, - private componentsStorageService: ComponentsService, - private dataAvaibilityStateStore: DataAvailabilityStatesStore - ) { - this.appStateService.getParameter('isAuthorized').subscribe(this.initOnAuthorization); - this.appStateService.setParameter('isInitialLoading', true); - - Observable.combineLatest( - this.appStateService.getParameter(DataStateStoreKeys.CLUSTERS_DATA_KEY), - this.appStateService.getParameter(DataStateStoreKeys.COMPONENTS_DATA_KEY), - this.appStateService.getParameter(DataStateStoreKeys.HOSTS_DATA_KEY) - ).subscribe(this.onDataAvailibilityChange); - - this.baseDataAvailibilityState$ = this.dataAvaibilityStateStore.getAll() - .map((dataAvailabilityState: DataAvaibilityStatesModel): DataAvailabilityValues => { - const values: DataAvailabilityValues[] = Object.keys(dataAvailabilityState) - .filter((key: DataStateStoreKeys): boolean => baseDataKeys.indexOf(key) > -1) - .map((key): DataAvailabilityValues => dataAvailabilityState[key]); - let nextDataState: DataAvailabilityValues = DataAvailabilityValues.NOT_AVAILABLE; - if (values.indexOf(DataAvailabilityValues.ERROR) > -1) { - nextDataState = DataAvailabilityValues.ERROR; - } - if (values.indexOf(DataAvailabilityValues.LOADING) > -1) { - nextDataState = DataAvailabilityValues.LOADING; - } - if ( values.filter((value: DataAvailabilityValues) => value !== DataAvailabilityValues.AVAILABLE).length === 0 ) { - nextDataState = 
DataAvailabilityValues.AVAILABLE; - } - return nextDataState; - }); - this.baseDataAvailibilityState$.subscribe(this.onBaseDataAvailabilityChange); - } - - onDataAvailibilityChange = (dataAvailabilityStates: DataAvailabilityValues[]): void => { - const baseDataAvailability: DataAvailabilityValues = dataAvailabilityStates - .reduce((availabeState, dataState: DataAvailabilityValues): DataAvailabilityValues => { - if (dataState === DataAvailabilityValues.ERROR || availabeState === DataAvailabilityValues.ERROR) { - return DataAvailabilityValues.ERROR; - } - if (availabeState === DataAvailabilityValues.LOADING || dataState === DataAvailabilityValues.LOADING) { - return DataAvailabilityValues.LOADING; - } - if (availabeState === DataAvailabilityValues.AVAILABLE && dataState === DataAvailabilityValues.AVAILABLE) { - return DataAvailabilityValues.AVAILABLE; - } - return DataAvailabilityValues.NOT_AVAILABLE; - }, DataAvailabilityValues.AVAILABLE); - } - - onBaseDataAvailabilityChange = (baseDataAvailibilityState: DataAvailabilityValues): void => { - this.appStateService.setParameter(DataStateStoreKeys.BASE_DATA_SET_KEY, baseDataAvailibilityState); - } - - private setDataAvaibility(dataKey: string, availabilityState: DataAvailabilityValues) { - this.dataAvaibilityStateStore.setParameter(dataKey, availabilityState); - } - - loadClusters(): Observable { - this.clustersStorage.clear(); - this.setDataAvaibility(DataStateStoreKeys.CLUSTERS_DATA_KEY, DataAvailabilityValues.LOADING); - const response$: Observable = this.httpClient.get('clusters').filter((response: Response) => response.ok).first(); - response$.subscribe( - (response: Response) => { - const clusterNames = response.json(); - if (clusterNames) { - this.clustersStorage.addInstances(clusterNames); - this.setDataAvaibility(DataStateStoreKeys.CLUSTERS_DATA_KEY, DataAvailabilityValues.AVAILABLE); - } - }, - (errorResponse: Response) => { - this.clustersStorage.addInstances([]); - this.setDataAvaibility(DataStateStoreKeys.CLUSTERS_DATA_KEY, DataAvailabilityValues.ERROR); - } - ); - return response$; - } - - clearClusters(): void { - this.clustersStorage.clear(); - } - - loadHosts(): Observable { - this.hostStoreService.clear(); - this.setDataAvaibility(DataStateStoreKeys.HOSTS_DATA_KEY, DataAvailabilityValues.LOADING); - const response$ = this.httpClient.get('hosts').filter((response: Response) => response.ok); - response$.subscribe((response: Response): void => { - const jsonResponse = response.json(), - hosts = jsonResponse && jsonResponse.vNodeList; - if (hosts) { - this.hostStoreService.addInstances(hosts); - this.setDataAvaibility(DataStateStoreKeys.HOSTS_DATA_KEY, DataAvailabilityValues.AVAILABLE); - } - }, () => { - this.hostStoreService.addInstances([]); - this.setDataAvaibility(DataStateStoreKeys.HOSTS_DATA_KEY, DataAvailabilityValues.ERROR); - }); - return response$; - } - - clearHosts(): void { - this.hostStoreService.clear(); - } - - loadComponents(): Observable<[{[key: string]: any}, {[key: string]: any}]> { - this.setDataAvaibility(DataStateStoreKeys.COMPONENTS_DATA_KEY, DataAvailabilityValues.LOADING); - const responseComponentsData$: Observable = this.httpClient.get('components').first() - .filter((response: Response) => response.ok) - .map((response: Response) => response.json()); - const responseComponentsName$: Observable = this.httpClient.get('serviceComponentsName').first() - .filter((response: Response) => response.ok) - .map((response: Response) => response.json()); - const responses$ = 
Observable.combineLatest(responseComponentsName$, responseComponentsData$); - responses$.subscribe(([componentsNames, componentsData]: [{[key: string]: any}, {[key: string]: any}]) => { - const components = componentsData && componentsData.vNodeList && componentsData.vNodeList.map((item): NodeItem => { - const component = componentsNames.metadata.find(componentItem => componentItem.name === item.name); - return Object.assign(item, { - label: component && (component.label || item.name), - group: component && component.group && { - name: component.group, - label: componentsNames.groups[component.group] - }, - value: item.logLevelCount.reduce((currentValue: number, currentItem): number => { - return currentValue + Number(currentItem.value); - }, 0) - }); - }); - if (components) { - this.componentsStorageService.addInstances(components); - this.setDataAvaibility(DataStateStoreKeys.COMPONENTS_DATA_KEY, DataAvailabilityValues.AVAILABLE); - } - }); - return responses$; - } - - clearComponents(): void { - this.componentsStorageService.clear(); - } - - loadFieldsForLogs(): Observable<[LogField[], AuditFieldsDefinitionSet]> { - const serviceLogsFieldsResponse$: Observable = this.httpClient.get('serviceLogsFields') - .filter((response: Response) => response.ok) - .map((response: Response) => { - return response.json(); - }); - const auditLogsFieldsResponse$: Observable = this.httpClient.get('auditLogsFields') - .filter((response: Response) => response.ok) - .map((response: Response) => { - return response.json(); - }); - const responses$: Observable<[LogField[], AuditFieldsDefinitionSet]> = Observable.combineLatest( - serviceLogsFieldsResponse$, auditLogsFieldsResponse$ - ); - responses$.subscribe(([serviceLogsFieldsResponse, auditLogsFieldsResponse]: [LogField[], AuditFieldsDefinitionSet]) => { - this.serviceLogsFieldsService.addInstances(serviceLogsFieldsResponse); - this.auditLogsFieldsService.setParameters(auditLogsFieldsResponse); - this.setDataAvaibility(DataStateStoreKeys.LOG_FIELDS_DATA_KEY, DataAvailabilityValues.AVAILABLE); - }); - return responses$; - } - - initOnAuthorization = (isAuthorized): void => { - if (isAuthorized) { - this.loadClusters(); - this.loadHosts(); - this.loadComponents(); - } else { - this.clearClusters(); - this.clearHosts(); - this.clearComponents(); - } - } - - syncAuthorizedStateWithBackend(): Promise { - const setAuthorization = (isAuthorized: boolean) => { - this.appStateService.setParameters({ - isAuthorized, - isInitialLoading: false - }); - }; - return this.httpClient.get('status').toPromise() - .then( - (response: Response) => setAuthorization(response.ok), - (response: Response) => setAuthorization(false) - ); - } - - setTranslationService() { - this.translationService.setDefaultLang('en'); - return this.translationService.use('en').toPromise(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/stores/data-availability-state.store.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/stores/data-availability-state.store.ts deleted file mode 100644 index 5c9df7da1c8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/app-load/stores/data-availability-state.store.ts +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, ObjectModelService, getObjectReducer} from '@app/classes/models/store'; - -import {initialDataAvaibilityStates} from '@modules/app-load/models/data-availability-state.model'; - -export const modelName = 'dataAvailabilityStates'; - -@Injectable() -export class DataAvailabilityStatesStore extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const dataAvailabilityStates = getObjectReducer(modelName, initialDataAvaibilityStates); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/animations.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/animations.less deleted file mode 100644 index 5b8a04ce93c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/animations.less +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -@keyframes rotateplane { - 0% { - transform: perspective(120px) rotateX(0deg) rotateY(0deg); - } 50% { - transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg); - } 100% { - transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); - } -} - -.square-spinner(@size: 40px, @background: #3FAE2A, @speed: 1.2s) { - width: @size; - height: @size; - background: @background; - animation: rotateplane @speed infinite ease-in-out; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.html deleted file mode 100644 index 6f83974371b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.html +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.less deleted file mode 100644 index 74e8f736ce5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.less +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - @import '../../variables'; -:host { - display: inline-block; - position: relative; - label { - align-items: center; - align-content: center; - background-color: transparent; - display: flex; - height: 100%; - justify-content: center; - left: 0; - position: absolute; - top: 0; - width: 100%; - } - svg { - circle { - fill: transparent; - stroke: @blue; - transition: stroke-dashoffset 0.35s; - transform: rotate(-90deg); - transform-origin: 50% 50%; - &.full-circle { - stroke: @grey; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.spec.ts deleted file mode 100644 index b6cf4caceec..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.spec.ts +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; - -import { CircleProgressBarComponent } from './circle-progress-bar.component'; - -describe('CircleProgressBarComponent', () => { - let component: CircleProgressBarComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ CircleProgressBarComponent ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(CircleProgressBarComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.ts deleted file mode 100644 index c19e4c7e6ec..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/circle-progress-bar/circle-progress-bar.component.ts +++ /dev/null @@ -1,86 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { - Component, - OnInit, - OnChanges, - Input, - ViewChild, - ElementRef, - SimpleChanges, - SimpleChange -} from '@angular/core'; - -@Component({ - selector: 'circle-progress-bar', - templateUrl: './circle-progress-bar.component.html', - styleUrls: ['./circle-progress-bar.component.less'] -}) -export class CircleProgressBarComponent implements OnInit, OnChanges { - - @Input() - radius: number; - - @Input() - strokeColor = 'white'; - - @Input() - strokeWidth: number; - - @Input() - fill = 'transparent'; - - @Input() - percent = 0; - - @Input() - label: string; - - @ViewChild('circle') - circleRef: ElementRef; - - get normalizedRadius(): number { - return this.radius - this.strokeWidth; - } - - get circumference(): number { - return this.normalizedRadius * 2 * Math.PI; - } - - get strokeDashoffset(): number { - return this.circumference - (this.percent / 100 * this.circumference); - } - - constructor() { } - - ngOnInit() { - this.setProgress(this.percent); - } - - ngOnChanges(changes: SimpleChanges) { - if (changes.percent) { - this.setProgress(this.percent); - } - } - - setProgress(percent = this.percent) { - if (this.circleRef) { - this.circleRef.nativeElement.style.strokeDashoffset = this.strokeDashoffset; - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.html deleted file mode 100644 index b2441dd2d29..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.html +++ /dev/null @@ -1,48 +0,0 @@ - - -
- {{ ('dataAvaibilityState.' + dataAvaibilityState.storeKey + '.label') | translate }}
- {{'dataAvaibilityState.hasError.message' | translate}}
-
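The CircleProgressBarComponent deleted further up (and referenced by this loading-indicator template) animates its ring with the usual SVG stroke-dasharray / stroke-dashoffset technique: the dash length equals the circle's circumference and the offset shrinks as the percentage grows. A small self-contained sketch of that arithmetic, using illustrative radius and stroke-width values rather than the component's inputs:

// Sketch of the stroke-dashoffset arithmetic used by the removed
// CircleProgressBarComponent; radius and strokeWidth are example values.
function circleProgress(radius: number, strokeWidth: number, percent: number) {
  const normalizedRadius = radius - strokeWidth;           // keep the stroke inside the viewBox
  const circumference = normalizedRadius * 2 * Math.PI;    // full dash length of the ring
  const strokeDashoffset = circumference - (percent / 100) * circumference;
  return { normalizedRadius, circumference, strokeDashoffset };
}

// A 50px-radius ring with a 4px stroke at 25%:
// circumference ≈ 289.03, strokeDashoffset ≈ 216.77, so three quarters of the ring stays hidden.
const { circumference, strokeDashoffset } = circleProgress(50, 4, 25);
console.log(circumference.toFixed(2), strokeDashoffset.toFixed(2));

In the component these values feed the circle element's dash settings and an inline strokeDashoffset style, and the transition declared in the accompanying .less file animates the change.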
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.less deleted file mode 100644 index ac89f3809d0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.less +++ /dev/null @@ -1,157 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - @import '../../main'; - .indicator() { - height: 20px; - opacity: 1; - width: 20px; - } - -@keyframes dash { - 0% { - stroke-dashoffset: 1000; - } - 100% { - stroke-dashoffset: 0; - } -} - -@keyframes dash-check { - 0% { - stroke-dashoffset: -100; - } - 100% { - stroke-dashoffset: 900; - } -} - -@keyframes rotate { - 100% { - -webkit-transform: rotate(360deg); - transform: rotate(360deg); - } -} -@keyframes dash-loading { - 0% { - stroke-dasharray: 1,200; - stroke-dashoffset: 0; - } - 50% { - stroke-dasharray: 89,200; - stroke-dashoffset: -35; - } - 100% { - stroke-dasharray: 89,200; - stroke-dashoffset: -124; - } -} - -:host { - height: 80vh; - .data-avaibility { - align-items: center; - align-content: center; - display: flex; - justify-content: center; - margin: 25vh 0 0 0; - width: 100vw; - .state-item { - align-items: center; - display: flex; - flex-direction: column; - margin: 0 10px; - width: 9em; - .label { - color: @base-font-color; - font-size: 13px; - margin: 0 0 .5em 0; - } - .state-indicator { - .path { - stroke-dasharray: 1000; - stroke-dashoffset: 0; - &.circle { - animation: dash .9s ease-in-out; - } - &.line { - stroke-dashoffset: 1000; - animation: dash .9s .35s ease-in-out forwards; - } - &.check { - stroke-dashoffset: -100; - animation: dash-check .9s .35s ease-in-out forwards; - } - } - .circular-loader { - animation: rotate 2s linear infinite; - height: 20px; - transform-origin: center center; - width: 20px; - } - .loading-path { - stroke: @form-success-color; - stroke-dasharray: 150,200; - stroke-dashoffset: -10; - -webkit-animation: dash-loading 1.5s ease-in-out infinite, color 6s ease-in-out infinite; - animation: dash-loading 1.5s ease-in-out infinite, color 6s ease-in-out infinite; - stroke-linecap: round; - } - } - &.available .state-indicator { - .state-available { - stroke: @form-success-color; - .indicator(); - } - } - &.error .state-indicator { - .state-error { - stroke: @form-error-color; - .indicator(); - } - } - &.not-available .state-indicator { - .state-not-available { - color: @unknown-color; - .indicator(); - } - } - } - } - .data-avaibility-progress { - width: 100vw; - .progress-bar { - background-color: #fff; - box-shadow: 0 0 8px rgba(0,0,0,.6); - 
height: 4px; - width: 100%; - .progress-bar-indicator { - height: 100%; - width: 0; - background: @form-success-color; - transition: width 30ms ease-in-out; - } - } - } - .data-avaibility-error { - color: @form-error-color; - height: 2em; - line-height: 2em; - text-align: center; - width: 100vw; - - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.spec.ts deleted file mode 100644 index fdebdaecb13..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.spec.ts +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; -import { DataLoadingIndicatorComponent } from '@app/modules/shared/components/data-loading-indicator/data-loading-indicator.component'; -import { TranslationModules } from '@app/test-config.spec'; -import { DataAvailabilityStatesStore, dataAvailabilityStates } from '@app/modules/app-load/stores/data-availability-state.store'; -import { StoreModule } from '@ngrx/store'; -import { LoadingIndicatorComponent } from '@app/modules/shared/components/loading-indicator/loading-indicator.component'; - -describe('DataLoadingIndicatorComponent', () => { - let component: DataLoadingIndicatorComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [ - ...TranslationModules, - StoreModule.provideStore({ - dataAvailabilityStates - }) - ], - declarations: [ - LoadingIndicatorComponent, - DataLoadingIndicatorComponent - ], - providers: [ - DataAvailabilityStatesStore - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(DataLoadingIndicatorComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeDefined(); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.ts deleted file mode 100644 index e6800e15406..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/data-loading-indicator/data-loading-indicator.component.ts +++ /dev/null @@ -1,84 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or 
more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { Component, Input, OnDestroy } from '@angular/core'; -import { Subject } from 'rxjs/Subject'; -import { DataStateStoreKeys, baseDataKeys } from '@app/modules/app-load/services/app-load.service'; -import { Observable } from 'rxjs/Observable'; -import { DataAvailabilityValues } from '@app/classes/string'; -import { DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; - -export interface DataAvaibilityObject { - storeKey: DataStateStoreKeys; - avaibility: DataAvailabilityValues; -}; - -@Component({ - selector: 'data-loading-indicator', - templateUrl: './data-loading-indicator.component.html', - styleUrls: ['./data-loading-indicator.component.less'] -}) -export class DataLoadingIndicatorComponent implements OnDestroy { - - @Input() - keysToWatch: DataStateStoreKeys[] = baseDataKeys; - - private destroy$: Subject = new Subject(); - - private currentWatchedDataStates$: Observable<{[key: string]: DataAvailabilityValues}> = this.dataAvailabilityStatesStore.getAll() - .map((dataStates: {[key: string]: DataAvailabilityValues}): {[key: string]: DataAvailabilityValues} => { - return Object.keys(dataStates || {}) - .filter((dataStateKey: DataStateStoreKeys) => this.keysToWatch.indexOf(dataStateKey) > -1) - .reduce((watchedStates, key) => Object.assign({}, watchedStates, { - [key]: dataStates[key] - }), {}); - }); - currentWatchedDataStatesAsArray$: Observable = this.currentWatchedDataStates$.map((dataStates) => { - return Object.keys(dataStates).reduce((statesArray, key): DataAvaibilityObject[] => { - return [ - ...statesArray, - { - storeKey: key, - avaibility: dataStates[key] - } - ]; - }, []); - }); - dataLoadingProgress$: Observable = this.currentWatchedDataStates$.map((dataStates): number => { - const keys: string[] = Object.keys(dataStates); - const total: number = keys.length; - const totalAvailable: number = keys.filter( - (key: string) => dataStates[key] === DataAvailabilityValues.AVAILABLE - ).length; - return totalAvailable / total; - }); - hasDataStateError$: Observable = this.currentWatchedDataStates$.map((dataStates): boolean => { - return Object.keys(dataStates).reduce((hasError: boolean, key) => { - return hasError || dataStates[key] === DataAvailabilityValues.ERROR; - }, false); - }); - - constructor( - private dataAvailabilityStatesStore: DataAvailabilityStatesStore - ) {} - - ngOnDestroy() { - this.destroy$.next(true); - this.destroy$.unsubscribe(); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.html deleted file mode 100644 index 714002854a4..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.html +++ /dev/null @@ -1,36 +0,0 @@ - - -
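The dropdown-button markup itself is not preserved, but its component class appears further down in this diff; a host driving it would look roughly like the hypothetical component below. The host class, selector, and option values are illustrative, and ListItem is redeclared locally only to keep the sketch self-contained (the real interface is @app/classes/list-item).

// Hypothetical host wiring for DropdownButtonComponent, inferred from its
// @Input()/@Output() API shown later in this diff.
import { Component } from '@angular/core';

// Local stand-in for the real ListItem interface from @app/classes/list-item.
interface ListItem { label: string; value: any; isChecked?: boolean; }

@Component({
  selector: 'level-filter-example', // illustrative selector, not from the sources
  template: `
    <dropdown-button
      label="levels"
      [options]="levelOptions"
      [isMultipleChoice]="true"
      (selectItem)="onLevelsChange($event)">
    </dropdown-button>`
})
export class LevelFilterExampleComponent {
  levelOptions: ListItem[] = [
    { label: 'INFO', value: 'INFO' },
    { label: 'ERROR', value: 'ERROR', isChecked: true }
  ];

  // With isMultipleChoice set, selectItem emits the checked values as an array;
  // for single choice it emits just the selected value.
  onLevelsChange(selection: string[]): void {
    console.log('selected levels', selection);
  }
}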
    diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.less deleted file mode 100644 index 82645e5c597..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.less +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../mixins'; - -:host { - position: relative; - - button { - text-transform: none; - - .filter-label { - .plain { - color: initial; - } - - .label-before-selection { - padding: @input-group-addon-padding; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.spec.ts deleted file mode 100644 index ca16b80df9d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.spec.ts +++ /dev/null @@ -1,118 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {UtilsService} from '@app/services/utils.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; - -import {DropdownButtonComponent} from './dropdown-button.component'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -describe('DropdownButtonComponent', () => { - let component: DropdownButtonComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - }; - } - }; - TestBed.configureTestingModule({ - declarations: [DropdownButtonComponent], - imports: [ - StoreModule.provideStore({ - appSettings, - clusters, - components, - appState, - hosts, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - AppSettingsService, - ClustersService, - ComponentsService, - AppStateService, - HostsService, - AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - UtilsService, - { - provide: HttpClientService, - useValue: httpClient - }, - LogsContainerService, - AuthService, - NotificationsService, - NotificationService - ], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(DropdownButtonComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.ts 
b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.ts deleted file mode 100644 index 534b69dcd57..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-button/dropdown-button.component.ts +++ /dev/null @@ -1,119 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Component, Input, Output, EventEmitter} from '@angular/core'; -import {ListItem} from '@app/classes/list-item'; -import {UtilsService} from '@app/services/utils.service'; - -@Component({ - selector: 'dropdown-button', - templateUrl: './dropdown-button.component.html', - styleUrls: ['./dropdown-button.component.less'] -}) -export class DropdownButtonComponent { - - @Input() - label?: string; - - @Input() - buttonClass = 'btn-link'; - - @Input() - iconClass?: string; - - @Input() - hideCaret = false; - - @Input() - showSelectedValue = true; - - @Input() - isRightAlign = false; - - @Input() - isDropup = false; - - @Input() - showCommonLabelWithSelection = false; - - @Output() - selectItem: EventEmitter = new EventEmitter(); - - // PROXY PROPERTIES TO DROPDOWN LIST COMPONENT - @Input() - options: ListItem[] = []; - - @Input() - listItemArguments: any[] = []; - - @Input() - isMultipleChoice = false; - - @Input() - useClearToDefaultSelection = false; - - protected selectedItems?: ListItem[] = []; - - get selection(): ListItem[] { - return this.selectedItems; - } - - set selection(items: ListItem[]) { - this.selectedItems = items; - } - - // TODO handle case of selections with multiple items - /** - * Indicates whether selection can be displayed at the moment, i.e. it's not empty, not multiple - * and set to be displayed by showSelectedValue flag - * @returns {boolean} - */ - get isSelectionDisplayable(): boolean { - return this.showSelectedValue && !this.isMultipleChoice && this.selection.length > 0; - } - - constructor(protected utils: UtilsService) {} - - updateSelection(updates: ListItem | ListItem[]): void { - if (updates && (!Array.isArray(updates) || updates.length)) { - const items: ListItem[] = Array.isArray(updates) ? updates : [updates]; - if (this.isMultipleChoice) { - items.forEach((item: ListItem) => { - if (this.options && this.options.length) { - const itemToUpdate: ListItem = this.options.find((option: ListItem) => this.utils.isEqual(option.value, item.value)); - if (itemToUpdate) { - itemToUpdate.isChecked = item.isChecked; - } - } - }); - } else { - const selectedItem: ListItem = Array.isArray(updates) ? 
updates[0] : updates; - this.options.forEach((item: ListItem) => { - item.isChecked = this.utils.isEqual(item.value, selectedItem.value); - }); - } - } else { - this.options.forEach((item: ListItem) => item.isChecked = false); - } - const checkedItems = this.options.filter((option: ListItem): boolean => option.isChecked); - this.selection = checkedItems; - const selectedValues = checkedItems.map((option: ListItem): any => option.value); - this.selectItem.emit(this.isMultipleChoice ? selectedValues : selectedValues.shift()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.html deleted file mode 100644 index a15b1c315c7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.html +++ /dev/null @@ -1,79 +0,0 @@ - - -
{{item.label | translate}}
{{'dropdown.selection' | translate:({total: itemsSelected.length})}}
{{'dropdown.selection.clearToDefault' | translate}}
{{'dropdown.selection.clear' | translate}}
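Only the translate bindings above are preserved from the dropdown-list markup; the payload that eventually leaves the dropdown is assembled in DropdownButtonComponent.updateSelection (shown earlier in this diff). Stripped of the Angular plumbing, that emit step reduces to roughly the following sketch; the sample options are made up.

// Sketch of the final step of DropdownButtonComponent.updateSelection:
// once isChecked has been reconciled on the options, emit either all checked
// values (multiple choice) or just the first one.
function emittedSelection(options: { value: string; isChecked?: boolean }[],
                          isMultipleChoice: boolean): string | string[] {
  const checkedValues = options
    .filter(option => option.isChecked)
    .map(option => option.value);
  return isMultipleChoice ? checkedValues : checkedValues[0];
}

// Made-up options:
const opts = [
  { value: 'v0', isChecked: true },
  { value: 'v1', isChecked: true },
  { value: 'v2' }
];
console.log(emittedSelection(opts, true));  // ['v0', 'v1']
console.log(emittedSelection(opts, false)); // 'v0'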
    - - - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.less deleted file mode 100644 index 5ce1061ae4c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.less +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import '../../../shared/mixins'; - -:host { - max-height: @dropdown-max-height; - overflow-y: auto; - - > li { - .dropdown-item-default; - transition: opacity 300ms ease-in, height 100ms 400ms ease-in; - &.filtered { - overflow: hidden; - opacity: 0; - height: 0; - } - .list-item-label { - .dropdown-item-child-default; - - label { - margin-bottom: 0; - cursor: pointer; - } - - input[type=checkbox]:checked + label:after { - top: @checkbox-top; - } - - .label-container { - width: 100%; - } - } - &.filter { - padding: 0 .25em; - position: relative; - &:hover { - background-color: transparent; - } - .fa { - margin: 0 .6em 0 0; - opacity: .8; - position: absolute; - right: 0; - top: .5em - } - .fa-search { - cursor: auto; - } - .clear-filter { - right: 1.3em; - } - input { - height: 2em; - width: 100%; - } - .clear-filter { - cursor: pointer; - float: right; - } - } - &.selections { - font-size: .75em; - padding: .5em 20px; - &.use-clear-to-default { - span.total-selection { - display: block; - } - a { - padding: 3px 5px; - &:first-of-type { - padding-left: 0; - } - &:last-of-type { - padding-right: 0; - } - } - } - a { - display: inline; - &:hover { - background: transparent none; - text-decoration: underline; - } - } - } - &.selections:hover { - background: transparent none; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.spec.ts deleted file mode 100644 index 8b3b13beb49..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.spec.ts +++ /dev/null @@ -1,148 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {FormsModule} from '@angular/forms'; -import {StoreModule} from '@ngrx/store'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {DropdownListComponent} from './dropdown-list.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('DropdownListComponent', () => { - let component: DropdownListComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - }; - } - }; - TestBed.configureTestingModule({ - declarations: [DropdownListComponent], - imports: [ - RouterTestingModule, - ...TranslationModules, - StoreModule.provideStore({ - hosts, - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - serviceLogsTruncated, - tabs - }), - FormsModule - ], - providers: [ - ...MockHttpRequestModules, - 
ComponentGeneratorService, - LogsContainerService, - HostsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - ServiceLogsTruncatedService, - TabsService, - AuthService, - UtilsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(DropdownListComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - - describe('#changeSelectedItem()', () => { - - const options = { - label: 'l', - value: 'v' - }; - - beforeEach(() => { - spyOn(component.selectedItemChange, 'emit').and.callFake(() => {}); - component.changeSelectedItem(options); - }); - - it('event should be emitted', () => { - expect(component.selectedItemChange.emit).toHaveBeenCalled(); - }); - - it('event emitter should be called with correct arguments', () => { - expect(component.selectedItemChange.emit).toHaveBeenCalledWith(options); - }); - - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.ts deleted file mode 100644 index 651578ae2fe..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/dropdown-list/dropdown-list.component.ts +++ /dev/null @@ -1,233 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { - Component, OnChanges, AfterViewChecked, OnDestroy, SimpleChanges, Input, Output, EventEmitter, - ViewChildren, ViewContainerRef, QueryList, ChangeDetectorRef, ElementRef, ViewChild, OnInit -} from '@angular/core'; -import {Subscription} from 'rxjs/Subscription'; -import {ListItem} from '@app/classes/list-item'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; - -@Component({ - selector: 'ul[data-component="dropdown-list"]', - templateUrl: './dropdown-list.component.html', - styleUrls: ['./dropdown-list.component.less'] -}) -export class DropdownListComponent implements OnInit, OnChanges, AfterViewChecked, OnDestroy { - - private shouldRenderAdditionalComponents = false; - - @Input() - items: ListItem[] = []; - - itemsSelected: ListItem[] = []; - - itemsUnSelected: ListItem[] = []; - - defaultSelection: ListItem[] = []; - - @Input() - isMultipleChoice? 
= false; - - @Input() - additionalLabelComponentSetter?: string; - - @Input() - actionArguments = []; - - @Output() - selectedItemChange: EventEmitter = new EventEmitter(); - - @ViewChildren('additionalComponent', { - read: ViewContainerRef - }) - containers: QueryList; - - @Input() - useLocalFilter = false; - - @Input() - useClearToDefaultSelection = false; - - @ViewChild('filter') - filterRef: ElementRef; - - @Input() - filterStr = ''; - - - @ViewChild('selectAll') - selectAllRef: ElementRef; - - private filterRegExp: RegExp; - - private subscriptions: Subscription[] = []; - - constructor( - private componentGenerator: ComponentGeneratorService, - private changeDetector: ChangeDetectorRef - ) {} - - ngOnInit() { - this.separateSelections(); - this.setDefaultSelection(this.items); - // trigger selection if any of the items has been checked - if (this.items.some((item: ListItem) => item.isChecked)) { - this.selectedItemChange.emit(this.items); - } - this.subscriptions.push( - this.selectedItemChange.subscribe(this.separateSelections) - ); - } - - ngOnDestroy() { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - - ngOnChanges(changes: SimpleChanges): void { - if (changes.hasOwnProperty('items')) { - const previousItems = changes.items.previousValue; - this.separateSelections(); - if ((!this.defaultSelection || !this.defaultSelection.length) && (!previousItems || !previousItems.length)) { - this.setDefaultSelection(this.items); - } - this.shouldRenderAdditionalComponents = true; - } - } - - ngAfterViewChecked() { - this.renderAdditionalComponents(); - } - - getSelectedItems(): ListItem[] { - return this.items ? this.items.filter((item: ListItem) => item.isChecked) : []; - } - - getUnSelectedItems(): ListItem[] { - return this.items ? 
this.items.filter((item: ListItem) => !item.isChecked) : []; - } - - private setDefaultSelection(items) { - this.defaultSelection = this.getSelectedItems(); - } - - private separateSelections = () => { - this.itemsSelected = this.getSelectedItems(); - this.itemsUnSelected = this.getUnSelectedItems(); - this.shouldRenderAdditionalComponents = true; - } - - private clearSelection() { - this.unSelectAll(); - } - - private clearToDefaultSelection() { - if (this.defaultSelection && this.defaultSelection.length) { - this.items.forEach((item: ListItem) => { - item.isChecked = this.defaultSelection.findIndex((defaultItem) => defaultItem.value === item.value) !== -1; - if (item.onSelect && item.isChecked) { - item.onSelect(...this.actionArguments); - } - }); - this.selectedItemChange.emit(this.items); - } - } - - onClearToDefaultSelectionClick = (event): void => { - event.preventDefault(); - event.stopPropagation(); - this.clearToDefaultSelection(); - } - onClearSelectionClick = (event): void => { - event.preventDefault(); - event.stopPropagation(); - this.clearSelection(); - } - - private changeAllSelection(event) { - event.stopPropagation(); - if (!this.selectAllRef.nativeElement.checked) { - this.selectAll(); - } else { - this.unSelectAll(); - } - } - - selectAll() { - this.items.forEach((item: ListItem) => { - item.isChecked = true; - if (item.onSelect) { - item.onSelect(...this.actionArguments); - } - }); - this.selectedItemChange.emit(this.items); - } - - unSelectAll() { - this.items.forEach((item: ListItem) => { - item.isChecked = false; - if (item.onSelect) { - item.onSelect(...this.actionArguments); - } - }); - this.selectedItemChange.emit(this.items); - } - - private onFilterInputKeyUp(event) { - if (this.useLocalFilter) { - this.filterRegExp = event.target.value ? new RegExp(`${event.target.value}`, 'gi') : null; - this.filterStr = event.target.value; - } - } - - private isFiltered = (item: ListItem): boolean => { - return this.useLocalFilter && this.filterRegExp && ( - !this.filterRegExp.test(item.value) - && - !this.filterRegExp.test(item.label) - ); - } - - private clearFilter = (event: MouseEvent): void => { - this.filterRegExp = null; - this.filterStr = ''; - } - - private renderAdditionalComponents(): void { - const setter = this.additionalLabelComponentSetter; - const containers = this.containers; - if (this.shouldRenderAdditionalComponents && setter && containers) { - containers.forEach((container, index) => this.componentGenerator[setter](this.items[index].value, container)); - this.shouldRenderAdditionalComponents = false; - this.changeDetector.detectChanges(); - } - } - - changeSelectedItem(item: ListItem, event?: MouseEvent): void { - if (item.onSelect) { - item.onSelect(...this.actionArguments); - } - this.selectedItemChange.emit(item); - } - - doItemsCheck() { - this.separateSelections(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/filter-dropdown/filter-dropdown.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/filter-dropdown/filter-dropdown.component.spec.ts deleted file mode 100644 index 1b081c811ce..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/filter-dropdown/filter-dropdown.component.spec.ts +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NO_ERRORS_SCHEMA} from '@angular/core'; -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {UtilsService} from '@app/services/utils.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {AuthService} from '@app/services/auth.service'; - -import {FilterDropdownComponent} from './filter-dropdown.component'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('FilterDropdownComponent', () => { - let component: FilterDropdownComponent; - let fixture: ComponentFixture; - const filtering = { - filters: { - f: { - options: [ - { - value: 'v0', - label: 'l0' - }, - { - value: 'v1', - label: 'l1' - } - ] - } - } - }; - - beforeEach(async(() => { - const httpClient = { - get: () => { - return { - subscribe: () => { - } - }; - } - }; - TestBed.configureTestingModule({ - declarations: [FilterDropdownComponent], - imports: [ - RouterTestingModule, - 
StoreModule.provideStore({ - appSettings, - appState, - auditLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogs, - serviceLogsFields, - serviceLogsHistogramData, - serviceLogsTruncated, - tabs, - clusters, - components, - hosts - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - AppSettingsService, - AppStateService, - AuditLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - ServiceLogsTruncatedService, - TabsService, - ClustersService, - ComponentsService, - HostsService, - { - provide: LogsContainerService, - useValue: filtering - }, - UtilsService, - LogsContainerService, - AuthService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ], - schemas: [NO_ERRORS_SCHEMA] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(FilterDropdownComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/filter-dropdown/filter-dropdown.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/filter-dropdown/filter-dropdown.component.ts deleted file mode 100644 index 6140e7d1179..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/filter-dropdown/filter-dropdown.component.ts +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, forwardRef} from '@angular/core'; -import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms'; -import {DropdownButtonComponent} from '@modules/shared/components/dropdown-button/dropdown-button.component'; -import {ListItem} from '@app/classes/list-item'; - -@Component({ - selector: 'filter-dropdown', - templateUrl: '../dropdown-button/dropdown-button.component.html', - styleUrls: ['../dropdown-button/dropdown-button.component.less'], - providers: [ - { - provide: NG_VALUE_ACCESSOR, - useExisting: forwardRef(() => FilterDropdownComponent), - multi: true - } - ] -}) -export class FilterDropdownComponent extends DropdownButtonComponent implements ControlValueAccessor { - - private onChange: (fn: any) => void; - - get selection(): ListItem[] { - return this.selectedItems; - } - - set selection(items: ListItem[]) { - this.selectedItems = items; - if (this.isMultipleChoice && this.options) { - this.options.forEach((option: ListItem): void => { - const selectionItem = items.find((item: ListItem): boolean => this.utils.isEqual(item.value, option.value)); - option.isChecked = Boolean(selectionItem); - }); - } - if (this.onChange) { - this.onChange(items); - } - } - - writeValue(items: ListItem[]) { - this.selection = items || []; - } - - registerOnChange(callback: any): void { - this.onChange = callback; - } - - registerOnTouched() { - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.html deleted file mode 100644 index ada351c5658..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.html +++ /dev/null @@ -1,19 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.less deleted file mode 100644 index f09c3d008cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.less +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -@import '../../main'; - -.circular-loader { - animation: rotate 2s linear infinite; - height: 20px; - transform-origin: center center; - width: 20px; -} -.loading-path { - stroke: @form-success-color; - stroke-dasharray: 150,200; - stroke-dashoffset: -10; - -webkit-animation: dash-loading 1.5s ease-in-out infinite, color 6s ease-in-out infinite; - animation: dash-loading 1.5s ease-in-out infinite, color 6s ease-in-out infinite; - stroke-linecap: round; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.spec.ts deleted file mode 100644 index 4d59ba02f2f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.spec.ts +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; - -import { LoadingIndicatorComponent } from './loading-indicator.component'; - -describe('LoadingIndicatorComponent', () => { - let component: LoadingIndicatorComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ LoadingIndicatorComponent ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(LoadingIndicatorComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.ts deleted file mode 100644 index 2a3ce674a1a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/loading-indicator/loading-indicator.component.ts +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { Component } from '@angular/core'; - -@Component({ - selector: 'loading-indicator', - templateUrl: './loading-indicator.component.html', - styleUrls: ['./loading-indicator.component.less'] -}) -export class LoadingIndicatorComponent { - - constructor() {} - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.html deleted file mode 100644 index a096b3b4080..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.html +++ /dev/null @@ -1,37 +0,0 @@ - - -
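The modal-dialog markup is not preserved here, but ModalDialogComponent (later in this diff) exposes title, visible, closeOnBackdropClick and an onCloseRequest output, so a host would use it roughly as in the hypothetical component below. The host class, its template text, and the projected paragraph are assumptions; the dialog only requests closing, while the host owns the visible flag.

// Hypothetical host for ModalDialogComponent, based on its inputs/outputs
// shown later in this diff; whether body content is projected via ng-content
// cannot be confirmed from what is preserved, so that part is an assumption.
import { Component } from '@angular/core';

@Component({
  selector: 'log-details-example', // illustrative selector
  template: `
    <button (click)="detailsVisible = true">Show details</button>
    <modal-dialog
      title="Log details"
      [visible]="detailsVisible"
      [closeOnBackdropClick]="true"
      (onCloseRequest)="detailsVisible = false">
      <p>Details body (assumed to be projected).</p>
    </modal-dialog>`
})
export class LogDetailsExampleComponent {
  // ModalDialogComponent emits onCloseRequest; the host decides whether to hide.
  detailsVisible = false;
}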
    diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.less deleted file mode 100644 index dea2c319df1..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.less +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - @import '../../variables'; -:host { - .modal-backdrop { - opacity: .5; - z-index: 1040; - } - .modal-dialog { - margin: 2.5rem auto; - max-width: 90vw; - width: auto; - z-index: 1045; - .modal-content { - display: flex; - flex-direction: column; - max-height: 90vh; - overflow: hidden; - padding: @modal-dialog-content-padding; - .modal-header { - display: flex; - flex-shrink: 1; - line-height: 1.42rem; - padding: @modal-dialog-header-padding; - position: relative; - &> * { - display: inline-block; - } - .close { - order: 1; - margin-left: auto; - } - } - .modal-body { - flex: 1; - overflow: auto; - padding: @modal-dialog-body-padding; - } - .modal-footer { - flex-shrink: 1; - padding: @modal-dialog-footer-padding; - } - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.spec.ts deleted file mode 100644 index 19cd74dc036..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.spec.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; - -import { - getCommonTestingBedConfiguration, MockHttpRequestModules, - TranslationModules -} from '@app/test-config.spec'; - -import { ModalDialogComponent } from './modal-dialog.component'; - -describe('ModalDialogComponent', () => { - let component: ModalDialogComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule(getCommonTestingBedConfiguration({ - imports: [ - ...TranslationModules - ], - declarations: [ ModalDialogComponent ] - })) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ModalDialogComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.ts deleted file mode 100644 index 362f34ff4b8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal-dialog/modal-dialog.component.ts +++ /dev/null @@ -1,94 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { - Component, - AfterViewInit, - Input, - Output, - EventEmitter, - ViewChild, - ChangeDetectorRef, - ElementRef -} from '@angular/core'; - -@Component({ - selector: 'modal-dialog', - templateUrl: './modal-dialog.component.html', - styleUrls: ['./modal-dialog.component.less'] -}) -export class ModalDialogComponent implements AfterViewInit { - - @Input() - title: string; - - @Input() - extraCssClass: string; - - @Input() - showCloseBtn = true; - - @Input() - showBackdrop = true; - - @Input() - closeOnBackdropClick = true; - - @Input() - visible = false; - - @Output() - onCloseRequest: EventEmitter = new EventEmitter(); - - @ViewChild('header') - headerElementRef: ElementRef; - - showHeader = true; - - @ViewChild('footer') - footerElementRef: ElementRef; - - showFooter = true; - - constructor(private cdRef: ChangeDetectorRef) { } - - ngAfterViewInit() { - let totalBuiltInHeaderElement = 0; - if (this.title) { - totalBuiltInHeaderElement += 1; - } - if (this.showCloseBtn) { - totalBuiltInHeaderElement += 1; - } - this.showHeader = this.showCloseBtn || !!this.title || ( - this.headerElementRef && (this.headerElementRef.nativeElement.children.length - totalBuiltInHeaderElement > 0) - ); - this.showFooter = this.footerElementRef && this.footerElementRef.nativeElement.children.length; - this.cdRef.detectChanges(); - } - - onCloseBtnClick(event: MouseEvent) { - this.onCloseRequest.emit(event); - } - - onBackdropClick(event: MouseEvent) { - if (this.closeOnBackdropClick) { - this.onCloseRequest.emit(event); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.html deleted file mode 100644 index 2584ccd6f6b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.html +++ /dev/null @@ -1,44 +0,0 @@ - - - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.less deleted file mode 100644 index 326bd6fe3ea..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.less +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -@import '../../../shared/variables'; - -.modal-xl { - width: @large-modal-width; -} - -.modal-flex-layout { - .modal-content { - display: flex; - flex-direction: column; - height: 85vh; - overflow: hidden; - .modal-body { - flex-grow: 1; - overflow: auto; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.spec.ts deleted file mode 100644 index acdcae8131f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.spec.ts +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {async, ComponentFixture, TestBed} from '@angular/core/testing'; -import {TranslationModules} from '@app/test-config.spec'; - -import {ModalComponent} from './modal.component'; - -describe('ModalComponent', () => { - let component: ModalComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - declarations: [ModalComponent], - imports: TranslationModules - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ModalComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create component', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.ts deleted file mode 100644 index 6d315117f4b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/components/modal/modal.component.ts +++ /dev/null @@ -1,132 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, OnInit, AfterViewInit, ElementRef, Input, Output, ContentChild, TemplateRef, EventEmitter} from '@angular/core'; -import * as $ from 'jquery'; - -@Component({ - selector: 'modal', - templateUrl: './modal.component.html', - styleUrls: ['./modal.component.less'] -}) -export class ModalComponent implements OnInit, AfterViewInit { - - constructor(private element: ElementRef) { - this.rootElement = $(element.nativeElement); - } - - ngOnInit() { - this.modalElements = this.rootElement.find('.in'); - this.show(); - } - - ngAfterViewInit() { - this.init.emit(); - } - - private rootElement: JQuery; - - private modalElements: JQuery; - - @Input() - showHeader: boolean = true; - - @Input() - title: string = ''; - - @Input() - showCloseButton: boolean = true; - - @Input() - bodyText: string = ''; - - @Input() - showFooter: boolean = true; - - @Input() - showSubmitButton: boolean = true; - - @Input() - submitButtonLabel: string = 'modal.submit'; - - @Input() - submitButtonClassName: string = 'btn-success'; - - @Input() - showCancelButton: boolean = true; - - @Input() - cancelButtonLabel: string = 'modal.cancel'; - - @Input() - cancelButtonClassName: string = 'btn-default'; - - @Input() - isSmallModal: boolean = false; - - @Input() - isLargeModal: boolean = false; - - @Input() - isExtraLargeModal: boolean = false; - - @Input() - isSubmitDisabled: boolean = false; - - @ContentChild(TemplateRef) - bodyTemplate; - - @Output() - init: EventEmitter = new EventEmitter(); - - @Output() - submit: EventEmitter = new EventEmitter(); - - @Output() - cancel: EventEmitter = new EventEmitter(); - - @Output() - close: EventEmitter = new EventEmitter(); - - @Input() - isFlexLayout: boolean = false; - - show(): void { - this.modalElements.show(); - } - - hide(): void { - this.modalElements.hide(); - } - - onSubmit(): void { - this.hide(); - this.submit.emit(); - } - - onCancel(): void { - this.hide(); - this.cancel.emit(); - } - - onClose(): void { - this.hide(); - this.close.emit(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/directives/disable-control.directive.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/directives/disable-control.directive.ts deleted file mode 100644 index edfe2f44904..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/directives/disable-control.directive.ts +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NgControl} from '@angular/forms'; -import {Directive, Input} from '@angular/core'; - -@Directive({ - selector: '[disableControl]' -}) -export class DisableControlDirective { - - @Input() set disableControl( condition: boolean ) { - const action = condition ? 
'disable' : 'enable'; - this.ngControl.control[action](); - } - - constructor( private ngControl: NgControl ) {} - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/forms.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/forms.less deleted file mode 100644 index 969154376b6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/forms.less +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import "variables"; - -.has-error { - .ng-invalid:not(form) { - border-color: @form-error-color; - } -} -.has-warning { - .ng-invalid:not(form) { - border-color: @form-warning-color; - } -} - -.ng-valid:not(form).ng-touched { - border-color: @form-success-color; -} - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/interfaces/api-endpoint-descriptor.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/interfaces/api-endpoint-descriptor.ts deleted file mode 100644 index dd54c6a294c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/interfaces/api-endpoint-descriptor.ts +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
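// Usage sketch (hypothetical component, not from the Ambari sources): toggling a reactive
// control's disabled state from the template via the disableControl directive defined in
// disable-control.directive.ts above. Assumes ReactiveFormsModule is available and the
// directive is declared or re-exported by the module in use (SharedModule exports it).
import { Component } from '@angular/core';
import { FormControl } from '@angular/forms';

@Component({
  selector: 'disable-control-example',
  template: `
    <input type="text" [formControl]="nameControl" [disableControl]="readOnly">
    <button (click)="readOnly = !readOnly">Toggle editing</button>
  `
})
export class DisableControlExampleComponent {
  nameControl = new FormControl('');
  readOnly = false;
}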
- */ - -export interface RequestDescriptor { - title?: string; - messages?: { - error?: string; - success?: string; - info?: string; - warning?: string; - }; - description?: string; -} - -export interface ApiEndPointDescriptor { - title?: string; - description?: string; - get?: RequestDescriptor; - post?: RequestDescriptor; - put?: RequestDescriptor; - patch?: RequestDescriptor; - delete?: RequestDescriptor; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/interfaces/notification.interface.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/interfaces/notification.interface.ts deleted file mode 100644 index b8b3d6a6c34..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/interfaces/notification.interface.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import {Options} from 'angular2-notifications/src/options.type'; - -export interface NotificationInterface extends Options { - type: string; - message: string; - title: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/main.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/main.less deleted file mode 100644 index d1201382c2b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/main.less +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import "variables"; -@import "mixins"; -@import "forms"; -@import "animations"; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/mixins.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/mixins.less deleted file mode 100644 index 3fab21de978..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/mixins.less +++ /dev/null @@ -1,213 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -@import 'variables'; - -// Mixins -.flex-vertical-align { - display: flex; - align-items: center; -} - -.default-flex { - .flex-vertical-align; - justify-content: space-around; -} - -.stretch-flex { - align-items: stretch; - display: flex; -} - -.common-hexagon(@side, @color) { - display: block; - position: absolute; - margin: (@side / 3.464101615) 0; - width: @side; - height: @side / 1.732050808; - background-color: @color; - - &:before, &:after { - display: block; - position: absolute; - width: 0; - border-left: (@side / 2) solid transparent; - border-right: (@side / 2) solid transparent; - content: ''; - } - - &:before { - bottom: 100%; - border-bottom: (@side / 3.464101615) solid @color; - } - - &:after { - top: 100%; - border-top: (@side / 3.464101615) solid @color; - } -} - -.clickable-item { - cursor: pointer; - color: @link-color; - - &:hover { - color: @link-hover-color; - } -} - -.full-size { - position: absolute; - top: 0; - right: 0; - bottom: 0; - left: 0; -} - -.dropdown-list-default { - line-height: 1; - border-radius: @dropdown-border-radius; - font-size: 14px; - min-width: @dropdown-min-width; - background: #FFF; - color: #666; - border: 1px solid #CFD3D7; - padding: 5px 0; - margin: 2px 0 0; - text-align: left; - list-style: none; - box-shadow: 0 6px 12px rgba(0, 0, 0, .175); -} - -.dropdown-item-default { - display: block; - color: #333; - cursor: pointer; - - &.active > a, &:hover { - color: #262626; - text-decoration: none; - background-color: #F5F5F5; - } -} - -.dropdown-item-child-default { - display: block; - min-height: 24px; - padding: 3px 20px; - clear: both; - font-weight: 400; - line-height: 1.42857143; - white-space: nowrap; -} - -.log-colors { - &.fatal { - color: @fatal-color; - } - - &.error { - color: @error-color; - } - - &.warn { - color: @warning-color; - } - - &.info { - color: @info-color; - } - - &.debug { - color: @debug-color; - } - - &.trace { - color: @trace-color; - } - - &.unknown { - color: @unknown-color; - } -} - -.grey { - color: @grey-color; -} - -.collapsed-form-control { - width: 0; - padding: 0; -} - -.inherited-color { - color: inherit; - - &:hover { - color: inherit; - } -} - -/** - * Caret mixin definition. 
- * The .caret mixin has two parameters: the width and the direction of the caret - * This is the Less implementation of the Bootstrap caret mixin: - * https://github.com/twbs/bootstrap/blob/v4-dev/scss/mixins/_caret.scss - */ - -// This small mixin creates the CSS for the down direction of a caret -.caret-direction(@caret-width, @direction, @color) when (@direction = down) { - border-top: @caret-width solid @color; - border-right: @caret-width solid transparent; - border-bottom: 0; - border-left: @caret-width solid transparent; -} -// This small mixin creates the CSS for the up direction -.caret-direction(@caret-width, @direction, @color) when (@direction = up) { - border-top: 0; - border-right: @caret-width solid transparent; - border-bottom: @caret-width solid @color; - border-left: @caret-width solid transparent; -} -// This small mixin creates the CSS for the right direction -.caret-direction(@caret-width, @direction, @color) when (@direction = right) { - border-top: @caret-width solid transparent; - border-right: 0; - border-bottom: @caret-width solid transparent; - border-left: @caret-width solid @color; -} -// This small mixin creates the CSS for the left direction -.caret-direction(@caret-width, @direction, @color) when (@direction = left) { - border-top: @caret-width solid transparent; - border-right: @caret-width solid @color; - border-bottom: @caret-width solid transparent; - border-left: 0; -} -// This is the content of the caret pseudo-element. It has been moved out of the .caret definition because -// property interpolation does not work there, so keeping it separate lets us use the guard conditions above -.caret-style(@caret-width, @direction, @color) { - display: inline-block; - width: 0; - height: 0; - vertical-align: @caret-width * .85; - content: ""; - .caret-direction(@caret-width, @direction, @color); -} -// This is the main caret mixin; it creates the common CSS and the direction-related CSS -.caret-mixin(@caret-width; @direction: down; @color: @base-font-color; @position: before) { - .caret-style(@caret-width, @direction, @color); -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/notifications.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/notifications.less deleted file mode 100644 index 9b19562dc9a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/notifications.less +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -@import "variables"; - -/deep/ simple-notification .simple-notification.app-notification { - color: @notification-color; - border-radius: 2px; - box-shadow: 0 5px 15px rgba(0,0,0,.5); - &.success { - border: 1px solid @form-success-color; - .fa { - color: @form-success-color; - } - } - &.info { - border: 1px solid @form-info-color; - .fa { - color: @form-info-color; - } - } - &.error { - border: 1px solid @form-error-color; - .fa { - color: @form-error-color; - } - } - .fa { - font-size: 16px; - position: absolute; - right: .25em; - top: .25em; - } - &.error, &.success, &.alert, &.info { - background: @notification-background-color; - } - .sn-title { - font-size: @notification-title-font-size; - padding: 0; - } - .sn-content { - font-size: @notification-content-font-size; - padding: 0; - } - .sn-progress-loader { - height: 2px; - span { - background: @unknown-color; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/can-deactivate-guard.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/can-deactivate-guard.service.spec.ts deleted file mode 100644 index 1cf0849a80b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/can-deactivate-guard.service.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { TestBed, inject } from '@angular/core/testing'; - -import { CanDeactivateGuardService } from './can-deactivate-guard.service'; - -describe('CanDeactivateGuardService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [CanDeactivateGuardService] - }); - }); - - it('should be created', inject([CanDeactivateGuardService], (service: CanDeactivateGuardService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/can-deactivate-guard.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/can-deactivate-guard.service.ts deleted file mode 100644 index 7065d207499..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/can-deactivate-guard.service.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import {Injectable} from '@angular/core'; -import {CanDeactivate} from '@angular/router'; -import {Observable} from 'rxjs/Observable'; - -export interface CanComponentDeactivate { - canDeactivate: () => Observable | Promise | boolean; -} - -@Injectable() -export class CanDeactivateGuardService implements CanDeactivate { - canDeactivate(component: CanComponentDeactivate) { - return component.canDeactivate ? component.canDeactivate() : true; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/notification.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/notification.service.ts deleted file mode 100644 index df6ca2a028f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/services/notification.service.ts +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; - -import {NotificationsService as Angular2NotificationsService} from 'angular2-notifications'; -import {Notification} from 'angular2-notifications/src/notification.type'; - -import {NotificationInterface} from '../interfaces/notification.interface'; -import {Icons, defaultIcons} from 'angular2-notifications/src/icons'; -import {TranslateService} from '@ngx-translate/core'; - -export enum NotificationType { - SUCCESS = 'success', - INFO = 'info', - ERROR = 'error', - ALERT = 'alert' -} - -export const notificationIcons: Icons = { - success: ``, - info: ``, - error: ``, - alert: `` -}; -Object.assign(defaultIcons, notificationIcons); - -export const messageTemplate = ` -
- {{title}}
- {{message}}
- {{icon}}
    -`; - -@Injectable() -export class NotificationService { - - constructor( - private notificationService: Angular2NotificationsService, - private translateService: TranslateService - ) { } - - addNotification(payload: NotificationInterface): Notification { - const {message, title, ...config} = payload; - const method: string = typeof this.notificationService[config.type] === 'function' ? config.type : 'info'; - if (config.type === NotificationType.ERROR) { - Object.assign(config, { - clickToClose: true, - timeOut: 0, - showProgressBar: false, - pauseOnHover: false, - ...config - }); - } - const icon = notificationIcons[method] || notificationIcons['info']; - const htmlMsg = messageTemplate - .replace(/{{title}}/gi, this.translateService.instant(title)) - .replace(/{{message}}/gi, this.translateService.instant(message)) - .replace(/{{icon}}/gi, icon); - return this.notificationService.html(htmlMsg, method, {icon, ...config}); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/shared.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/shared.module.ts deleted file mode 100644 index 82698521556..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/shared.module.ts +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
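// Usage sketch (hypothetical caller, not from the Ambari sources): emitting a notification
// through NotificationService.addNotification() with the type/title/message fields of
// NotificationInterface, the same call shape ShipperConfigurationComponent uses further
// down in this diff. The 'example.saved.title' / 'example.saved.message' keys are made up.
import { Component } from '@angular/core';
import { NotificationService, NotificationType } from '@modules/shared/services/notification.service';

@Component({
  selector: 'notification-example',
  template: `<button (click)="notifySaved()">Save</button>`
})
export class NotificationExampleComponent {
  constructor(private notificationService: NotificationService) {}

  notifySaved(): void {
    // title and message are translation keys; addNotification() resolves them via TranslateService
    this.notificationService.addNotification({
      type: NotificationType.SUCCESS,
      title: 'example.saved.title',
      message: 'example.saved.message'
    });
  }
}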
- */ - -import {NgModule} from '@angular/core'; -import {CommonModule} from '@angular/common'; -import {BrowserModule, Title} from '@angular/platform-browser'; -import {FormsModule} from '@angular/forms'; -import {Http} from '@angular/http'; -import {BrowserAnimationsModule} from '@angular/platform-browser/animations'; -import {NotificationsService as Angular2NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {TranslateModule, TranslateLoader} from '@ngx-translate/core'; -import {NgObjectPipesModule} from 'angular-pipes'; - -import {TranslateService as AppTranslateService} from '@app/services/translate.service'; - -import {NotificationService} from './services/notification.service'; - -import {CanDeactivateGuardService} from './services/can-deactivate-guard.service'; -import {DisableControlDirective} from './directives/disable-control.directive'; - -import {DropdownButtonComponent} from './components/dropdown-button/dropdown-button.component'; -import {DropdownListComponent} from './components/dropdown-list/dropdown-list.component'; -import {FilterDropdownComponent} from './components/filter-dropdown/filter-dropdown.component'; -import {ModalComponent} from './components/modal/modal.component'; -import { DataLoadingIndicatorComponent } from '@app/modules/shared/components/data-loading-indicator/data-loading-indicator.component'; -import { ModalDialogComponent } from './components/modal-dialog/modal-dialog.component'; -import { LoadingIndicatorComponent } from './components/loading-indicator/loading-indicator.component'; -import { CircleProgressBarComponent } from './components/circle-progress-bar/circle-progress-bar.component'; - -@NgModule({ - imports: [ - BrowserModule, - CommonModule, - FormsModule, - BrowserAnimationsModule, - NgObjectPipesModule, - TranslateModule.forChild({ - loader: { - provide: TranslateLoader, - useFactory: AppTranslateService.httpLoaderFactory, - deps: [Http] - } - }) - ], - declarations: [ - DisableControlDirective, - DropdownButtonComponent, - DropdownListComponent, - FilterDropdownComponent, - ModalComponent, - DataLoadingIndicatorComponent, - ModalDialogComponent, - LoadingIndicatorComponent, - CircleProgressBarComponent - ], - providers: [ - Title, - NotificationService, - CanDeactivateGuardService, - Angular2NotificationsService - ], - exports: [ - DisableControlDirective, - DropdownButtonComponent, - DropdownListComponent, - FilterDropdownComponent, - ModalComponent, - DataLoadingIndicatorComponent, - ModalDialogComponent, - LoadingIndicatorComponent, - CircleProgressBarComponent - ] -}) -export class SharedModule { } diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/variables.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/variables.less deleted file mode 100644 index 7ffd20c63e8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shared/variables.less +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// Variables -@blue: #1491C1; -@grey: #DDD; - -@fluid-gray-1: #ccc; -@fluid-gray-2: #999; -@fluid-gray-3: #666; -@fluid-gray-4: #333; - -@base-font-color: #666; -@navbar-background-color: #323544; -@navbar-logo-background-color: #303d54; -@h1-vertical-margin: 20px; -@button-border-radius: 4px; -@input-border-width: 1px; -@input-border: @input-border-width solid #CFD3D7; -@input-group-addon-padding: 6px 12px 6px 0; -@block-margin-top: 20px; -@link-color: @blue; -@link-hover-color: darken(@blue, 10%); -@grey-color: #DDD; -@default-line-height: 1.42857143; -@main-background-color: #ECECEC; -@filters-panel-background-color: #FFF; -@filters-panel-padding: 10px 0; -@list-header-background-color: #F2F2F2; -@checkbox-top: 4px; -@dropdown-min-width: 160px; -@dropdown-max-height: 60vh; // TODO get rid of magic number, base on actual design -@dropdown-border-radius: 2px; -@input-height: 34px; -@input-padding: 10px; -@col-padding: 15px; -@search-parameter-padding: 5px 2px; - -// Forms -@form-success-color: #1eb475; -@form-error-color: #ef6162; -@form-warning-color: #e98a40; -@form-info-color: @debug-color; - - -@fatal-color: #830A0A; -@error-color: #E81D1D; -@warning-color: #FF8916; -@info-color: #2577B5; -@debug-color: #65E8FF; -@trace-color: #888; -@unknown-color: #BDBDBD; -@submit-color: #5CB85C; -@submit-hover-color: #449D44; -@exclude-color: #EF6162; - -// Panels -@panel-heading: rgba(255, 255, 255, 1); - -// Badge -@badge-bg: rgba(239, 97, 98, 1); -@badge-padding: 2px 5px; - -// Icon -@icon-padding: 5px; - -// Table -@table-border-color: #EEE; -@table-font-size: 13px; - -// Graph -@graph-padding: .5rem; -@graph-invert-selection-background: @grey-color; - -// Log list -@log-list-row-data-padding: 8px; -@log-list-row-hover-background-color: #E7F6FC; -@log-list-row-hover-border-color: #A7DFF2; -@log-list-border-color: rgb(238, 238, 238); - -// Modals -@large-modal-width: 1200px; -@modal-dialog-content-padding: 2rem; -@modal-dialog-header-padding: 0 0 1rem 0; -@modal-dialog-body-padding: 0; -@modal-dialog-footer-padding: 1rem 0 0 0; - -// Notifications -@notification-color: @base-font-color; -@notification-title-font-size: 14px; -@notification-content-font-size: 12px; -@notification-background-color: #FFF; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.html deleted file mode 100644 index a065e3192ef..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.html +++ /dev/null @@ -1,32 +0,0 @@ - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.less deleted file mode 100644 index 
8294c4aab23..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.less +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.navigation-bar-container, .navigation-bar-container ul.nav.side-nav-header, -.navigation-bar-container ul.nav.side-nav-menu { - overflow: hidden; - text-overflow: ellipsis; - width: 100%; -} - -.navigation-header { - color: #fff; - font-size: 2rem; -} - -ul.nav.side-nav-menu li.active:not(.has-sub-menu) { - background-color: #fff; - a { - color: darken(#1491C1, 25%); - &:hover { - background-color: transparent; - } - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.ts deleted file mode 100644 index 17e5193e89c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component.ts +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Component, EventEmitter, Input, Output} from '@angular/core'; -import 'rxjs/add/operator/startWith'; -import {ShipperCluster} from '@modules/shipper/models/shipper-cluster.type'; -import {ShipperClusterService} from '@modules/shipper/models/shipper-cluster-service.type'; - -@Component({ - selector: 'shipper-cluster-service-list', - templateUrl: './shipper-cluster-service-list.component.html', - styleUrls: ['./shipper-cluster-service-list.component.less'] -}) -export class ShipperClusterServiceListComponent { - - @Input() - clusterName: ShipperCluster; - - @Input() - serviceNamesList: ShipperClusterService[]; - - @Input() - selectedServiceName: ShipperClusterService; - - @Input() - basePath: string[]; - - @Output() - selectionChange: EventEmitter = new EventEmitter(); - - constructor() {} - - onShipperClusterServiceItemSelect = (serviceName: ShipperClusterService, event?: MouseEvent): void => { - this.selectedServiceName = serviceName; - this.selectionChange.emit(this.selectedServiceName); - event && event.preventDefault(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.html deleted file mode 100644 index 610fb3ee36b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.html +++ /dev/null @@ -1,53 +0,0 @@ - -
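// Usage sketch (hypothetical parent component, not from the Ambari sources): embedding
// shipper-cluster-service-list and reacting to its selectionChange output. The bindings
// follow the @Input/@Output members declared in shipper-cluster-service-list.component.ts;
// the cluster and service names are placeholders.
import { Component } from '@angular/core';

@Component({
  selector: 'shipper-service-picker-example',
  template: `
    <shipper-cluster-service-list
      [clusterName]="clusterName"
      [serviceNamesList]="services"
      [selectedServiceName]="selectedService"
      [basePath]="['/shipper']"
      (selectionChange)="selectedService = $event">
    </shipper-cluster-service-list>
  `
})
export class ShipperServicePickerExampleComponent {
  clusterName = 'cl1';          // placeholder cluster name
  services = ['hdfs', 'yarn'];  // placeholder service names
  selectedService = 'hdfs';
}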
- {{'shipperConfiguration.title' | translate}}
- {{'shipperConfiguration.addConfigurationBtn' | translate}}
    diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.less deleted file mode 100644 index f416b074a77..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.less +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import "../../../shared/variables"; - -.shipper-configuration { - background-color: @filters-panel-background-color; - margin-top: 40px; - min-height: 100vh; -} - -a.btn, a.btn:focus, a.btn:visited { - color: #fff; -} - -/deep/ .navigation-bar-container { - padding-bottom: 1em; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.spec.ts deleted file mode 100644 index 0ec4f566908..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.spec.ts +++ /dev/null @@ -1,127 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { async, ComponentFixture, TestBed } from '@angular/core/testing'; - -import {ShipperConfigurationComponent} from './shipper-configuration.component'; -import {StoreModule} from '@ngrx/store'; -import {auditLogs, AuditLogsService} from '@app/services/storage/audit-logs.service'; -import {serviceLogsTruncated, ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service'; -import {components, ComponentsService} from '@app/services/storage/components.service'; -import {UtilsService} from '@app/services/utils.service'; -import {tabs, TabsService} from '@app/services/storage/tabs.service'; -import {serviceLogs, ServiceLogsService} from '@app/services/storage/service-logs.service'; -import {hosts, HostsService} from '@app/services/storage/hosts.service'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {auditLogsGraphData, AuditLogsGraphDataService} from '@app/services/storage/audit-logs-graph-data.service'; -import {serviceLogsHistogramData, ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service'; -import {clusters, ClustersService} from '@app/services/storage/clusters.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {appSettings, AppSettingsService} from '@app/services/storage/app-settings.service'; -import {appState, AppStateService} from '@app/services/storage/app-state.service'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {serviceLogsFields, ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {ShipperRoutingModule} from '@modules/shipper/shipper-routing.module'; -import {ShipperClusterServiceListComponent} from '@modules/shipper/components/shipper-cluster-service-list/shipper-cluster-service-list.component'; -import {ShipperServiceConfigurationFormComponent} from '@modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component'; -import {FormsModule, ReactiveFormsModule} from '@angular/forms'; -import {TypeaheadModule} from 'ngx-bootstrap'; -import {DisableControlDirective} from '@modules/shared/directives/disable-control.directive'; -import {ModalComponent} from '@modules/shared/components/modal/modal.component'; -import {RouterTestingModule} from '@angular/router/testing'; -import {ShipperClusterServiceListService} from '@modules/shipper/services/shipper-cluster-service-list.service'; -import {ShipperConfigurationService} from '@modules/shipper/services/shipper-configuration.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('ShipperConfigurationComponent', () => { - let component: ShipperConfigurationComponent; - let fixture: ComponentFixture; - - beforeEach(async(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - ShipperRoutingModule, - StoreModule.provideStore({ - hosts, - auditLogs, - serviceLogs, - auditLogsGraphData, - auditLogsFields, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - serviceLogsTruncated, - tabs - }), - ...TranslationModules, - FormsModule, - 
ReactiveFormsModule, - TypeaheadModule.forRoot() - ], - providers: [ - ...MockHttpRequestModules, - ComponentGeneratorService, - LogsContainerService, - UtilsService, - HostsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - ServiceLogsTruncatedService, - TabsService, - ComponentGeneratorService, - ClusterSelectionService, - ShipperClusterServiceListService, - ShipperConfigurationService, - NotificationsService, - NotificationService - ], - declarations: [ - ShipperConfigurationComponent, - ShipperClusterServiceListComponent, - ShipperServiceConfigurationFormComponent, - DisableControlDirective, - ModalComponent - ] - }) - .compileComponents(); - })); - - beforeEach(() => { - fixture = TestBed.createComponent(ShipperConfigurationComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('should create', () => { - expect(component).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.ts deleted file mode 100644 index d8fccbdd61b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-configuration/shipper-configuration.component.ts +++ /dev/null @@ -1,182 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
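// Wiring sketch (hypothetical route configuration, not from the Ambari sources): how the
// CanDeactivateGuardService and the CanComponentDeactivate contract shown earlier in this
// diff are typically attached to a route for ShipperConfigurationComponent, which
// implements canDeactivate() below. The path and import locations are illustrative assumptions.
import { Routes } from '@angular/router';
import { CanDeactivateGuardService } from '@modules/shared/services/can-deactivate-guard.service';
import { ShipperConfigurationComponent } from './components/shipper-configuration/shipper-configuration.component';

export const exampleShipperRoutes: Routes = [
  {
    path: 'shipper/:cluster/:service',          // ShipperConfigurationComponent reads the cluster and service params
    component: ShipperConfigurationComponent,
    canDeactivate: [CanDeactivateGuardService]  // the guard calls component.canDeactivate() before leaving the route
  }
];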
- */ -import {Component, Input, OnDestroy, OnInit, ViewChild} from '@angular/core'; -import {ActivatedRoute, Router} from '@angular/router'; -import {Response} from '@angular/http'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/skipWhile'; - -import {NotificationService, NotificationType} from '@modules/shared/services/notification.service'; -import {CanComponentDeactivate} from '@modules/shared/services/can-deactivate-guard.service'; - -import {ShipperCluster} from '../../models/shipper-cluster.type'; -import {ShipperClusterService} from '../../models/shipper-cluster-service.type'; -import {ShipperConfigurationService} from '../../services/shipper-configuration.service'; -import {ShipperClusterServiceListService} from '../../services/shipper-cluster-service-list.service'; -import { - ShipperServiceConfigurationFormComponent -} from '@modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component'; -import {TranslateService} from '@ngx-translate/core'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {Subscription} from 'rxjs/Subscription'; -import { BehaviorSubject } from 'rxjs/BehaviorSubject'; -import { FormGroup } from '@angular/forms'; - -@Component({ - selector: 'shipper-configuration', - templateUrl: './shipper-configuration.component.html', - styleUrls: ['./shipper-configuration.component.less'] -}) -export class ShipperConfigurationComponent implements CanComponentDeactivate, OnInit, OnDestroy { - - static clusterSelectionStoreKey = 'shipper'; - - @Input() - routerPath: string[] = ['/shipper']; - - @ViewChild(ShipperServiceConfigurationFormComponent) - configurationFormRef: ShipperServiceConfigurationFormComponent; - - private requestInProgress$: BehaviorSubject = new BehaviorSubject(false); - - private clusterName$: Observable = this.activatedRoute.params.map(params => params.cluster); - private serviceName$: Observable = this.activatedRoute.params.map(params => params.service); - - private serviceNamesList$: Observable = this.clusterName$.switchMap((cluster: ShipperCluster) => { - return cluster ? this.shipperClusterServiceListService.getServicesForCluster(cluster) : Observable.of(undefined); - }); - - private configuration$: Observable<{[key: string]: any}> = Observable.combineLatest( - this.clusterName$, - this.serviceName$ - ).switchMap(([clusterName, serviceName]: [ShipperCluster, ShipperClusterService]) => { - return clusterName && serviceName ? 
- this.shipperConfigurationService.loadConfiguration(clusterName, serviceName) : Observable.of(undefined); - }); - - private subscriptions: Subscription[] = []; - - validationResponse: {[key: string]: any}; - - constructor( - private router: Router, - private activatedRoute: ActivatedRoute, - private shipperClusterServiceListService: ShipperClusterServiceListService, - private shipperConfigurationService: ShipperConfigurationService, - private notificationService: NotificationService, - private translate: TranslateService, - private clusterSelectionStoreService: ClusterSelectionService - ) { } - - ngOnInit() { - this.subscriptions.push( - this.clusterSelectionStoreService - .getParameter(ShipperConfigurationComponent.clusterSelectionStoreKey) - .subscribe(this.onClusterSelectionChanged) - ); - } - - ngOnDestroy() { - if (this.subscriptions) { - this.subscriptions.forEach((subscription: Subscription) => subscription.unsubscribe()); - } - } - - private getPathMapForClusterFirstService(cluster: ShipperCluster): Observable { - return this.shipperClusterServiceListService.getServicesForCluster(cluster) - .switchMap((serviceNamesList: ShipperClusterService[]) => { - return Observable.of(this.getRouterLink([cluster, serviceNamesList[0]])); - }); - } - - onClusterSelectionChanged = (selection: ShipperCluster): void => { - let clusterName: ShipperCluster = selection; - if (Array.isArray(clusterName)) { - clusterName = clusterName.shift(); - } - if (clusterName) { - this.clusterName$.first().subscribe((currentClusterName: ShipperCluster) => { - if (currentClusterName !== clusterName) { - this.getPathMapForClusterFirstService(clusterName).first().subscribe((path: string[]) => this.router.navigate(path)); - } - }); - } - } - - private getRouterLink(path: string | string[]): string[] { - return [...this.routerPath, ...(Array.isArray(path) ? path : [path])]; - } - - getResponseHandler(cmd: string, type: string, form?: FormGroup) { - const msgVariables = form.getRawValue(); - return (response: Response) => { - const result = response.json(); - // @ToDo change the backend response status to some error code if the configuration is not valid and don't use the .message prop - const resultType = response ? (response.ok && !result.errorMessage ? NotificationType.SUCCESS : NotificationType.ERROR) : type; - const translateParams = {errorMessage: (result && result.message) || '', ...msgVariables, ...result}; - const title = this.translate.instant(`shipperConfiguration.action.${cmd}.title`, translateParams); - const message = this.translate.instant(`shipperConfiguration.action.${cmd}.${resultType}.message`, translateParams); - this.notificationService.addNotification({type: resultType, title, message}); - if (resultType !== NotificationType.ERROR) { - form.markAsPristine(); - } - this.requestInProgress$.next(false); - }; - } - - onConfigurationFormSubmit(configurationForm: FormGroup): void { - const rawValue = configurationForm.getRawValue(); - this.serviceNamesList$.first().subscribe((services: ShipperClusterService[]) => { - const cmd: string = services.indexOf(rawValue.serviceName) > -1 ? 
'update' : 'add'; - this.requestInProgress$.next(true); - this.shipperConfigurationService[`${cmd}Configuration`]({ - cluster: rawValue.clusterName, - service: rawValue.serviceName, - configuration: JSON.parse(rawValue.configuration) - }).subscribe( - this.getResponseHandler(cmd, NotificationType.SUCCESS, configurationForm), - this.getResponseHandler(cmd, NotificationType.ERROR, configurationForm) - ); - }); - } - - private setValidationResult = (result: {[key: string]: any}) => { - this.validationResponse = result; - } - - onValidationFormSubmit(validationForm: FormGroup): void { - this.validationResponse = null; - const rawValue = validationForm.getRawValue(); - const request$: Observable = this.shipperConfigurationService.testConfiguration(rawValue); - request$.subscribe( - this.getResponseHandler('validate', NotificationType.SUCCESS, validationForm), - this.getResponseHandler('validate', NotificationType.ERROR, validationForm) - ); - request$ - .filter((response: Response): boolean => response.ok) - .map((response: Response) => response.json()) - // @ToDo change the backend response status to some error code if the configuration is not valid and don't use the .errorMessage prop - .filter(result => result.errorMessage === undefined) - .subscribe(this.setValidationResult); - } - - canDeactivate() { - return this.configurationFormRef.canDeactivate(); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.html deleted file mode 100644 index bb7034ae285..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.html +++ /dev/null @@ -1,122 +0,0 @@ - - - {{item | translate}} - -
- {{(serviceName ? 'shipperConfiguration.form.titleEdit' : 'shipperConfiguration.form.titleAdd') | translate}}
- {{serviceName}}
- {{'shipperConfiguration.validator.title' | translate}}
- {{validationResponse | json}}
    - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.less deleted file mode 100644 index f6f371388d6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.less +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -@import "../../../shared/forms"; - -textarea { - @extend input.form-control - min-height: 5em; - resize: vertical; - width: 100%; - &.validation-result, - &.configuration { - min-height: 30em; - } - &.sample-data { - min-height: 8em; - } -} -label { - width: 100%; -} -.help-block.validation-block { - display: none; - font-size: .9em; - margin: 0; - opacity: 0; - transition: all 1s ease-in; -} -.has-error, .has-warning { - .help-block.validation-block { - display: inline-block; - opacity: 1; - } -} - -.shipper-form-configuration { - background-color: @main-background-color; - padding-bottom: 4em; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.ts deleted file mode 100644 index b41b9413e2c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/components/shipper-service-configuration-form/shipper-service-configuration-form.component.ts +++ /dev/null @@ -1,253 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {ChangeDetectorRef, Component, EventEmitter, Input, OnChanges, OnDestroy, OnInit, Output, SimpleChanges} from '@angular/core'; -import {AbstractControl, FormBuilder, FormGroup, ValidatorFn, Validators} from '@angular/forms'; -import {Observable} from 'rxjs/Observable'; -import {Subject} from 'rxjs/Subject'; -import {Observer} from 'rxjs/Observer'; -import 'rxjs/add/operator/startWith'; - -import {CanComponentDeactivate} from '@modules/shared/services/can-deactivate-guard.service'; - -import {ShipperCluster} from '../../models/shipper-cluster.type'; -import {ShipperClusterService} from '../../models/shipper-cluster-service.type'; -import {ShipperClusterServiceConfigurationInterface} from '../../models/shipper-cluster-service-configuration.interface'; -import {ShipperConfigurationModel} from '../../models/shipper-configuration.model'; -import * as formValidators from '../../directives/validator.directive'; -import {BehaviorSubject} from 'rxjs/BehaviorSubject'; -import {Subscription} from 'rxjs/Subscription'; -import {ActivatedRoute} from '@angular/router'; - -@Component({ - selector: 'shipper-configuration-form', - templateUrl: './shipper-service-configuration-form.component.html', - styleUrls: ['./shipper-service-configuration-form.component.less'] -}) -export class ShipperServiceConfigurationFormComponent implements OnInit, OnDestroy, OnChanges, CanComponentDeactivate { - - private configurationForm: FormGroup; - private validatorForm: FormGroup; - - @Input() - clusterName: ShipperCluster; - - @Input() - serviceName: ShipperClusterService; - - @Input() - configuration: ShipperClusterServiceConfigurationInterface; - - @Input() - existingServiceNames: Observable | ShipperClusterService[]; - - @Input() - validationResponse: {[key: string]: any}; - - @Input() - disabled = false; - - @Output() - configurationSubmit: EventEmitter = new EventEmitter(); - - @Output() - validationSubmit: EventEmitter = new EventEmitter(); - - private configurationComponents$: Observable; - - private isLeavingDirtyForm = false; - - private get clusterNameField(): AbstractControl { - return this.configurationForm.get('clusterName'); - } - - private get serviceNameField(): AbstractControl { - return this.configurationForm.get('serviceName'); - } - - private get configurationField(): AbstractControl { - return this.configurationForm.get('configuration'); - } - - private get componentNameField(): AbstractControl { - return this.validatorForm.get('componentName'); - } - - private get sampleDataField(): AbstractControl { - return this.validatorForm.get('sampleData'); - } - - private canDeactivateModalResult: Subject = new Subject(); - - private canDeactivateObservable$: Observable = Observable.create((observer: Observer) => { - this.subscriptions.push( - this.canDeactivateModalResult.subscribe((result: boolean) => { - observer.next(result); - }) - ); - }); - - private serviceNamesListSubject: BehaviorSubject = new BehaviorSubject([]); - - private subscriptions: Subscription[] = []; - - constructor( - private formBuilder: FormBuilder, - private activatedRoute: ActivatedRoute, - private changeDetectionRef: ChangeDetectorRef - ) { - // This is a fix to avoid the ExpressionChangedAfterItHasBeenCheckedError exception - // We create forms checking if there is serviceName set, so that is why we put this in the constructor. 
- this.createForms(); - } - - ngOnInit() { - this.subscriptions.push( - this.activatedRoute.params.map(params => params.service).subscribe((service) => { - this.serviceName = service; - }) - ); - if (!this.serviceName) { - this.configurationForm.controls.serviceName.setValidators([ - Validators.required, - formValidators.uniqueServiceNameValidator(this.serviceNamesListSubject) - ]); - this.changeDetectionRef.detectChanges(); - } - this.configurationComponents$ = this.configurationForm.controls.configuration.valueChanges.map((newValue: string): string[] => { - let components: string[]; - try { - const inputs: {[key: string]: any}[] = (newValue ? JSON.parse(newValue) : {}).input; - components = inputs && inputs.length ? inputs.map(input => input.type) : []; - } catch (error) { - components = []; - } - return components; - }).startWith([]); - if (this.existingServiceNames instanceof Observable) { - this.subscriptions.push( - this.existingServiceNames.subscribe((serviceNames: ShipperClusterService[]) => { - this.serviceNamesListSubject.next(serviceNames); - }) - ); - } else { - this.serviceNamesListSubject.next(this.existingServiceNames); - } - } - - ngOnChanges(changes: SimpleChanges): void { - if (this.configurationForm) { - Object.keys(changes).forEach((controlName: string) => { - if (this.configurationForm.controls[controlName]) { - let value: any = changes[controlName].currentValue; - if (controlName === 'configuration') { - value = value || new ShipperConfigurationModel(); - if (!(value instanceof String)) { - value = this.getConfigurationAsString(value); - } - } - if (this.configurationForm.controls[controlName].value !== value) { - this.configurationForm.controls[controlName].setValue(value); - this.configurationForm.markAsPristine(); - } - } - }); - } - if (this.validatorForm && changes.clusterName && this.validatorForm.controls.clusterName.value !== changes.clusterName.currentValue) { - this.validatorForm.controls.clusterName.setValue(changes.clusterName.currentValue); - this.validatorForm.markAsPristine(); - } - } - - ngOnDestroy() { - if (this.subscriptions) { - this.subscriptions.forEach(subscription => subscription.unsubscribe()); - } - } - - leaveDirtyFormConfirmed = () => { - this.canDeactivateModalResult.next(true); - this.isLeavingDirtyForm = false; - } - - leaveDirtyFormCancelled = () => { - this.canDeactivateModalResult.next(false); - this.isLeavingDirtyForm = false; - } - - canDeactivate(): Observable { - if (this.configurationForm.pristine) { - return Observable.of(true); - } - this.isLeavingDirtyForm = true; - return this.canDeactivateObservable$; - } - - getConfigurationAsString(configuration: ShipperClusterServiceConfigurationInterface): string { - return configuration ? JSON.stringify(configuration, null, 4) : ''; - } - - createForms(): void { - const configuration: ShipperClusterServiceConfigurationInterface = this.configuration || ( - this.serviceName ? 
this.configuration : new ShipperConfigurationModel() - ); - this.configurationForm = this.formBuilder.group({ - clusterName: this.formBuilder.control(this.clusterName, Validators.required), - serviceName: this.formBuilder.control( - this.serviceName, - [Validators.required] - ), - configuration: this.formBuilder.control( - this.getConfigurationAsString(configuration), - [Validators.required, formValidators.configurationValidator()] - ) - }); - - this.validatorForm = this.formBuilder.group({ - clusterName: this.formBuilder.control( - this.clusterName, - [Validators.required] - ), - componentName: this.formBuilder.control('', [ - Validators.required, - formValidators.getConfigurationServiceValidator(this.configurationForm.controls.configuration) - ]), - sampleData: this.formBuilder.control('', Validators.required), - configuration: this.formBuilder.control('', Validators.required) - }); - this.subscriptions.push( - this.configurationForm.valueChanges.subscribe(() => { - this.validatorForm.controls.componentName.updateValueAndValidity(); - this.validatorForm.controls.configuration.setValue(this.configurationForm.controls.configuration.value); - }) - ); - } - - onConfigurationSubmit(): void { - if (this.configurationForm.valid) { - this.configurationSubmit.emit(this.configurationForm); - } - } - - onValidationSubmit(): void { - if (this.validatorForm.valid) { - this.validationSubmit.emit(this.validatorForm); - } - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/directives/validator.directive.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/directives/validator.directive.spec.ts deleted file mode 100644 index 02d76adbfde..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/directives/validator.directive.spec.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { configurationValidator } from './validator.directive'; -import {ValidatorFn} from '@angular/forms'; - -describe('Validator', () => { - it('configurationValidator should return with a function', () => { - const validatorFn: ValidatorFn = configurationValidator(); - expect( typeof validatorFn === 'function').toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/directives/validator.directive.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/directives/validator.directive.ts deleted file mode 100644 index 50c12375a59..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/directives/validator.directive.ts +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {AbstractControl, ValidatorFn} from '@angular/forms'; -import {ShipperClusterService} from '@modules/shipper/models/shipper-cluster-service.type'; -import {ValidationErrors} from '@angular/forms/src/directives/validators'; -import {BehaviorSubject} from 'rxjs/BehaviorSubject'; - -export function configurationValidator(): ValidatorFn { - return (control: AbstractControl): ValidationErrors | null => { - try { - const json: {[key: string]: any} = JSON.parse(control.value); - return null; - } catch (error) { - return { - invalidJSON: {value: control.value} - }; - } - }; -} - -export function uniqueServiceNameValidator( - serviceNames: BehaviorSubject -): ValidatorFn { - return (control: AbstractControl): ValidationErrors | null => { - const services: ShipperClusterService[] = serviceNames.getValue(); - return services && services.indexOf(control.value) > -1 ? { - serviceNameExists: {value: control.value} - } : null; - }; -} - -export function getConfigurationServiceValidator(configControl: AbstractControl): ValidatorFn { - return (control: AbstractControl): ValidationErrors | null => { - let components: string[]; - try { - const inputs: {[key: string]: any}[] = (configControl.value ? JSON.parse(configControl.value) : {}).input; - components = inputs && inputs.length ? inputs.map(input => input.type) : []; - } catch (error) { - components = []; - } - return components.length && components.indexOf(control.value) === -1 ? { - serviceNameDoesNotExistInConfiguration: {value: control.value} - } : null; - }; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/interfaces/shipper-cluster-information.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/interfaces/shipper-cluster-information.ts deleted file mode 100644 index fa2bb81e276..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/interfaces/shipper-cluster-information.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {ShipperCluster} from '@modules/shipper/models/shipper-cluster.type'; -import {ShipperClusterService} from '@modules/shipper/models/shipper-cluster-service.type'; - -export interface ShipperClusterInformation { - cluster: ShipperCluster; - services: ShipperClusterService[]; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-configuration.interface.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-configuration.interface.ts deleted file mode 100644 index 087da5c2a3a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-configuration.interface.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export interface ShipperClusterServiceConfigurationInterface { - input?: [{ - type: string; - rowtype: string; - [key: string]: any - }]; - filter?: [{[key: string]: any}]; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-configuration.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-configuration.model.ts deleted file mode 100644 index b95bdf40b9c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-configuration.model.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {ShipperCluster} from './shipper-cluster.type'; -import {ShipperClusterService} from './shipper-cluster-service.type'; - -export interface ShipperClusterServiceConfigurationModel { - cluster: ShipperCluster; - service: ShipperClusterService; - configuration: {[key: string]: any}; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-validation.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-validation.model.ts deleted file mode 100644 index 0d1740f319f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service-validation.model.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import {ShipperCluster} from './shipper-cluster.type'; - -export interface ShipperClusterServiceValidationModel { - clusterName: ShipperCluster; - configuration: string; - componentName: string; - sampleData: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service.type.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service.type.ts deleted file mode 100644 index 4b30b2c9fdd..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster-service.type.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -export type ShipperClusterService = string; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster.type.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster.type.ts deleted file mode 100644 index eba4eda670b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-cluster.type.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export type ShipperCluster = string; diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-configuration.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-configuration.model.ts deleted file mode 100644 index eb054558da2..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/models/shipper-configuration.model.ts +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {ShipperClusterServiceConfigurationInterface} from '@modules/shipper/models/shipper-cluster-service-configuration.interface'; - -export const ShipperConfigurationModelTemplate: ShipperClusterServiceConfigurationInterface = { - input: [{ - type: '', - rowtype: '' - }], - filter: [{}] -}; - -export class ShipperConfigurationModel { - input: [{ - type: string; - rowtype: string; - [key: string]: any - }]; - filter: [{[key: string]: any}]; - constructor(cfg?: ShipperClusterServiceConfigurationInterface) { - Object.assign(this, cfg || ShipperConfigurationModelTemplate); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-cluster-service-list.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-cluster-service-list.service.spec.ts deleted file mode 100644 index 27be624d65f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-cluster-service-list.service.spec.ts +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { TestBed, inject } from '@angular/core/testing'; - -import { ShipperClusterServiceListService } from './shipper-cluster-service-list.service'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {appState, AppStateService} from '@app/services/storage/app-state.service'; -import {hosts} from '@app/services/storage/hosts.service'; -import {serviceLogs} from '@app/services/storage/service-logs.service'; -import {StoreModule} from '@ngrx/store'; -import {clusters} from '@app/services/storage/clusters.service'; -import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import {appSettings} from '@app/services/storage/app-settings.service'; -import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {tabs} from '@app/services/storage/tabs.service'; -import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service'; -import {auditLogs} from '@app/services/storage/audit-logs.service'; -import {components} from '@app/services/storage/components.service'; - -describe('ShipperClusterServiceListService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - StoreModule.provideStore({ - hosts, - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - ShipperClusterServiceListService, - AppStateService - ] - }); - }); - - it('should be created', inject([ShipperClusterServiceListService], (service: ShipperClusterServiceListService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-cluster-service-list.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-cluster-service-list.service.ts deleted file mode 100644 index 1021b88dfea..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-cluster-service-list.service.ts +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { Injectable } from '@angular/core'; -import {HttpClientService} from '@app/services/http-client.service'; -import {Observable} from 'rxjs/Observable'; -import {ShipperClusterService} from '@modules/shipper/models/shipper-cluster-service.type'; - -@Injectable() -export class ShipperClusterServiceListService { - - constructor( - private httpClientService: HttpClientService - ) { } - - getServicesForCluster(cluster: string): Observable { - return this.httpClientService.get('shipperClusterServiceList', null, {cluster}) - .map((response) => response.json()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-configuration.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-configuration.service.spec.ts deleted file mode 100644 index f3c5adc6ce3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-configuration.service.spec.ts +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { TestBed, inject } from '@angular/core/testing'; - -import { ShipperConfigurationService } from './shipper-configuration.service'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {appState, AppStateService} from '@app/services/storage/app-state.service'; -import {StoreModule} from '@ngrx/store'; -import {hosts} from '@app/services/storage/hosts.service'; -import {serviceLogs} from '@app/services/storage/service-logs.service'; -import {clusters} from '@app/services/storage/clusters.service'; -import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import {appSettings} from '@app/services/storage/app-settings.service'; -import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {tabs} from '@app/services/storage/tabs.service'; -import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service'; -import {auditLogs} from '@app/services/storage/audit-logs.service'; -import {components} from '@app/services/storage/components.service'; - -describe('ShipperConfigurationService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [StoreModule.provideStore({ - hosts, - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - serviceLogsTruncated, - tabs - }), ...TranslationModules], - providers: [...MockHttpRequestModules, ShipperConfigurationService, AppStateService] - }); - }); - - it('should be created', inject([ShipperConfigurationService], (service: ShipperConfigurationService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-configuration.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-configuration.service.ts deleted file mode 100644 index b538c0c22f5..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper-configuration.service.ts +++ /dev/null @@ -1,101 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { Injectable } from '@angular/core'; -import {Response, ResponseOptions, ResponseType} from '@angular/http'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/catch'; - -import {HttpClientService} from '@app/services/http-client.service'; -import {ShipperClusterServiceConfigurationModel} from '@modules/shipper/models/shipper-cluster-service-configuration.model'; -import {ShipperClusterServiceValidationModel} from '@modules/shipper/models/shipper-cluster-service-validation.model'; - -@Injectable() -export class ShipperConfigurationService { - - constructor( - private httpClientService: HttpClientService - ) { } - - createResponseWithConfigBody(configuration: ShipperClusterServiceConfigurationModel, originalResponse?: Response): Response { - return new Response( - new ResponseOptions({ - body: configuration, - status: originalResponse ? originalResponse.status : null, - statusText: originalResponse ? originalResponse.statusText : null, - headers: originalResponse ? originalResponse.headers : null, - type: originalResponse ? originalResponse.type : ResponseType.Basic, - url: originalResponse ? originalResponse.url : '' - }) - ); - } - - addConfiguration(configuration: ShipperClusterServiceConfigurationModel): Observable { - return this.httpClientService.post( - 'shipperClusterServiceConfiguration', - configuration.configuration, - null, - { - cluster: configuration.cluster, - service: configuration.service - }) - .map((response: Response): Response => this.createResponseWithConfigBody(configuration, response)) - .catch((error: Response): Observable => { - return Observable.of(error); - }); - } - - updateConfiguration(configuration: ShipperClusterServiceConfigurationModel): Observable { - return this.httpClientService.put( - 'shipperClusterServiceConfiguration', - configuration.configuration, - null, - { - cluster: configuration.cluster, - service: configuration.service - }) - .map((response: Response): Response => this.createResponseWithConfigBody(configuration, response)) - .catch((error: Response): Observable => { - return Observable.of(error); - }); - } - - loadConfiguration(cluster: string, service: string): Observable<{[key: string]: any}> { - return this.httpClientService.get('shipperClusterServiceConfiguration', null, { - service: service, - cluster: cluster - }) - .map((response) => { - return response.json(); - }) - .catch((error: Response): Observable => { - return Observable.of(new Error(error.json().message || '')); - }); - } - - testConfiguration(payload: ShipperClusterServiceValidationModel): Observable { - const requestPayload: {[key: string]: any} = { - shipperConfig: encodeURIComponent(payload.configuration), - logId: payload.componentName, - testEntry: payload.sampleData - }; - return this.httpClientService.postFormData('shipperClusterServiceConfigurationTest', requestPayload, null, { - cluster: payload.clusterName - }); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper.guard.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper.guard.ts deleted file mode 100644 index 4c92c058b7d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/services/shipper.guard.ts +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import {Injectable} from '@angular/core'; -import {CanActivate, ActivatedRouteSnapshot, RouterStateSnapshot, Router} from '@angular/router'; -import {Observable} from 'rxjs/Observable'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {ClustersService} from '@app/services/storage/clusters.service'; -import {ShipperClusterServiceListService} from '@modules/shipper/services/shipper-cluster-service-list.service'; -import {NotificationService, NotificationType} from '@modules/shared/services/notification.service'; -import {TranslateService} from '@ngx-translate/core'; - -@Injectable() -export class ShipperGuard implements CanActivate { - - constructor ( - private routingUtilsService: RoutingUtilsService, - private router: Router, - private clustersStoreService: ClustersService, - private shipperClusterServiceListService: ShipperClusterServiceListService, - private translateService: TranslateService, - private notificationService: NotificationService - ) {} - - getFirstCluster(): Observable { - return this.clustersStoreService.getAll().map((clusters: string[]) => Array.isArray(clusters) ? clusters[0] : clusters); - } - - getFirstServiceForCluster(cluster: string): Observable { - return this.shipperClusterServiceListService.getServicesForCluster(cluster) - .map((services: string[]) => Array.isArray(services) ? services[0] : services); - } - - canActivate( - next: ActivatedRouteSnapshot, - state: RouterStateSnapshot - ): Observable | Promise | boolean { - const cluster: string = this.routingUtilsService.getParamFromActivatedRouteSnapshot(next, 'cluster'); - const service: string = this.routingUtilsService.getParamFromActivatedRouteSnapshot(next, 'service'); - return this.clustersStoreService.getAll().filter(clusters => clusters.length).first() - .map((clusters: string[]) => { - return clusters.indexOf(cluster) === -1 ? clusters[0] : cluster; - }) // checking cluster - .switchMap((validCluster: string) => { - return this.shipperClusterServiceListService.getServicesForCluster(validCluster) // getting valid services for validCluster - .map((services: string[]) => { - const canActivate: boolean = cluster === validCluster && service && ('add' === service || services.indexOf(service) > -1); - // redirect if the cluster changed or the service is not in the valid services and it is not add new service path - if (!canActivate) { - const title = 'shipperConfiguration.navigation.title'; - const invalidKey: string = cluster === validCluster ? 
'invalidService' : 'invalidCluster'; - if (cluster || service) { - const message = this.translateService.instant(`shipperConfiguration.navigation.${invalidKey}`, { - cluster: cluster || '', - service: service || '' - }); - this.notificationService.addNotification({ - title, - message, - type: NotificationType.ERROR - }); - } - this.router.navigate(['/shipper', validCluster, services[0]]); - } - return canActivate; - }); - }); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/shipper-routing.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/shipper-routing.module.ts deleted file mode 100644 index 5c92de06729..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/shipper-routing.module.ts +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NgModule} from '@angular/core'; -import {RouterModule, Routes} from '@angular/router'; - -import {AuthGuardService} from '@app/services/auth-guard.service'; -import {CanDeactivateGuardService} from '@modules/shared/services/can-deactivate-guard.service'; - -import {ShipperConfigurationComponent} from './components/shipper-configuration/shipper-configuration.component'; -import {ShipperGuard} from '@modules/shipper/services/shipper.guard'; - -const shipperRoutes: Routes = [{ - path: 'shipper/:cluster/add', - component: ShipperConfigurationComponent, - data: { - breadcrumbs: ['shipperConfiguration.breadcrumbs.title', 'shipperConfiguration.breadcrumbs.add'], - multiClusterFilter: false - }, - canActivate: [AuthGuardService], - canDeactivate: [CanDeactivateGuardService] -}, { - path: 'shipper/:cluster/:service', - component: ShipperConfigurationComponent, - data: { - breadcrumbs: ['shipperConfiguration.breadcrumbs.title', 'shipperConfiguration.breadcrumbs.update'], - multiClusterFilter: false - }, - canActivate: [AuthGuardService, ShipperGuard], - canDeactivate: [CanDeactivateGuardService] -}, { - path: 'shipper/:cluster', - component: ShipperConfigurationComponent, - data: { - breadcrumbs: 'shipperConfiguration.breadcrumbs.title', - multiClusterFilter: false - }, - canActivate: [AuthGuardService, ShipperGuard] -}, { - path: 'shipper', - component: ShipperConfigurationComponent, - data: { - breadcrumbs: 'shipperConfiguration.breadcrumbs.title', - multiClusterFilter: false - }, - canActivate: [AuthGuardService, ShipperGuard] -}]; - -@NgModule({ - imports: [ - RouterModule.forChild(shipperRoutes) - ], - exports: [ - RouterModule - ] -}) -export class ShipperRoutingModule {} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/shipper.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/shipper.module.ts deleted file mode 100644 index 
1f6eddf54a1..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/shipper.module.ts +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {NgModule} from '@angular/core'; -import {ReactiveFormsModule} from '@angular/forms'; -import {BrowserModule} from '@angular/platform-browser'; -import {Http} from '@angular/http'; -import {TranslateModule, TranslateLoader} from '@ngx-translate/core'; - -import {TypeaheadModule} from 'ngx-bootstrap'; - -import {SharedModule} from '@modules/shared/shared.module'; - -import {TranslateService as AppTranslateService} from '@app/services/translate.service'; - -import {ShipperRoutingModule} from './shipper-routing.module'; -import {ShipperClusterServiceListComponent} from './components/shipper-cluster-service-list/shipper-cluster-service-list.component'; -import {ShipperServiceConfigurationFormComponent} from './components/shipper-service-configuration-form/shipper-service-configuration-form.component'; -import {ShipperConfigurationStore} from './stores/shipper-configuration.store'; -import {ShipperConfigurationComponent} from './components/shipper-configuration/shipper-configuration.component'; -import {ShipperClusterServiceListService} from './services/shipper-cluster-service-list.service'; -import {ShipperConfigurationService} from './services/shipper-configuration.service'; -import {ShipperGuard} from '@modules/shipper/services/shipper.guard'; - -@NgModule({ - imports: [ - BrowserModule, - ReactiveFormsModule, - SharedModule, - TypeaheadModule.forRoot(), - ShipperRoutingModule, - TranslateModule.forChild({ - loader: { - provide: TranslateLoader, - useFactory: AppTranslateService.httpLoaderFactory, - deps: [Http] - } - }) - ], - declarations: [ - ShipperClusterServiceListComponent, - ShipperServiceConfigurationFormComponent, - ShipperConfigurationComponent - ], - providers: [ - ShipperConfigurationStore, - ShipperConfigurationService, - ShipperClusterServiceListService, - ShipperGuard - ] -}) -export class ShipperModule {} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/stores/shipper-configuration.store.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/stores/shipper-configuration.store.ts deleted file mode 100644 index 8867a88a23a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/stores/shipper-configuration.store.ts +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, getObjectReducer, ObjectModelService} from '@app/classes/models/store'; -import {ShipperClusterServiceConfigurationModel} from "@modules/shipper/models/shipper-cluster-service-configuration.model"; - -export const modelName = 'shipperConfiguration'; - -@Injectable() -export class ShipperConfigurationStore extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } - - private getKeyForConfiguration(configuration: ShipperClusterServiceConfigurationModel) { - return `${configuration.cluster}/${configuration.service}` - } - - addConfiguration(configuration: ShipperClusterServiceConfigurationModel) { - this.setParameter(this.getKeyForConfiguration(configuration), configuration); - } -} - -export const shipperConfigurationReducer = getObjectReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/stores/shipper-service.store.ts b/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/stores/shipper-service.store.ts deleted file mode 100644 index bfff68965ce..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/modules/shipper/stores/shipper-service.store.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'shipperService'; - -@Injectable() -export class ShipperServiceStore extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const shipperService = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/component-label.ts b/ambari-logsearch/ambari-logsearch-web/src/app/pipes/component-label.ts deleted file mode 100644 index c3c0257f527..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/component-label.ts +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Pipe, PipeTransform} from '@angular/core'; -import {ComponentsService} from "@app/services/storage/components.service"; -import {Observable} from "rxjs/Observable"; - -@Pipe({ - name: 'componentLabel' -}) -export class ComponentLabelPipe implements PipeTransform { - - constructor(private componentService: ComponentsService) { - } - - transform(name: string): Observable { - return this.componentService.findInCollection(component => component.name === name) - .map(component => component ? component.label || component.name : name); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timer-seconds.pipe.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timer-seconds.pipe.spec.ts deleted file mode 100644 index a81e0ccbaae..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timer-seconds.pipe.spec.ts +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {TestBed, inject} from '@angular/core/testing'; -import {UtilsService} from '@app/services/utils.service'; - -import {TimerSecondsPipe} from './timer-seconds.pipe'; - -describe('TimerSecondsPipe', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [ - UtilsService - ] - }); - }); - - it('create an instance', inject([UtilsService], (utils: UtilsService) => { - const pipe = new TimerSecondsPipe(utils); - expect(pipe).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timer-seconds.pipe.ts b/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timer-seconds.pipe.ts deleted file mode 100644 index 1b9dc76ec00..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timer-seconds.pipe.ts +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Pipe, PipeTransform} from '@angular/core'; -import {UtilsService} from '@app/services/utils.service'; - -@Pipe({ - name: 'timerSeconds' -}) -export class TimerSecondsPipe implements PipeTransform { - - constructor(private utils: UtilsService) { - } - - transform(value: number): string { - const seconds = value % 60, - outputSeconds = this.utils.fitIntegerDigitsCount(seconds), - fullMinutes = (value - seconds) / 60, - minutes = fullMinutes % 60, - outputMinutes = this.utils.fitIntegerDigitsCount(minutes), - hours = (fullMinutes - minutes) / 60, - outputHours = hours ? `${this.utils.fitIntegerDigitsCount(hours)}:` : ''; - return `${outputHours}${outputMinutes}:${outputSeconds}`; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timezone-abbr.pipe.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timezone-abbr.pipe.spec.ts deleted file mode 100644 index 0d0c24c05a0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timezone-abbr.pipe.spec.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {TimeZoneAbbrPipe} from './timezone-abbr.pipe'; - -describe('TimeZoneAbbrPipe', () => { - it('create an instance', () => { - const pipe = new TimeZoneAbbrPipe(); - expect(pipe).toBeTruthy(); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timezone-abbr.pipe.ts b/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timezone-abbr.pipe.ts deleted file mode 100644 index f4aab0b01cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/pipes/timezone-abbr.pipe.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Pipe, PipeTransform} from '@angular/core'; -import * as moment from 'moment-timezone'; - -@Pipe({ - name: 'timeZoneAbbr' -}) -export class TimeZoneAbbrPipe implements PipeTransform { - - transform(value: string): string { - return moment.tz.zone(value).abbr(moment().valueOf()); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/auth-guard.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/auth-guard.service.ts deleted file mode 100644 index 8b562392245..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/auth-guard.service.ts +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {ActivatedRouteSnapshot, CanActivate, Router, RouterStateSnapshot} from '@angular/router'; -import {Observable} from 'rxjs/Observable'; - -import {AuthService} from '@app/services/auth.service'; - -/** - * This guard goal is to prevent to display screens where authorization needs. 
- */ -@Injectable() -export class AuthGuardService implements CanActivate { - - constructor(private authService: AuthService, private router: Router) {} - - canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable { - return this.authService.isAuthorized().map((isAuthorized: boolean) => { - this.authService.redirectUrl = state.url; - if (!isAuthorized) { - this.router.navigate(['/login']); - } - return isAuthorized; - }); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/auth.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/auth.service.spec.ts deleted file mode 100644 index 74a7125bdf0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/auth.service.spec.ts +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {TestBed, inject, async} from '@angular/core/testing'; -import {HttpModule} from '@angular/http'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/first'; -import 'rxjs/add/operator/last'; -import 'rxjs/add/operator/take'; -import {StoreModule} from '@ngrx/store'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {AuthService} from '@app/services/auth.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {Routes} from '@angular/router'; -import {Component} from '@angular/core'; - -describe('AuthService', () => { - - const successResponse = { - type: 'default', - ok: true, - url: '/', - status: 200, - statusText: 'OK', - bytesLoaded: 100, - totalBytes: 100, - headers: null - }, - errorResponse = { - type: 'error', - ok: false, - url: '/', - status: 401, - statusText: 'ERROR', - bytesLoaded: 100, - totalBytes: 100, - headers: null - }; - - // Note: We add delay to help the isLoginInProgress test case. - let httpServiceStub = { - isError: false, - postFormData: function () { - const isError = this.isError; - return Observable.create(observer => observer.next(isError ? 
errorResponse : successResponse)).delay(1); - } - }; - - beforeEach(() => { - const testRoutes: Routes = [{ - path: 'login', - component: Component, - data: { - breadcrumbs: 'login.title' - } - }]; - TestBed.configureTestingModule({ - imports: [ - HttpModule, - StoreModule.provideStore({ - appState - }), - RouterTestingModule.withRoutes(testRoutes) - ], - providers: [ - AuthService, - AppStateService, - {provide: HttpClientService, useValue: httpServiceStub} - ] - }); - }); - - it('should create service', inject([AuthService], (service: AuthService) => { - expect(service).toBeTruthy(); - })); - - it('should set the isAuthorized state to true in appState when the login is success', async(inject( - [AuthService, AppStateService, HttpClientService], - (authService: AuthService, appStateService: AppStateService, httpClientService) => { - httpClientService.isError = false; - authService.login('test', 'test') - .subscribe(() => { - appStateService.getParameter('isAuthorized').subscribe((value: Boolean): void => { - expect(value).toBe(true); - }); - }, value => { - throw value; - }); - } - ))); - - - it('should set the isAuthorized state to false in appState when the login is failed', async(inject( - [AuthService, AppStateService, HttpClientService], - (authService: AuthService, appStateService: AppStateService, httpClientService) => { - httpClientService.isError = true; - authService.login('test', 'test') - .subscribe(() => { - appStateService.getParameter('isAuthorized').subscribe((value: Boolean): void => { - expect(value).toBe(false); - }); - }); - } - ))); - - it('should set the isLoginInProgress state to true when the login started', async(inject( - [AuthService, AppStateService, HttpClientService], - (authService: AuthService, appStateService: AppStateService, httpClientService) => { - httpClientService.isError = false; - authService.login('test', 'test'); - appStateService.getParameter('isLoginInProgress').first().subscribe((value: Boolean): void => { - expect(value).toBe(true); - }); - } - ))); - - it('should set the isLoginInProgress state to true after the login is success', async(inject( - [AuthService, AppStateService, HttpClientService], - (authService: AuthService, appStateService: AppStateService, httpClientService) => { - httpClientService.isError = false; - authService.login('test', 'test'); - appStateService.getParameter('isLoginInProgress').take(2).last().subscribe((value: Boolean): void => { - expect(value).toBe(false); - }); - } - ))); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/auth.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/auth.service.ts deleted file mode 100644 index 1bf18758613..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/auth.service.ts +++ /dev/null @@ -1,186 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -
-import {Injectable} from '@angular/core'; -import {Response} from '@angular/http'; - -import {Observable} from 'rxjs/Observable'; -
-import {HttpClientService} from '@app/services/http-client.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {Router} from '@angular/router'; -import {Subscription} from 'rxjs/Subscription'; -import { Observer } from 'rxjs/Observer'; -
-export const IS_AUTHORIZED_APP_STATE_KEY: string = 'isAuthorized'; -export const IS_LOGIN_IN_PROGRESS_APP_STATE_KEY: string = 'isLoginInProgress'; -
-/** - * This service is meant to be the single place where authorization happens. - */ -@Injectable() -export class AuthService { -
- private subscriptions: Subscription[] = []; -
- /** - * A string set by any service or component (mainly from the AuthGuard service) to redirect the application after the - * authorization is done. - * @type string - */ - redirectUrl: string | string[]; -
- constructor( - private httpClient: HttpClientService, - private appState: AppStateService, - private router: Router - ) { - this.subscriptions.push(this.appState.getParameter(IS_AUTHORIZED_APP_STATE_KEY).subscribe( - this.onAppStateIsAuthorizedChanged - )); - } -
- onAppStateIsAuthorizedChanged = (isAuthorized): void => { - if (isAuthorized) { - const redirectTo = this.redirectUrl || (this.router.routerState.snapshot.url === '/login' ? '/' : null); - if (redirectTo) { - if (Array.isArray(redirectTo)) { - this.router.navigate(redirectTo); - } else { - this.router.navigateByUrl(redirectTo); - } - } - this.redirectUrl = ''; - } else { - this.router.navigate(['/login']); - } - } -
- /** - * The single entry point to request a login action. - * @param {string} username - * @param {string} password - * @returns {Observable<boolean>} - */ - login(username: string, password: string): Observable<boolean> { - this.setLoginInProgressAppState(true); - const response$ = this.httpClient.postFormData('login', { - username: username, - password: password - }); - response$.subscribe( - (resp: Response) => this.onLoginResponse(resp), - (resp: Response) => this.onLoginError(resp) - ); - return response$.switchMap((resp: Response) => { - return Observable.create((observer: Observer<boolean>) => { - if (resp.ok) { - observer.next(resp.ok); - } else { - observer.error(resp); - } - observer.complete(); - }); - }); - } -
- /** - * The single entry point to request a logout action. - * @returns {Observable<boolean>} - */ - logout(): Observable<boolean> { - const response$ = this.httpClient.get('logout'); - response$.subscribe( - (resp: Response) => this.onLogoutResponse(resp), - (resp: Response) => this.onLogoutError(resp) - ); - return response$.switchMap((resp: Response) => { - return Observable.create((observer) => { - if (resp.ok) { - observer.next(resp.ok); - } else { - observer.error(resp); - } - observer.complete(); - }); - }); - } -
- /** - * Set the isLoginInProgress state in AppState. The reason for having a dedicated function is that we set this app - * state from two different places, so let's always do it the same way. - * @param {boolean} state The new value of the isLoginInProgress app state. - */ - private setLoginInProgressAppState(state: boolean) { - this.appState.setParameter(IS_LOGIN_IN_PROGRESS_APP_STATE_KEY, state); - } -
- /** - * Set the isAuthorized state in AppState. The reason for having a dedicated function is that we set this app - * state from two different places, so let's always do it the same way. - * @param {boolean} state The new value of the isAuthorized app state. - */ - private setAuthorizedAppState(state: boolean) { - this.appState.setParameter(IS_AUTHORIZED_APP_STATE_KEY, state); - } -
- /** - * Handling the login success response. The goal is to set the authorized property of the appState. - * @param resp - */ - private onLoginResponse(resp: Response): void { - this.setLoginInProgressAppState(false); - if (resp && resp.ok) { - this.setAuthorizedAppState(resp.ok); - } - } -
- /** - * Handling the login error response. The goal is to set the authorized property of the appState correctly. - * @ToDo decide if we should have a loginError app state. - * @param {Response} resp - */ - private onLoginError(resp: Response): void { - this.setLoginInProgressAppState(false); - this.setAuthorizedAppState(false); - } -
- /** - * Handling the logout success response. The goal is to set the authorized property of the appState. - * @param {Response} resp - */ - private onLogoutResponse(resp: Response): void { - if (resp && resp.ok) { - this.setAuthorizedAppState(false); - } - } -
- /** - * Handling the logout error response. - * @ToDo decide if we should create a logoutError app state or not - * @param {Response} resp - */ - private onLogoutError(resp: Response): void {} -
- /** - * Simply returns the boolean value of the isAuthorized application state key. - */ - public isAuthorized(): Observable<boolean> { - return this.appState.getParameter(IS_AUTHORIZED_APP_STATE_KEY); - } -
-} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts deleted file mode 100644 index e5584e3300b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts +++ /dev/null @@ -1,104 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
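Because login() above ultimately emits a plain boolean and routes non-OK responses to the error callback (navigation itself is driven by onAppStateIsAuthorizedChanged), a caller only needs a thin subscription. The consumer below is a hypothetical sketch, not part of the deleted sources:

  // Hypothetical consumer of AuthService.login(); everything except AuthService is illustrative.
  import {AuthService} from '@app/services/auth.service';

  export class LoginFormSubmitHandler {
    constructor(private authService: AuthService) {}

    onLoginFormSubmit(username: string, password: string): void {
      this.authService.login(username, password).subscribe(
        (): void => {}, // success: the redirect is handled by onAppStateIsAuthorizedChanged
        (): void => this.showLoginError() // non-OK responses arrive here via observer.error(resp)
      );
    }

    private showLoginError(): void {
      // placeholder for displaying a login failure message
    }
  }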
- */ - -import {TestBed, inject} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; - -import {ComponentGeneratorService} from './component-generator.service'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -describe('ComponentGeneratorService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - hosts, - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - ComponentGeneratorService, - LogsContainerService, - UtilsService, - HostsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - ServiceLogsTruncatedService, - TabsService, - ComponentGeneratorService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ] - }); - }); - - it('should create service', inject([ComponentGeneratorService], (service: ComponentGeneratorService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.ts 
b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.ts deleted file mode 100644 index 8e79aca4799..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.ts +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable, ComponentFactoryResolver, ViewContainerRef} from '@angular/core'; -import {HostsService} from '@app/services/storage/hosts.service'; -import {ComponentsService} from '@app/services/storage/components.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {NodeBarComponent} from '@app/components/node-bar/node-bar.component'; -import {HistoryItemControlsComponent} from '@app/components/history-item-controls/history-item-controls.component'; -import {LogLevelObject} from '@app/classes/object'; - -@Injectable() -export class ComponentGeneratorService { - - constructor(private resolver: ComponentFactoryResolver, private hostsStorage: HostsService, - private componentsStorage: ComponentsService, private logsContainer: LogsContainerService) { - } - - private get logLevels(): LogLevelObject[] { - return this.logsContainer.logLevels; - } - - private createComponent(type: any, container: ViewContainerRef, properties?: any): void { - const factory = this.resolver.resolveComponentFactory(type); - container.clear(); - let component = container.createComponent(factory); - Object.assign(component.instance, properties); - } - - getDataForHostsNodeBar(hostName: string, container: ViewContainerRef): void { - let data; - this.hostsStorage.getAll().subscribe(hosts => { - if (container && hosts && hosts.length) { - const selectedHost = hosts.find(host => host.name === hostName); - data = selectedHost.logLevelCount.map(event => { - return { - color: this.logLevels.find((level: LogLevelObject): boolean => level.name === event.name).color, - value: event.value - }; - }); - if (data.length) { - this.createComponent(NodeBarComponent, container, { - data - }); - } - } - }); - } - - getDataForComponentsNodeBar(componentName: string, container: ViewContainerRef): void { - let data; - this.componentsStorage.getAll().subscribe(components => { - if (container && components && components.length) { - const selectedHost = components.find(host => host.name === componentName); - data = selectedHost.logLevelCount.map(event => { - return { - color: this.logLevels.find((level: LogLevelObject): boolean => level.name === event.name).color, - value: event.value - }; - }); - if (data.length) { - this.createComponent(NodeBarComponent, container, { - data - }); - } - } - }); - } - - getHistoryItemIcons(historyItem, container: ViewContainerRef): void { - // TODO implement View details and Save filter actions - 
this.createComponent(HistoryItemControlsComponent, container); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/history-manager.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/history-manager.service.spec.ts deleted file mode 100644 index ccfe611d61e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/history-manager.service.spec.ts +++ /dev/null @@ -1,174 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {TestBed, inject} from '@angular/core/testing'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {HistoryManagerService} from './history-manager.service'; - -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -describe('HistoryManagerService', () => { - beforeEach(() => { - - 
TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - ...TranslationModules, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }) - ], - providers: [ - ...MockHttpRequestModules, - HistoryManagerService, - LogsContainerService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ] - }); - }); - - it('should be created', inject([HistoryManagerService], (service: HistoryManagerService) => { - expect(service).toBeTruthy(); - })); - - describe('#isHistoryUnchanged()', () => { - const cases = [ - { - valueA: { - p0: 'v0', - p1: ['v1'], - p2: { - k2: 'v2' - } - }, - valueB: { - p0: 'v0', - p1: ['v1'], - p2: { - k2: 'v2' - } - }, - result: true, - title: 'no difference' - }, - { - valueA: { - p0: 'v0', - p1: ['v1'], - p2: { - k2: 'v2' - }, - page: 0 - }, - valueB: { - p0: 'v0', - p1: ['v1'], - p2: { - k2: 'v2' - }, - page: 1 - }, - result: true, - title: 'difference in ignored parameters' - }, - { - valueA: { - p0: 'v0', - p1: ['v1'], - p2: { - k2: 'v2' - }, - page: 0 - }, - valueB: { - p0: 'v0', - p1: ['v3'], - p2: { - k2: 'v4' - }, - page: 1 - }, - result: false, - title: 'difference in non-ignored parameters' - } - ]; - - cases.forEach(test => { - it(test.title, inject([HistoryManagerService], (service: HistoryManagerService) => { - const isHistoryUnchanged: (valueA: object, valueB: object) => boolean = service['isHistoryUnchanged']; - expect(isHistoryUnchanged(test.valueA, test.valueB)).toEqual(test.result); - })); - }); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/history-manager.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/history-manager.service.ts deleted file mode 100644 index 2a3f5334841..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/history-manager.service.ts +++ /dev/null @@ -1,326 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
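The '#isHistoryUnchanged()' cases above all hinge on the same idea: pagination and sorting parameters must not count as history changes. The implementation these cases target appears further below in history-manager.service.ts; conceptually it copies both snapshots, strips the ignored keys and deep-compares the rest, roughly as in this condensed sketch (deepEqual stands in for UtilsService.isEqual):

  // Condensed restatement of the check exercised by the cases above; deepEqual is a stand-in.
  const ignoredParameters: string[] = ['page', 'pageSize', 'auditLogsSorting', 'serviceLogsSorting'];

  function isHistoryUnchanged(valueA: object, valueB: object, deepEqual: (a: object, b: object) => boolean): boolean {
    const objectA = Object.assign({}, valueA);
    const objectB = Object.assign({}, valueB);
    ignoredParameters.forEach((key: string): void => {
      delete objectA[key];
      delete objectB[key];
    });
    return deepEqual(objectA, objectB);
  }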
- */ - -import {Injectable} from '@angular/core'; -import 'rxjs/add/operator/distinctUntilChanged'; -import 'rxjs/add/operator/takeUntil'; -import {TranslateService} from '@ngx-translate/core'; -import {SearchBoxParameter, TimeUnitListItem} from '@app/classes/filtering'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject} from '@app/classes/object'; -import {History} from '@app/classes/models/app-state'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {TabsService} from '@app/services/storage/tabs.service'; - -@Injectable() -export class HistoryManagerService { - - /** - * List of filter parameters which shouldn't affect changes history (related to pagination and sorting) - * @type {string[]} - */ - private readonly ignoredParameters: string[] = ['page', 'pageSize', 'auditLogsSorting', 'serviceLogsSorting']; - - /** - * Maximal number of displayed history items - * @type {number} - */ - private readonly maxHistoryItemsCount: number = 25; - - /** - * Indicates whether there is no changes being applied to filters that are triggered by undo or redo action. - * Since user can undo or redo several filters changes at once, and they are applied to form controls step-by-step, - * this flag is needed to avoid recording intermediate items to history. - * @type {boolean} - */ - private hasNoPendingUndoOrRedo: boolean = true; - - /** - * Id of currently active history item. - * Generally speaking, it isn't id of the latest one because it can be shifted by undo or redo action. - * @type {number} - */ - private currentHistoryItemId: number = -1; - - /** - * Contains i18n labels for filtering form control names - */ - private controlNameLabels; - - /** - * Contains i18n labels for time range options - */ - private timeRangeLabels; - - /** - * History items for current tab - * @type {Array} - */ - activeHistory: ListItem[] = []; - - constructor( - private translate: TranslateService, private logsContainerService: LogsContainerService, private utils: UtilsService, - private appState: AppStateService, private tabs: TabsService - ) { - // set labels for history list items - const filters = logsContainerService.filters; - const controlNames = Object.keys(filters).filter((name: string): boolean => { - const key = filters[name].label; - return key && this.ignoredParameters.indexOf(name) === -1; - }); - const filterLabelKeys = controlNames.map((name: string): string => filters[name].label); - const timeRangeLabels = filters.timeRange.options.reduce(( - currentArray: string[], group: TimeUnitListItem[] - ): string[] => { - return [...currentArray, ...group.map((option: TimeUnitListItem): string => option.label)]; - }, [logsContainerService.customTimeRangeKey]); - - translate.get([ - 'filter.include', 'filter.exclude', ...filterLabelKeys, ...timeRangeLabels - ]).subscribe((translates: object): void => { - this.controlNameLabels = controlNames.reduce(( - currentObject: HomogeneousObject, name: string - ): HomogeneousObject => { - return Object.assign({}, currentObject, { - [name]: translates[filters[name].label] - }); - }, { - include: translates['filter.include'], - exclude: translates['filter.exclude'] - }); - this.timeRangeLabels = timeRangeLabels.reduce(( - currentObject: HomogeneousObject, key: string - ): HomogeneousObject => { - return Object.assign({}, 
currentObject, { - [key]: translates[key] - }); - }, {}); - }); - - // set default history state for each tab - tabs.mapCollection((tab: LogTypeTab): LogTypeTab => { - const currentTabAppState = tab.appState || {}; - const nextTabAppState = Object.assign({}, currentTabAppState, {history: this.initialHistory}); - return Object.assign({}, tab, { - appState: nextTabAppState - }); - }); - - this.logsContainerService.filtersForm.valueChanges - .filter(() => !this.logsContainerService.filtersFormSyncInProgress.getValue()) - .distinctUntilChanged(this.isHistoryUnchanged) - .subscribe(this.onFormValueChanges); - } - - /** - * List of filtering form control names for active tab - * @returns {Array} - */ - private get filterParameters(): string[] { - return this.logsContainerService.logsTypeMap[this.logsContainerService.activeLogsType].listFilters; - } - - get initialHistory(): History { - return Object.assign({}, { - items: [], - currentId: -1 - }); - } - - onFormValueChanges = (value): void => { - if (this.hasNoPendingUndoOrRedo) { - const defaultState = this.logsContainerService.getFiltersData(this.logsContainerService.activeLogsType); - const currentHistory = this.activeHistory; - const previousValue = this.activeHistory.length ? this.activeHistory[0].value.currentValue : defaultState; - const isUndoOrRedo = value.isUndoOrRedo; - const previousChangeId = this.currentHistoryItemId; - if (isUndoOrRedo) { - this.hasNoPendingUndoOrRedo = false; - this.logsContainerService.filtersForm.patchValue({ - isUndoOrRedo: false - }); - this.hasNoPendingUndoOrRedo = true; - } else { - this.currentHistoryItemId = currentHistory.length; - } - const newItem = { - value: { - currentValue: Object.assign({}, value), - previousValue: Object.assign({}, previousValue), - changeId: this.currentHistoryItemId, - previousChangeId, - isUndoOrRedo - }, - label: this.getHistoryItemLabel(previousValue, value) - }; - if (newItem.label) { - this.activeHistory = [ - newItem, - ...currentHistory - ].slice(0, this.maxHistoryItemsCount); - this.appState.setParameter('history', { - items: this.activeHistory.slice(), - currentId: this.currentHistoryItemId - }); - } - } - } - - /** - * List of changes that can be undone - * @returns {ListItem[]} - */ - get undoItems(): ListItem[] { - const allItems = this.activeHistory; - const startIndex = allItems.findIndex((item: ListItem): boolean => { - return item.value.changeId === this.currentHistoryItemId && !item.value.isUndoOrRedo; - }); - let endIndex = allItems.slice(startIndex + 1).findIndex((item: ListItem): boolean => item.value.isUndoOrRedo); - let items = []; - if (startIndex > -1) { - if (endIndex === -1) { - endIndex = allItems.length; - } - items = allItems.slice(startIndex, startIndex + endIndex + 1); - } - return items; - } - - /** - * List of changes that can be redone - * @returns {ListItem[]} - */ - get redoItems(): ListItem[] { - const allItems = this.activeHistory.slice().reverse(); - let startIndex = allItems.findIndex((item: ListItem): boolean => { - return item.value.previousChangeId === this.currentHistoryItemId && !item.value.isUndoOrRedo; - }), - endIndex = allItems.slice(startIndex + 1).findIndex((item: ListItem): boolean => item.value.isUndoOrRedo); - if (startIndex === -1) { - startIndex = allItems.length; - } - if (endIndex === -1) { - endIndex = allItems.length; - } - return allItems.slice(startIndex, endIndex + startIndex + 1); - } - - /** - * Indicates whether there are no filtering form changes that should be tracked - * (all except the ones related to 
pagination and sorting) - * @param {object} valueA - * @param {object} valueB - * @returns {boolean} - */ - private isHistoryUnchanged = (valueA: object, valueB: object): boolean => { - const objectA = Object.assign({}, valueA), - objectB = Object.assign({}, valueB); - this.ignoredParameters.forEach((controlName: string): void => { - delete objectA[controlName]; - delete objectB[controlName]; - }); - return this.utils.isEqual(objectA, objectB); - } - - /** - * Get label for certain form control change - * @param {string} controlName - * @param {any} selection - * @returns {string} - */ - private getItemValueString(controlName: string, selection: any): string { - switch (controlName) { - case 'timeRange': - return `${this.controlNameLabels[controlName]}: ${this.timeRangeLabels[selection.label]}`; - case 'query': - const includes = selection.filter((item: SearchBoxParameter): boolean => { - return !item.isExclude; - }).map((item: SearchBoxParameter): string => `${item.name}: ${item.value}`).join(', '); - const excludes = selection.filter((item: SearchBoxParameter): boolean => { - return item.isExclude; - }).map((item: SearchBoxParameter): string => `${item.name}: ${item.value}`).join(', '); - const includesString = includes.length ? `${this.controlNameLabels.include}: ${includes}` : ''; - const excludesString = excludes.length ? `${this.controlNameLabels.exclude}: ${excludes}` : ''; - return `${includesString} ${excludesString}`; - default: - const values = selection.map((option: ListItem) => option.value).join(', '); - return `${this.controlNameLabels[controlName]}: ${values}`; - } - } - - /** - * Get label for history list item (i.e., difference with the previous one) - * @param {object} previousFormValue - * @param {object} currentFormValue - * @returns {string} - */ - private getHistoryItemLabel(previousFormValue: object, currentFormValue: object): string { - return this.filterParameters.reduce( - (currentResult: string, currentName: string): string => { - const currentValue = currentFormValue[currentName]; - if (this.ignoredParameters.indexOf(currentName) > -1 - || this.utils.isEqual(previousFormValue[currentName], currentValue)) { - return currentResult; - } else { - const currentLabel = this.getItemValueString(currentName, currentValue); - return `${currentResult} ${currentLabel}`; - } - }, '' - ); - } - - /** - * Handle undo or redo action correctly - * @param {object} value - */ - private handleUndoOrRedo(value: object): void { - const filtersForm = this.logsContainerService.filtersForm; - this.hasNoPendingUndoOrRedo = false; - this.logsContainerService.filtersFormSyncInProgress.next(true); - this.filterParameters.filter(controlName => this.ignoredParameters.indexOf(controlName) === -1) - .forEach((controlName: string): void => { - filtersForm.controls[controlName].setValue(value[controlName], { - emitEvent: false, - onlySelf: true - }); - }); - this.logsContainerService.filtersFormSyncInProgress.next(false); - this.hasNoPendingUndoOrRedo = true; - filtersForm.controls.isUndoOrRedo.setValue(true); - } - - undo(item: ListItem): void { - if (item) { - this.hasNoPendingUndoOrRedo = false; - this.currentHistoryItemId = item.value.previousChangeId; - this.handleUndoOrRedo(item.value.previousValue); - } - } - - redo(item: ListItem): void { - if (item) { - this.hasNoPendingUndoOrRedo = false; - this.currentHistoryItemId = item.value.changeId; - this.handleUndoOrRedo(item.value.currentValue); - } - } - -} diff --git 
a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts deleted file mode 100644 index 4720a74b212..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {TestBed, inject} from '@angular/core/testing'; -import {HttpModule, Request} from '@angular/http'; -import {StoreModule} from '@ngrx/store'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {HttpClientService} from './http-client.service'; - -describe('HttpClientService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - HttpModule, - StoreModule.provideStore({ - appState - }) - ], - providers: [ - HttpClientService, - AppStateService - ] - }); - }); - - it('should create service', inject([HttpClientService], (service: HttpClientService) => { - expect(service).toBeTruthy(); - })); - - describe('#generateUrlString()', () => { - it('should generate URL from presets', inject([HttpClientService], (service: HttpClientService) => { - expect(service['generateUrlString']('status')).toEqual('api/v1/status'); - })); - - it('should return explicit URL', inject([HttpClientService], (service: HttpClientService) => { - expect(service['generateUrlString']('login')).toEqual('login'); - })); - }); - - describe('#generateUrl()', () => { - it('string parameter', inject([HttpClientService], (service: HttpClientService) => { - expect(service['generateUrl']('status')).toEqual('api/v1/status'); - })); - - it('request object parameter', inject([HttpClientService], (service: HttpClientService) => { - let request = new Request({ - url: 'status' - }); - expect(service['generateUrl'](request)['url']).toEqual('api/v1/status'); - })); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts deleted file mode 100644 index c65278b51a8..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts +++ /dev/null @@ -1,239 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/operator/first'; -import 'rxjs/add/observable/throw'; -import { - Http, XHRBackend, Request, RequestOptions, RequestOptionsArgs, Response, Headers, URLSearchParams -} from '@angular/http'; -import {HomogeneousObject} from '@app/classes/object'; -import {AuditLogsListQueryParams} from '@app/classes/queries/audit-logs-query-params'; -import {AuditLogsGraphQueryParams} from '@app/classes/queries/audit-logs-graph-query-params'; -import {AuditLogsTopResourcesQueryParams} from '@app/classes/queries/audit-logs-top-resources-query-params'; -import {ServiceLogsQueryParams} from '@app/classes/queries/service-logs-query-params'; -import {ServiceLogsHistogramQueryParams} from '@app/classes/queries/service-logs-histogram-query-params'; -import {ServiceLogsTruncatedQueryParams} from '@app/classes/queries/service-logs-truncated-query-params'; -import {AppStateService} from '@app/services/storage/app-state.service'; - -@Injectable() -export class HttpClientService extends Http { - - private readonly apiPrefix = 'api/v1/'; - - private readonly endPoints = { - status: { - url: 'status' - }, - auditLogs: { - url: 'audit/logs', - params: opts => new AuditLogsListQueryParams(opts) - }, - auditLogsGraph: { - url: 'audit/logs/bargraph', - params: opts => new AuditLogsGraphQueryParams(opts) - }, - auditLogsFields: { - url: 'audit/logs/schema/fields' - }, - serviceLogs: { - url: 'service/logs', - params: opts => new ServiceLogsQueryParams(opts) - }, - serviceLogsHistogram: { - url: 'service/logs/histogram', - params: opts => new ServiceLogsHistogramQueryParams(opts) - }, - serviceLogsFields: { - url: 'service/logs/schema/fields' - }, - serviceLogsTruncated: { - url: 'service/logs/truncated', - params: opts => new ServiceLogsTruncatedQueryParams(opts) - }, - components: { - url: 'service/logs/components/levels/counts' - }, - serviceComponentsName: { - url: 'service/logs/components' - }, - clusters: { - url: 'service/logs/clusters' - }, - hosts: { - url: 'service/logs/tree' - }, - topAuditLogsResources: { - url: variables => `audit/logs/resources/${variables.number}`, - params: opts => new AuditLogsTopResourcesQueryParams(opts) - }, - logIndexFilters: { - url: variables => `shipper/filters/${variables.clusterName}/level` - }, - - shipperClusterServiceList: { - url: variables => `shipper/input/${variables.cluster}/services` - }, - shipperClusterServiceConfiguration: { - url: variables => `shipper/input/${variables.cluster}/services/${variables.service}` - }, - shipperClusterServiceConfigurationTest: { - url: variables => `shipper/input/${variables.cluster}/test` - } - }; - - private readonly unauthorizedStatuses = [401, 403, 419]; - - constructor(backend: XHRBackend, defaultOptions: RequestOptions, private appState: AppStateService) { - super(backend, defaultOptions); - } - - /** - * The goal here is to check if the given real api url should be always POST or not.\ - * See https://issues.apache.org/jira/browse/AMBARI-23779 - * @param {string} url The full url for the api end point. 
- * @returns {boolean} - */ - private shouldTurnGetToPost(url: string): boolean { - const subUrl = url.replace(this.apiPrefix, ''); - return /^(audit|service)/.test(subUrl); - } - - private generateUrlString(url: string, urlVariables?: HomogeneousObject): string { - const preset = this.endPoints[url]; - let generatedUrl: string; - if (preset) { - const urlExpression = preset.url; - let path: string; - if (typeof urlExpression === 'function') { - path = preset.url(urlVariables); - } else if (typeof urlExpression === 'string') { - path = preset.url; - } - generatedUrl = `${this.apiPrefix}${path}`; - } else { - generatedUrl = url; - } - return generatedUrl; - } - - private generateUrl(request: string | Request): string | Request { - if (typeof request === 'string') { - return this.generateUrlString(request); - } - if (request instanceof Request) { - request.url = this.generateUrlString(request.url); - return request; - } - } - - private generateOptions(url: string, params: HomogeneousObject): RequestOptionsArgs { - const preset = this.endPoints[url], - rawParams = preset && preset.params ? preset.params(params) : params; - if (rawParams) { - const paramsString = Object.keys(rawParams).map((key: string): string => `${key}=${rawParams[key]}`).join('&'), - urlParams = new URLSearchParams(paramsString, { - encodeKey: key => key, - encodeValue: value => encodeURIComponent(value) - }); - return { - params: urlParams - }; - } else { - return { - params: rawParams - }; - } - } - - request(url: string | Request, options?: RequestOptionsArgs): Observable { - const handleResponseError = (error) => { - let handled = false; - if (this.unauthorizedStatuses.indexOf(error.status) > -1) { - this.appState.setParameter('isAuthorized', false); - handled = true; - } - return handled; - }; - return super.request(this.generateUrl(url), options).first().share() - .map(response => response) - .catch((error: any) => { - return handleResponseError(error) ? Observable.of(error) : Observable.throw(error); - }); - } - - get(url: string, params?: HomogeneousObject, urlVariables?: HomogeneousObject): Observable { - const generatedUrl: string = this.generateUrlString(url, urlVariables); - let response$: Observable; - const options = this.generateOptions(url, params); - if (this.shouldTurnGetToPost(generatedUrl)) { - let body = (options && options.params) || params || {}; - if (body instanceof URLSearchParams) { - const paramsMap = Array.from(body.paramsMap); - body = paramsMap.reduce((current, param) => { - const [key, value] = param; - return { - ...current, - [key]: Array.isArray(value) && value.length === 1 ? 
value[0] : value - }; - }, {}); - } else if (typeof body === 'string') { - body = body.split('&').reduce((current, param): {[key: string]: any} => { - const pair = param.split('='); - return { - ...current, - [pair[0]]: decodeURIComponent(pair[1]) - }; - }, {}); - } - options.params = {}; - response$ = super.post(generatedUrl, body, options); - } else { - response$ = super.get(this.generateUrlString(url, urlVariables), this.generateOptions(url, params)); - } - return response$; - } - - put(url: string, body: any, params?: HomogeneousObject, urlVariables?: HomogeneousObject): Observable { - return super.put(this.generateUrlString(url, urlVariables), body, this.generateOptions(url, params)); - } - - post(url: string, body: any, params?: HomogeneousObject, urlVariables?: HomogeneousObject): Observable { - return super.post(this.generateUrlString(url, urlVariables), body, this.generateOptions(url, params)); - } - - postFormData( - url: string, - params: HomogeneousObject, - options?: RequestOptionsArgs, - urlVariables?: HomogeneousObject): Observable { - const encodedParams = this.generateOptions(url, params).params; - let body; - if (encodedParams && encodedParams instanceof URLSearchParams) { - body = encodedParams.rawParams; - } - const requestOptions = Object.assign({}, options); - if (!requestOptions.headers) { - requestOptions.headers = new Headers(); - } - requestOptions.headers.append('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8'); - return super.post(this.generateUrlString(url, urlVariables), body, requestOptions); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/log-index-filter.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/log-index-filter.service.spec.ts deleted file mode 100644 index 924deee6711..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/log-index-filter.service.spec.ts +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
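HttpClientService above maps short endpoint keys to api/v1/... URLs and, per AMBARI-23779, rewrites GET requests against the audit/* and service/* endpoints into POSTs with the query parameters moved into the request body. A call site therefore deals only with the preset key and a plain parameter object; the consumer below is a hypothetical sketch, and its parameter names and values are illustrative only:

  // Hypothetical consumer; 'serviceLogs' is one of the endpoint presets defined above,
  // so this request goes out as POST api/v1/service/logs with the parameters in the body.
  import {Response} from '@angular/http';
  import {HttpClientService} from '@app/services/http-client.service';

  export class ServiceLogsSampleLoader {
    constructor(private httpClient: HttpClientService) {}

    loadErrorLogs(): void {
      this.httpClient.get('serviceLogs', {level: 'ERROR,FATAL'}).subscribe((response: Response) => {
        const logs = response.json(); // parse the payload as needed
      });
    }
  }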
- */ - -import { TestBed, inject } from '@angular/core/testing'; - -import { - getCommonTestingBedConfiguration, - TranslationModules -} from '@app/test-config.spec'; - -import { AppStateService } from '@app/services/storage/app-state.service'; - -import { LogIndexFilterService } from './log-index-filter.service'; - -describe('LogIndexFilterService', () => { - beforeEach(() => { - TestBed.configureTestingModule(getCommonTestingBedConfiguration({ - imports: [ - ...TranslationModules - ], - providers: [ - AppStateService, - LogIndexFilterService - ] - })); - }); - - it('should be created', inject([LogIndexFilterService], (service: LogIndexFilterService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/log-index-filter.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/log-index-filter.service.ts deleted file mode 100644 index 8aa0523452f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/log-index-filter.service.ts +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { Injectable } from '@angular/core'; -import {Response} from '@angular/http'; - -import { Observable } from 'rxjs/Observable'; - -import { HttpClientService } from '@app/services/http-client.service'; -import { Filter } from '@app/classes/models/filter'; - -@Injectable() -export class LogIndexFilterService { - - constructor(private httpClient: HttpClientService) { } - - getFilterByClusterName(clusterName: string): Observable { - return this.httpClient.get('logIndexFilters', null, { - clusterName - }).map((response: Response): Filter => { - const filters: Filter = response.json() && response.json().filter; - return filters; - }); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/login-screen-guard.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/login-screen-guard.service.ts deleted file mode 100644 index 8dbe1d7398b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/login-screen-guard.service.ts +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {ActivatedRouteSnapshot, CanActivate, Router, RouterStateSnapshot} from '@angular/router'; -import {Observable} from 'rxjs/Observable'; - -import {AuthService} from '@app/services/auth.service'; - -/** - * The goal of this guard service is to prevent to display the login screen when the user is logged in. - */ -@Injectable() -export class LoginScreenGuardService implements CanActivate { - - constructor(private authService: AuthService, private router: Router) {} - - canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable { - return this.authService.isAuthorized().map((isAuthorized: boolean) => { - if (isAuthorized && state.url === '/login') { - this.router.navigate(['/']); - } - return !isAuthorized; - }); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-breadcrumbs-resolver.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-breadcrumbs-resolver.service.spec.ts deleted file mode 100644 index 0926ef4c9ed..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-breadcrumbs-resolver.service.spec.ts +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
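AuthGuardService and LoginScreenGuardService are complementary: the former keeps unauthenticated users away from protected routes, the latter keeps authenticated users off the /login screen. The routing module itself is not part of this diff, so the wiring below is only a hypothetical sketch of how such guards are normally attached; the component names are placeholders:

  // Hypothetical route configuration; LoginFormComponent and ShellComponent are placeholder names.
  import {Routes} from '@angular/router';
  import {AuthGuardService} from '@app/services/auth-guard.service';
  import {LoginScreenGuardService} from '@app/services/login-screen-guard.service';

  const appRoutes: Routes = [
    {path: 'login', component: LoginFormComponent, canActivate: [LoginScreenGuardService]},
    {path: '', component: ShellComponent, canActivate: [AuthGuardService]}
  ];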
- */ -import {TestBed, inject} from '@angular/core/testing'; - -import { LogsBreadcrumbsResolverService } from './logs-breadcrumbs-resolver.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {tabs, TabsService} from '@app/services/storage/tabs.service'; -import {StoreModule} from '@ngrx/store'; -import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {serviceLogs} from '@app/services/storage/service-logs.service'; -import {hosts} from '@app/services/storage/hosts.service'; -import {auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {TranslationModules} from '@app/test-config.spec'; -import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service'; -import {clusters} from '@app/services/storage/clusters.service'; -import {auditLogs} from '@app/services/storage/audit-logs.service'; -import {appState} from '@app/services/storage/app-state.service'; -import {components} from '@app/services/storage/components.service'; -import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import {appSettings} from '@app/services/storage/app-settings.service'; -import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; - -describe('LogsBreadcrumbsResolverService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - LogsBreadcrumbsResolverService, - RoutingUtilsService, - TabsService - ] - }); - }); - - it('should be created', inject([LogsBreadcrumbsResolverService], (service: LogsBreadcrumbsResolverService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-breadcrumbs-resolver.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-breadcrumbs-resolver.service.ts deleted file mode 100644 index e44717b38af..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-breadcrumbs-resolver.service.ts +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {Injectable} from '@angular/core'; -import {ActivatedRouteSnapshot, Resolve, RouterStateSnapshot} from '@angular/router'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {TabsService} from '@app/services/storage/tabs.service'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {Observable} from 'rxjs/Observable'; - -@Injectable() -export class LogsBreadcrumbsResolverService implements Resolve { - - constructor( - private routingUtilService: RoutingUtilsService, - private tabStoreService: TabsService - ) { } - - resolve(route: ActivatedRouteSnapshot, routerStateSnapshot: RouterStateSnapshot): Observable { - const activeTabParam: string = this.routingUtilService.getParamFromActivatedRouteSnapshot(route, 'activeTab'); - const breadcrumbs: string[] = ['logs.title']; - return this.tabStoreService.findInCollection((tab: LogTypeTab) => tab.id === activeTabParam).first().map((tab: LogTypeTab) => { - breadcrumbs.push(tab.label); - return breadcrumbs; - }); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts deleted file mode 100644 index 3e70644f577..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts +++ /dev/null @@ -1,101 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
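LogsBreadcrumbsResolverService above implements Angular's Resolve interface and emits the breadcrumb labels for the tab named in the route. A resolver like this is attached to a route under a resolve key, and its result shows up on the activated route's data; the route definition below is a hypothetical illustration, not the project's actual configuration:

  // Hypothetical route using the resolver; LogsContainerComponent is a placeholder name here.
  import {LogsBreadcrumbsResolverService} from '@app/services/logs-breadcrumbs-resolver.service';

  const logsRoute = {
    path: 'logs/:activeTab',
    component: LogsContainerComponent,
    resolve: {
      breadcrumbs: LogsBreadcrumbsResolverService
    }
  };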
- */ - -import {TestBed, inject} from '@angular/core/testing'; -import {MockHttpRequestModules, TranslationModules} from '@app/test-config.spec'; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {LogsContainerService} from './logs-container.service'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; - -describe('LogsContainerService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, - HostsService, - ServiceLogsTruncatedService, - TabsService, - LogsContainerService, - UtilsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService - ] - }); - }); - - it('should create service', inject([LogsContainerService], (service: LogsContainerService) => { - expect(service).toBeTruthy(); - })); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts deleted file mode 100644 index 9fba9517cca..00000000000 --- 
a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts +++ /dev/null @@ -1,925 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {FormGroup, FormControl} from '@angular/forms'; -import {Response} from '@angular/http'; -import {Subject} from 'rxjs/Subject'; -import {Observable} from 'rxjs/Observable'; -import 'rxjs/add/observable/timer'; -import 'rxjs/add/observable/combineLatest'; -import 'rxjs/add/operator/distinctUntilChanged'; -import 'rxjs/add/operator/first'; -import 'rxjs/add/operator/map'; -import 'rxjs/add/operator/takeUntil'; -import * as moment from 'moment-timezone'; -import {HttpClientService} from '@app/services/http-client.service'; -import {UtilsService} from '@app/services/utils.service'; -import {AuditLogsService} from '@app/services/storage/audit-logs.service'; -import {AuditLogsFieldsService, ResponseRootProperties} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsService} from '@app/services/storage/service-logs.service'; -import {ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service'; -import {ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service'; -import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service'; -import {AppStateService} from '@app/services/storage/app-state.service'; -import {AppSettingsService} from '@app/services/storage/app-settings.service'; -import {TabsService} from '@app/services/storage/tabs.service'; -import {ClustersService} from '@app/services/storage/clusters.service'; -import {ComponentsService} from '@app/services/storage/components.service'; -import {HostsService} from '@app/services/storage/hosts.service'; -import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry'; -import { - FilterCondition, TimeUnitListItem, SearchBoxParameter, SearchBoxParameterTriggered -} from '@app/classes/filtering'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject, LogLevelObject} from '@app/classes/object'; -import {DataAvailability, DataAvailabilityValues, LogsType, ScrollType} from '@app/classes/string'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import {AuditFieldsDefinitionSet} from '@app/classes/object'; -import {AuditLog} from '@app/classes/models/audit-log'; -import {ServiceLog} from '@app/classes/models/service-log'; -import {BarGraph} from '@app/classes/models/bar-graph'; -import {NodeItem} from '@app/classes/models/node-item'; -import {CommonEntry} from '@app/classes/models/common-entry'; -import {ClusterSelectionService} 
from '@app/services/storage/cluster-selection.service'; -import {ActivatedRoute, Router} from '@angular/router'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {BehaviorSubject} from 'rxjs/BehaviorSubject'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {LogLevelComponent} from '@app/components/log-level/log-level.component'; -import {NotificationService, NotificationType} from '@modules/shared/services/notification.service'; - -@Injectable() -export class LogsContainerService { - - static clusterSelectionStoreKey = 'logs'; - - private readonly paginationOptions: string[] = ['10', '25', '50', '100']; - - readonly logLevels: LogLevelObject[] = [ - { - name: 'FATAL', - label: 'levels.fatal', - color: '#830A0A' - }, - { - name: 'ERROR', - label: 'levels.error', - color: '#E81D1D' - }, - { - name: 'WARN', - label: 'levels.warn', - color: '#FF8916' - }, - { - name: 'INFO', - label: 'levels.info', - color: '#2577B5' - }, - { - name: 'DEBUG', - label: 'levels.debug', - color: '#65E8FF' - }, - { - name: 'TRACE', - label: 'levels.trace', - color: '#888' - }, - { - name: 'UNKNOWN', - label: 'levels.unknown', - color: '#BDBDBD' - } - ]; - - filters: HomogeneousObject = { - clusters: { - label: 'filter.clusters', - options: [], - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.clusters, - fieldName: 'cluster' - }, - timeRange: { // @ToDo remove duplication, this options are in the LogsFilteringUtilsService too - label: 'logs.duration', - options: this.logsFilteringUtilsService.getTimeRandeOptionsByGroup(), - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.timeRange - }, - components: { - label: 'filter.components', - iconClass: 'fa fa-cubes', - options: [], - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.components, - fieldName: 'type' - }, - levels: { - label: 'filter.levels', - iconClass: 'fa fa-sort-amount-asc', - options: this.logLevels.map((level: LogLevelObject): ListItem => { - const cssClass = (level.name || 'unknown').toLowerCase(); - return { - label: level.label, - value: level.name, - cssClass: `log-level-item ${cssClass}`, - iconClass: `fa ${LogLevelComponent.classMap[cssClass]}` - }; - }), - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.levels, - fieldName: 'level' - }, - hosts: { - label: 'filter.hosts', - iconClass: 'fa fa-server', - options: [], - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.hosts, - fieldName: 'host' - }, - auditLogsSorting: { - label: 'sorting.title', - options: [ - { - label: 'sorting.time.asc', - value: { - key: 'evtTime', - type: 'asc' - } - }, - { - label: 'sorting.time.desc', - value: { - key: 'evtTime', - type: 'desc' - } - } - ], - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.auditLogsSorting - }, - serviceLogsSorting: { - label: 'sorting.title', - options: [ - { - label: 'sorting.time.asc', - value: { - key: 'logtime', - type: 'asc' - } - }, - { - label: 'sorting.time.desc', - value: { - key: 'logtime', - type: 'desc' - } - } - ], - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.serviceLogsSorting - }, - pageSize: { - label: 'pagination.title', - options: this.paginationOptions.map((option: string): ListItem => { - return { - label: option, - value: option - }; - }), - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.pageSize - }, - page: { - defaultSelection: 
this.logsFilteringUtilsService.defaultFilterSelections.page - }, - query: { - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.query - }, - users: { - label: 'filter.users', - iconClass: 'fa fa-server', - options: [], - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.users, - fieldName: 'reqUser' - }, - isUndoOrRedo: { - defaultSelection: this.logsFilteringUtilsService.defaultFilterSelections.isUndoOrRedo - } - }; - - private readonly filtersMapping = { - clusters: ['clusters'], - timeRange: ['to', 'from'], - components: ['mustBe'], - levels: ['level'], - hosts: ['hostList'], - auditLogsSorting: ['sortType', 'sortBy'], - serviceLogsSorting: ['sortType', 'sortBy'], - pageSize: ['pageSize'], - page: ['page'], - query: ['includeQuery', 'excludeQuery'], - users: ['userList'] - }; - - private readonly graphFilters = { - clusters: ['clusters'], - timeRange: ['to', 'from'], - components: ['mustBe'], - levels: ['level'], - hosts: ['hostList'], - query: ['includeQuery', 'excludeQuery'], - users: ['userList'] - }; - - readonly customTimeRangeKey = 'filter.timeRange.custom'; - - readonly topResourcesCount = '10'; - - readonly topUsersCount = '6'; - - readonly logsTypeMap = { - auditLogs: { - logsModel: this.auditLogsStorage, - fieldsModel: this.auditLogsFieldsStorage, - listFilters: ['clusters', 'timeRange', 'auditLogsSorting', 'pageSize', 'page', 'query', 'users'], - topResourcesFilters: ['clusters', 'timeRange', 'query'], - graphFilters: ['clusters', 'timeRange', 'query'], - graphRequestName: 'auditLogsGraph', - graphModel: this.auditLogsGraphStorage - }, - serviceLogs: { - logsModel: this.serviceLogsStorage, - fieldsModel: this.serviceLogsFieldsStorage, - listFilters: [ - 'clusters', 'timeRange', 'components', 'levels', 'hosts', 'serviceLogsSorting', 'pageSize', 'page', 'query' - ], - graphFilters: ['clusters', 'timeRange', 'components', 'levels', 'hosts', 'query'], - graphRequestName: 'serviceLogsHistogram', - graphModel: this.serviceLogsHistogramStorage - } - }; - - private readonly defaultTimeZone = moment.tz.guess(); - - readonly queryContextMenuItems: ListItem[] = [ - { - label: 'logs.addToQuery', - iconClass: 'fa fa-search-plus', - value: false // 'isExclude' is false - }, - { - label: 'logs.excludeFromQuery', - iconClass: 'fa fa-search-minus', - value: true // 'isExclude' is true - } - ]; - - timeZone: string = this.defaultTimeZone; - - totalCount = 0; - - /** - * A configurable property to indicate the maximum capture time in milliseconds. 
- * @type {number} - * @default 600000 (10 minutes) - */ - readonly maximumCaptureTimeLimit = 600000; - - isServiceLogsFileView = false; - - filtersForm: FormGroup; - - activeLog: ActiveServiceLogEntry | null = null; - - activeLogsType: LogsType; - - // Todo move this prop to a state of the audit log container - activeAuditGroup: string = ResponseRootProperties.DEFAULTS; - - filtersFormChange: Subject = new Subject(); - - private auditLogsSource: Observable = this.auditLogsStorage.getAll(); - - private serviceLogsSource: Observable = this.serviceLogsStorage.getAll(); - - auditLogsColumns: Observable = this.auditLogsFieldsStorage.getParameter(ResponseRootProperties.DEFAULTS) - .map(this.utils.logFieldToListItemMapper); - - serviceLogsColumns: Observable = this.serviceLogsFieldsStorage.getAll().map(this.utils.logFieldToListItemMapper); - - serviceLogs: Observable = Observable.combineLatest( - this.serviceLogsSource, this.serviceLogsColumns - ).map(this.logsMapper); - - auditLogs: Observable = Observable.combineLatest( - this.auditLogsSource, this.auditLogsColumns - ).map(this.logsMapper); - - queryParameterNameChange: Subject = new Subject(); - - queryParameterAdd: Subject = new Subject(); - - private stopTimer: Subject = new Subject(); - - private stopAutoRefreshCountdown: Subject = new Subject(); - - captureSeconds = 0; - - readonly autoRefreshInterval = 30000; - - autoRefreshRemainingSeconds = 0; - - private startCaptureTime: number; - - private stopCaptureTime: number; - - captureTimeRangeCache: ListItem; - - topUsersGraphData: HomogeneousObject> = {}; - - topResourcesGraphData: HomogeneousObject> = {}; - - private readonly valueGetters = { - to: (selection: TimeUnitListItem) => { - return this.logsFilteringUtilsService.getEndTimeFromTimeUnitListItem(selection, this.timeZone); - }, - from: (selection: TimeUnitListItem, current: string) => { - return this.logsFilteringUtilsService.getStartTimeFromTimeUnitListItem(selection, current, this.timeZone); - }, - sortType: this.logsFilteringUtilsService.getSortTypeFromSortingListItem, - sortBy: this.logsFilteringUtilsService.getSortKeyFromSortingListItem, - page: this.logsFilteringUtilsService.getPage, - includeQuery: this.logsFilteringUtilsService.getQuery(false), - excludeQuery: this.logsFilteringUtilsService.getQuery(true) - }; - - filtersFormSyncInProgress: BehaviorSubject = new BehaviorSubject(false); - - constructor( - private httpClient: HttpClientService, private utils: UtilsService, - private tabsStorage: TabsService, private componentsStorage: ComponentsService, private hostsStorage: HostsService, - private appState: AppStateService, private auditLogsStorage: AuditLogsService, - private auditLogsGraphStorage: AuditLogsGraphDataService, private auditLogsFieldsStorage: AuditLogsFieldsService, - private serviceLogsStorage: ServiceLogsService, private serviceLogsFieldsStorage: ServiceLogsFieldsService, - private serviceLogsHistogramStorage: ServiceLogsHistogramDataService, private clustersStorage: ClustersService, - private serviceLogsTruncatedStorage: ServiceLogsTruncatedService, private appSettings: AppSettingsService, - private clusterSelectionStoreService: ClusterSelectionService, - private router: Router, - private activatedRoute: ActivatedRoute, - private logsFilteringUtilsService: LogsFilteringUtilsService, - private logsStateService: LogsStateService, - private notificationService: NotificationService, - private componentsService: ComponentsService - ) { - const formItems = Object.keys(this.filters).reduce((currentObject: any, key: 
string): HomogeneousObject => { - const formControl = new FormControl(); - const item = { - [key]: formControl - }; - formControl.setValue(this.logsFilteringUtilsService.defaultFilterSelections[key]); - return Object.assign(currentObject, item); - }, {}); - this.filtersForm = new FormGroup(formItems); - - this.componentsStorage.getAll().subscribe(this.setComponentsFilters); - this.clustersStorage.getAll().subscribe(this.setClustersFilters); - this.hostsStorage.getAll().subscribe(this.setHostsFilters); - - appState.getParameter('activeLog').subscribe((value: ActiveServiceLogEntry | null) => this.activeLog = value); - appState.getParameter('isServiceLogsFileView').subscribe((value: boolean) => this.isServiceLogsFileView = value); - appState.getParameter('activeLogsType').subscribe((value: LogsType) => { - if (this.isLogsTypeSupported(value)) { - this.activeLogsType = value; - this.loadLogs(this.activeLogsType); - } - }); - - appSettings.getParameter('timeZone').subscribe((value: string) => this.timeZone = value || this.defaultTimeZone); - tabsStorage.mapCollection((tab: LogTypeTab): LogTypeTab => { - return Object.assign({}, tab, { - activeFilters: this.getFiltersData(tab.appState.activeLogsType) - }); - }); - - this.filtersForm.valueChanges.filter(() => !this.filtersFormSyncInProgress.getValue()).subscribe(this.onFiltersFormValueChange); - - this.auditLogsSource.subscribe((logs: AuditLog[]): void => { - const userNames = logs.map((log: AuditLog): string => log.reqUser); - this.utils.pushUniqueValues( - this.filters.users.options, userNames.map(this.utils.getListItemFromString), - this.compareFilterOptions - ); - }); - this.clusterSelectionStoreService.getParameter(LogsContainerService.clusterSelectionStoreKey) - .filter(selection => !!selection).subscribe(this.onClusterSelectionChanged); - } - - // - // SECTION: FILTERS AND TABS - // - - /** - * Update the filters form with the given filters (from active tab's filters) - * @param tab {LogTypeTab} - */ - syncTabFiltersToFilterForms(tab: LogTypeTab): void { - this.resetFiltersForms(tab.activeFilters); - } - - /** - * Update the filters form with the given filters. - * @param filters {object} - */ - resetFiltersForms(filters): void { - this.appState.getParameter('baseDataSetState') - // do it only when the base data set is available so that the dropdowns can set the selections - .filter((dataSetState: DataAvailability) => dataSetState === DataAvailabilityValues.AVAILABLE) - .first() - .subscribe(() => { - this.filtersFormSyncInProgress.next(true); - this.filtersForm.reset( - {...this.logsFilteringUtilsService.defaultFilterSelections, ...filters}, - {emitEvent: false} - ); - this.filtersFormSyncInProgress.next(false); - this.onFiltersFormValueChange(); - }); - } - - /** - * Sync the given filters into the given tab or if the tabId param is not given into the currently active tab's - * activeFilters property. - * @param filters - * @param tabId - */ - syncFiltersToTabFilters(filters, tabId?): void { - this.tabsStorage.mapCollection((tab: LogTypeTab): LogTypeTab => { - const changes = (tabId && tabId === tab.id) || (!tabId && tab.isActive) ? { - activeFilters: filters - } : {}; - return Object.assign({}, tab, changes); - }); - } - - /** - * Set the appState in the store by the stored state in the Tab object. 
It is mainly the 'activeLogsType' and the 'isServiceLogsFileView' - * property - * @param {LogTypeTab} tab - */ - private setAppStateByTab(tab: LogTypeTab): void { - this.appState.setParameters(tab.appState); - } - - /** - * Actualize the 'isActive' property all the tabs in the store, and set it true where the given tab id is the same. - * @param {LogTypeTab} tabToActivate - */ - setActiveTab(tabToActivate: LogTypeTab): void { - this.tabsStorage.mapCollection((tab: LogTypeTab): LogTypeTab => { - return Object.assign({}, tab, { - isActive: tab.id === tabToActivate.id - }); - }); - } - - /** - * Switch the tab to the given tab. - * @param {LogTypeTab} activeTab - */ - switchTab(activeTab: LogTypeTab, withFilters?: {[key: string]: any}): void { - this.setActiveTab(activeTab); - this.setAppStateByTab(activeTab); - this.resetFiltersForms(withFilters || activeTab.activeFilters); - } - - /** - * Switch to the tab with the given tab id. - * @param {string} tabId - */ - setActiveTabById(tabId: string): void { - this.tabsStorage.findInCollection((tab: LogTypeTab) => tab.id === tabId).first().subscribe((tab: LogTypeTab | null) => { - if (tab) { - this.switchTab(tab); - this.logsStateService.setParameter('activeTabId', tabId); - } - }); - } - - /** - * Handle the filters form value changes in order to sync the current tab's filters and also to load the logs. - */ - private onFiltersFormValueChange = (): void => { - this.syncFiltersToTabFilters(this.filtersForm.getRawValue()); - this.loadLogs(); - } - - // - // SECTION END: FILTERS AND TABS - // - - private logsMapper(result: [LogT[], ListItem[]]): LogT[] { - const [logs, fields] = result; - if (fields.length) { - const names = fields.map((field: ListItem): string => field.value); - if (names.indexOf('id') === -1) { - names.push('id'); - } - return logs.map((log: LogT): LogT => { - return names.reduce((currentObject: object, key: string) => Object.assign(currentObject, { - [key]: log[key] - }), {}) as LogT; - }); - } else { - return []; - } - } - - private onClusterSelectionChanged = (selection): void => { - const clusterSelection: string[] = Array.isArray(selection) ? 
selection : [selection]; - this.filtersForm.controls.clusters.setValue(clusterSelection.map(this.utils.getListItemFromString)); - } - - /** - * Compares two options list items by values (so that isChecked flags are ignored) - * @param {ListItem} sourceItem - * @param {ListItem} newItem - * @returns {boolean} - */ - private compareFilterOptions = (sourceItem: ListItem, newItem: ListItem): boolean => { - return this.utils.isEqual(sourceItem.value, newItem.value); - } - - private isFormUnchanged = (valueA: object, valueB: object): boolean => { - const trackedControlNames = this.logsTypeMap[this.activeLogsType].listFilters; - for (const name of trackedControlNames) { - if (!this.utils.isEqual(valueA[name], valueB[name])) { - return false; - } - } - return true; - } - - isLogsTypeSupported(logsType: LogsType): boolean { - return !!this.logsTypeMap[logsType]; - } - - loadLogs = (logsType: LogsType = this.activeLogsType): void => { - if (this.isLogsTypeSupported(logsType)) { - this.httpClient.get(logsType, this.getParams('listFilters', {}, logsType)).subscribe((response: Response): void => { - const jsonResponse = response.json(), - model = this.logsTypeMap[logsType].logsModel; - model.clear(); - if (jsonResponse) { - const logs = jsonResponse.logList, - count = jsonResponse.totalCount || 0; - if (logs) { - model.addInstances(logs); - } - this.totalCount = count; - } - }); - this.httpClient.get(this.logsTypeMap[logsType].graphRequestName, this.getParams('graphFilters', {}, logsType)) - .subscribe((response: Response): void => { - const jsonResponse = response.json(), - model = this.logsTypeMap[logsType].graphModel; - model.clear(); - if (jsonResponse) { - const graphData = jsonResponse.graphData; - if (graphData) { - model.addInstances(graphData); - } - } - }); - if (logsType === 'auditLogs') { - this.httpClient.get('topAuditLogsResources', this.getParams('topResourcesFilters', { - field: 'resource' - }, logsType), { - number: this.topResourcesCount - }).subscribe((response: Response): void => { - const jsonResponse = response.json(); - if (jsonResponse) { - const data = jsonResponse.graphData; - if (data) { - this.topResourcesGraphData = this.parseAuditLogsTopData(data); - } - } - }); - this.httpClient.get('topAuditLogsResources', this.getParams('topResourcesFilters', { - field: 'reqUser' - }, logsType), { - number: this.topUsersCount - }).subscribe((response: Response): void => { - const jsonResponse = response.json(); - if (jsonResponse) { - const data = jsonResponse.graphData; - if (data) { - this.topUsersGraphData = this.parseAuditLogsTopData(data); - } - } - }); - } - } else { - console.error(`Logs Type does not supported: ${logsType}`); - } - } - - loadLogContext(id: string, hostName: string, componentName: string, scrollType: ScrollType = ''): void { - const params = { - id: id, - host_name: hostName, - component_name: componentName, - scrollType: scrollType - }; - this.httpClient.get('serviceLogsTruncated', params).subscribe((response: Response): void => { - const jsonResponse = response.json(); - if (!scrollType) { - this.serviceLogsTruncatedStorage.clear(); - } - if (jsonResponse) { - const logs = jsonResponse.logList; - if (logs) { - if (scrollType === 'before') { - this.serviceLogsTruncatedStorage.addInstancesToStart(logs); - } else { - this.serviceLogsTruncatedStorage.addInstances(logs); - } - if (!scrollType) { - this.appState.setParameters({ - isServiceLogContextView: true, - activeLog: params - }); - } - } - } - }); - } - - private parseAuditLogsTopData(data: BarGraph[]): 
HomogeneousObject> { - return data.reduce(( - currentObject: HomogeneousObject>, currentItem: BarGraph - ): HomogeneousObject> => Object.assign(currentObject, { - [currentItem.name]: currentItem.dataCount.reduce( - (currentDataObject: HomogeneousObject, currentDataItem: CommonEntry): HomogeneousObject => { - return Object.assign(currentDataObject, { - [currentDataItem.name]: currentDataItem.value - }); - }, {} - ) - }), {}); - } - - private getParams( - filtersMapName: string, additionalParams: HomogeneousObject = {}, logsType: LogsType = this.activeLogsType - ): HomogeneousObject { - const params = {}; - const values = this.filtersForm.getRawValue(); - this.logsTypeMap[logsType][filtersMapName].forEach((key: string): void => { - const inputValue = values[key]; - const paramNames = this.filtersMapping[key]; - paramNames.forEach((paramName: string): void => { - let value; - const valueGetter = this.valueGetters[paramName] || this.logsFilteringUtilsService.defaultValueGetterFromListItem; - if (inputValue === null || inputValue === undefined) { - - } else { - if (paramName === 'from') { - value = valueGetter(inputValue, params['to']); - } else { - value = valueGetter(inputValue); - } - if (value != null && value !== '') { - params[paramName] = value; - } - } - }); - }, this); - return Object.assign({}, params, additionalParams); - } - - getGraphData(data: BarGraph[], keys?: string[]): HomogeneousObject> { - const graphData = {}; - data.forEach(type => { - const name = type.name; - type.dataCount.forEach(entry => { - const timeStamp = new Date(entry.name).valueOf(); - if (!graphData[timeStamp]) { - const initialValue = {}; - if (keys) { - keys.forEach((key: string) => initialValue[key] = 0); - } - graphData[timeStamp] = initialValue; - } - graphData[timeStamp][name] = Number(entry.value); - }); - }); - return graphData; - } - - loadColumnsNames(): void { - this.httpClient.get('serviceLogsFields').subscribe((response: Response): void => { - const jsonResponse = response.json(); - if (jsonResponse) { - this.serviceLogsFieldsStorage.addInstances(jsonResponse); - } - }); - this.httpClient.get('auditLogsFields').subscribe((response: Response): void => { - const jsonResponse: AuditFieldsDefinitionSet = response.json(); - if (jsonResponse) { - this.auditLogsFieldsStorage.setParameters(jsonResponse); - } - }); - } - - startCaptureTimer(): void { - this.startCaptureTime = new Date().valueOf(); - const maxCaptureTimeInSeconds = this.maximumCaptureTimeLimit / 1000; - Observable.timer(0, 1000).takeUntil(this.stopTimer).subscribe((seconds: number): void => { - this.captureSeconds = seconds; - if (this.captureSeconds >= maxCaptureTimeInSeconds) { - this.stopCaptureTimer(); - } - }); - } - - stopCaptureTimer(): void { - const autoRefreshIntervalSeconds = this.autoRefreshInterval / 1000; - this.stopCaptureTime = new Date().valueOf(); - this.captureSeconds = 0; - this.stopTimer.next(); - Observable.timer(0, 1000).takeUntil(this.stopAutoRefreshCountdown).subscribe((seconds: number): void => { - this.autoRefreshRemainingSeconds = autoRefreshIntervalSeconds - seconds; - if (!this.autoRefreshRemainingSeconds) { - this.stopAutoRefreshCountdown.next(); - this.captureTimeRangeCache = this.filtersForm.controls.timeRange.value; - this.setCustomTimeRange(this.startCaptureTime, this.stopCaptureTime); - } - }); - } - - cancelCapture(): void { - this.stopTimer.next(); - this.stopAutoRefreshCountdown.next(); - this.autoRefreshRemainingSeconds = 0; - this.captureSeconds = 0; - } - - loadClusters(): void { - - } - - 
loadComponents(): Observable { - const requestComponentsData: Observable = this.httpClient.get('components'); - const requestComponentsName: Observable = this.httpClient.get('serviceComponentsName'); - const requests = Observable.combineLatest(requestComponentsName, requestComponentsData); - requests.subscribe(([componentsNamesResponse, componentsDataResponse]: Response[]) => { - const componentsNames = componentsNamesResponse.json(); - const componentsData = componentsDataResponse.json(); - const components = componentsData && componentsData.vNodeList.map((item): NodeItem => { - const component = componentsNames.metadata.find(componentItem => componentItem.name === item.name); - return Object.assign(item, { - label: component && (component.label || item.name), - group: component && component.group && { - name: component.group, - label: componentsNames.groups[component.group] - }, - value: item.logLevelCount.reduce((currentValue: number, currentItem): number => { - return currentValue + Number(currentItem.value); - }, 0) - }); - }); - if (components) { - this.utils.pushUniqueValues(this.filters.components.options, components.map(node => this.utils.getListItemFromNode(node, true) )); - this.componentsStorage.addInstances(components); - } - }); - return requests; - } - - setComponentsFilters = (components): void => { - this.filters.components.options = []; - if (components) { - this.utils.pushUniqueValues( - this.filters.components.options, - components.map(node => this.utils.getListItemFromNode(node, true)) - ); - } - } - - setClustersFilters = (clustersNames: string[]): void => { - this.filters.clusters.options = []; - if (clustersNames) { - this.utils.pushUniqueValues(this.filters.clusters.options, clustersNames.map(this.utils.getListItemFromString)); - } - } - - setHostsFilters = (hosts): void => { - this.filters.hosts.options = []; - if (hosts) { - this.utils.pushUniqueValues(this.filters.hosts.options, hosts.map(this.utils.getListItemFromNode)); - } - } - - loadHosts(): Observable { - const request = this.httpClient.get('hosts'); - request.subscribe((response: Response): void => { - const jsonResponse = response.json(), - hosts = jsonResponse && jsonResponse.vNodeList; - if (hosts) { - this.utils.pushUniqueValues(this.filters.hosts.options, hosts.map(this.utils.getListItemFromNode)); - this.hostsStorage.addInstances(hosts); - } - }); - return request; - } - - setCustomTimeRange(startTime: number, endTime: number): void { - const startTimeMoment = moment(startTime); - const endTimeMoment = moment(endTime); - const diff = endTimeMoment.diff(startTimeMoment); - if (diff > 0) { - this.filtersForm.controls.timeRange.setValue({ - label: this.customTimeRangeKey, - value: { - type: 'CUSTOM', - start: moment(startTime), - end: moment(endTime) - } - }); - } else { - this.notificationService.addNotification({ - title: 'filter.timeRange', - message: 'filter.timeRange.error.tooShort', - type: NotificationType.ALERT - }); - } - } - - getFiltersData(listType: string): object { - const itemsList = this.logsTypeMap[listType].listFilters; - const keys = Object.keys(this.filters).filter((key: string): boolean => itemsList.indexOf(key) > -1); - return keys.reduce((currentObject: object, key: string): object => { - return Object.assign(currentObject, { - [key]: this.logsFilteringUtilsService.defaultFilterSelections[key] - }); - }, {}); - } - - isFilterConditionDisplayed(key: string): boolean { - return this.logsTypeMap[this.activeLogsType].listFilters.indexOf(key) > -1 - && 
Boolean(this.filtersForm.controls[key]); - } - - updateSelectedColumns(columnNames: string[], logsType: string): void { - const functionName: string = logsType === 'auditLogs' ? 'mapFieldSetGroup' : 'mapCollection'; - const modifierFn: Function = (item) => Object.assign({}, item, { - visible: columnNames.indexOf(item.name) > -1 - }); - const params = [modifierFn, logsType === 'auditLogs' ? this.activeAuditGroup : undefined]; - this.logsTypeMap[logsType].fieldsModel[functionName](...params); - } - - openServiceLog(log: ServiceLog): void { - this.componentsService.findInCollection(component => (component.name || component.label) === log.type) - .map(component => component ? component.label || component.name : name) - .first() - .subscribe((componentName) => { - const tab = { - id: log.id || `${log.host}-${log.type}`, - isCloseable: true, - isActive: false, - label: `${log.host} >> ${componentName || log.type}`, - activeFilters: Object.assign({}, JSON.parse(JSON.stringify(this.filtersForm.value)), { - components: this.filters.components.options.filter((option: ListItem): boolean => { - return option.value === log.type; - }), - hosts: this.filters.hosts.options.filter((option: ListItem): boolean => { - return option.value === log.host; - }) - }), - appState: { - activeLogsType: 'serviceLogs', - isServiceLogsFileView: true, - activeLog: { - id: log.id, - host_name: log.host, - component_name: log.type - } - } - }; - this.tabsStorage.addInstance(tab); - this.router.navigate(['/logs', ...this.logsFilteringUtilsService.getNavigationForTab(tab)]); - }); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-filtering-utils.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-filtering-utils.service.spec.ts deleted file mode 100644 index e36cce6fcf3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-filtering-utils.service.spec.ts +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { TestBed, inject } from '@angular/core/testing'; - -import { LogsFilteringUtilsService } from './logs-filtering-utils.service'; -import {StoreModule} from '@ngrx/store'; -import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {serviceLogs} from '@app/services/storage/service-logs.service'; -import {hosts} from '@app/services/storage/hosts.service'; -import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {TranslationModules} from '@app/test-config.spec'; -import {tabs} from '@app/services/storage/tabs.service'; -import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service'; -import {clusters} from '@app/services/storage/clusters.service'; -import {auditLogs} from '@app/services/storage/audit-logs.service'; -import {appState} from '@app/services/storage/app-state.service'; -import {components} from '@app/services/storage/components.service'; -import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import {appSettings} from '@app/services/storage/app-settings.service'; -import {UtilsService} from '@app/services/utils.service'; - -describe('LogsFilteringUtilsService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - providers: [ - LogsFilteringUtilsService, - UtilsService - ] - }); - }); - - it('should be created', inject([LogsFilteringUtilsService], (service: LogsFilteringUtilsService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-filtering-utils.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-filtering-utils.service.ts deleted file mode 100644 index 89f8a6db2dc..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-filtering-utils.service.ts +++ /dev/null @@ -1,557 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import {Injectable} from '@angular/core'; -import {ListItem} from '@app/classes/list-item'; -import {CustomTimeRange, SearchBoxParameter, SortingListItem, TimeUnit, TimeUnitListItem} from '@app/classes/filtering'; -import * as moment from 'moment-timezone'; -import {HomogeneousObject} from '@app/classes/object'; -import {LogsType, SortingType} from '@app/classes/string'; -import {UtilsService} from '@app/services/utils.service'; -import { LogTypeTab } from '@app/classes/models/log-type-tab'; - -// @ToDo remove duplication, this options are in the LogsContainerService -export const timeRangeFilterOptions = [{ - label: 'filter.timeRange.7d', - value: { - type: 'LAST', - unit: 'd', - interval: 7 - }, - group: 0 - }, - { - label: 'filter.timeRange.30d', - value: { - type: 'LAST', - unit: 'd', - interval: 30 - }, - group: 0 - }, - { - label: 'filter.timeRange.60d', - value: { - type: 'LAST', - unit: 'd', - interval: 60 - }, - group: 0 - }, - { - label: 'filter.timeRange.90d', - value: { - type: 'LAST', - unit: 'd', - interval: 90 - }, - group: 0 - }, - { - label: 'filter.timeRange.6m', - value: { - type: 'LAST', - unit: 'M', - interval: 6 - }, - group: 0 - }, - { - label: 'filter.timeRange.1y', - value: { - type: 'LAST', - unit: 'y', - interval: 1 - }, - group: 0 - }, - { - label: 'filter.timeRange.2y', - value: { - type: 'LAST', - unit: 'y', - interval: 2 - }, - group: 0 - }, - { - label: 'filter.timeRange.5y', - value: { - type: 'LAST', - unit: 'y', - interval: 5 - }, - group: 0 - }, - { - label: 'filter.timeRange.yesterday', - value: { - type: 'PAST', - unit: 'd' - }, - group: 1 - }, - { - label: 'filter.timeRange.previousWeek', - value: { - type: 'PAST', - unit: 'w' - }, - group: 1 - }, - { - label: 'filter.timeRange.previousMonth', - value: { - type: 'PAST', - unit: 'M' - }, - group: 1 - }, - { - label: 'filter.timeRange.previousYear', - value: { - type: 'PAST', - unit: 'y' - }, - group: 1 - }, - { - label: 'filter.timeRange.today', - value: { - type: 'CURRENT', - unit: 'd' - }, - group: 1 - }, - { - label: 'filter.timeRange.thisWeek', - value: { - type: 'CURRENT', - unit: 'w' - }, - group: 1 - }, - { - label: 'filter.timeRange.thisMonth', - value: { - type: 'CURRENT', - unit: 'M' - }, - group: 1 - }, - { - label: 'filter.timeRange.thisYear', - value: { - type: 'CURRENT', - unit: 'y' - }, - group: 1 - }, - { - label: 'filter.timeRange.5min', - value: { - type: 'LAST', - unit: 'm', - interval: 5 - }, - group: 2 - }, - { - label: 'filter.timeRange.15min', - value: { - type: 'LAST', - unit: 'm', - interval: 15 - }, - group: 2 - }, - { - label: 'filter.timeRange.30min', - value: { - type: 'LAST', - unit: 'm', - interval: 30 - }, - group: 2 - }, - { - label: 'filter.timeRange.1hr', - value: { - type: 'LAST', - unit: 'h', - interval: 1 - }, - group: 2 - }, - { - label: 'filter.timeRange.3hr', - value: { - type: 'LAST', - unit: 'h', - interval: 3 - }, - group: 2 - }, - { - label: 'filter.timeRange.6hr', - value: { - type: 'LAST', - unit: 'h', - interval: 6 - }, - group: 2 - }, - { - label: 'filter.timeRange.12hr', - value: { - type: 'LAST', - unit: 'h', - interval: 12 - }, - group: 2 - }, - { - label: 'filter.timeRange.24hr', - value: { - type: 'LAST', - unit: 'h', - interval: 24 - }, - group: 2 - }]; - -@Injectable() -export class LogsFilteringUtilsService { - - readonly defaultFilterSelections = { - clusters: [], - timeRange: { - value: { - type: 'LAST', - unit: 'h', - interval: 1 - }, - label: 'filter.timeRange.1hr' - }, - components: [], - levels: [], - hosts: [], - auditLogsSorting: { 
- label: 'sorting.time.desc', - value: { - key: 'evtTime', - type: 'desc' - } - }, - serviceLogsSorting: { - label: 'sorting.time.desc', - value: { - key: 'logtime', - type: 'desc' - } - }, - pageSize: [{ - label: '100', - value: '100' - }], - page: 0, - query: [], - users: [], - isUndoOrRedo: false - }; - - constructor( - private utilsService: UtilsService - ) { } - - getTimeRandeOptionsByGroup() { - return timeRangeFilterOptions.reduce((groups: any, item: any) => { - const groupItem = {...item}; - delete groupItem.group; - groups[item.group] = groups[item.group] || []; - groups[item.group].push(groupItem); - return groups; - }, []); - } - - getStartTimeMomentFromTimeUnitListItem(selection: TimeUnitListItem, end: moment.Moment, timeZone: string): moment.Moment | undefined { - let time; - const value = selection && selection.value; - if (value) { - const endTime = end.clone(); - switch (value.type) { - case 'LAST': - time = endTime.subtract(value.interval, value.unit); - break; - case 'CURRENT': - time = moment().tz(timeZone).startOf(value.unit); - break; - case 'PAST': - time = endTime.startOf(value.unit); - break; - case 'CUSTOM': - time = value.start; - break; - default: - break; - } - } - return time; - } - - getStartTimeFromTimeUnitListItem(selection: TimeUnitListItem, current: string, timeZone: string): string { - const startMoment = this.getStartTimeMomentFromTimeUnitListItem(selection, moment(moment(current).valueOf()), timeZone); - return startMoment ? startMoment.toISOString() : ''; - } - - getEndTimeMomentFromTimeUnitListItem(selection: TimeUnitListItem, timeZone: string): moment.Moment | undefined { - let time; - const value = selection && selection.value; - if (value) { - switch (value.type) { - case 'LAST': - time = moment(); - break; - case 'CURRENT': - time = moment().tz(timeZone).endOf(value.unit); - break; - case 'PAST': - time = moment().tz(timeZone).startOf(value.unit).millisecond(-1); - break; - case 'CUSTOM': - time = value.end; - break; - default: - break; - } - } - return time; - } - - getEndTimeFromTimeUnitListItem(selection: TimeUnitListItem, timeZone: string): string { - const endMoment = this.getEndTimeMomentFromTimeUnitListItem(selection, timeZone); - return endMoment ? endMoment.toISOString() : ''; - } - - getQuery(isExclude: boolean): (value: SearchBoxParameter[]) => string { - return (value: SearchBoxParameter[]): string => { - let parameters; - if (value && value.length) { - parameters = value.filter((item: SearchBoxParameter): boolean => { - return item.isExclude === isExclude; - }).map((parameter: SearchBoxParameter): HomogeneousObject => { - return { - [parameter.name]: parameter.value.replace(/\s/g, '+') - }; - }); - } - return parameters && parameters.length ? JSON.stringify(parameters) : ''; - }; - } - - getIncludeQuery(value: SearchBoxParameter[]) { - return this.getQuery(false)(value); - } - - getExcludeQuery(value: SearchBoxParameter[]) { - return this.getQuery(true)(value); - } - - getSortTypeFromSortingListItem(selection: SortingListItem[] = []): SortingType { - return selection && selection[0] && selection[0].value ? selection[0].value.type : 'desc'; - } - - getSortKeyFromSortingListItem(selection: SortingListItem[] = []): string { - return selection && selection[0] && selection[0].value ? selection[0].value.key : ''; - } - - getPage(value: number | undefined): string | undefined { - return typeof value === 'undefined' ? 
value : value.toString(); - } - - defaultValueGetterFromListItem(selection: ListItem | ListItem[] | null): string { - if (Array.isArray(selection)) { - return selection.map((item: ListItem): any => item.value).join(','); - } else if (selection) { - return selection.value; - } else { - return ''; - } - } - - getParamsFromActiveFilter(activeFilter: any, activeLogsType: LogsType): {[key: string]: string} { - const {...filters} = activeFilter; - delete filters.isUndoOrRedo; - return Object.keys(filters).reduce((currentParams, key) => { - const newParams = { - ...currentParams - }; - if (filters[key] !== null && filters[key] !== undefined) { - switch (key) { - case 'auditLogsSorting': - case 'serviceLogsSorting': - if (`${activeLogsType}Sorting` === key) { - const item = Array.isArray(filters[key]) ? filters[key][0] : filters[key]; - const itemValue = item && item.value; - if (itemValue) { - Object.assign(newParams, { - sortingKey: itemValue.key, - sortingType: itemValue.type, - }); - } - } - break; - case 'query' : - if (filters[key] && Object.keys(filters[key]).length) { - Object.assign(newParams, { - [key]: JSON.stringify(filters[key]) - }); - } - break; - case 'timeRange' : - if (filters[key].value) { - const timeRangeValue: TimeUnit | CustomTimeRange = filters[key].value; - const timeRangeParams: {[key: string]: string} = { - timeRangeType: timeRangeValue.type - }; - if (timeRangeValue.type === 'CUSTOM') { - Object.assign(timeRangeParams, { - timeRangeStart: timeRangeValue.start.toISOString(), - timeRangeEnd: timeRangeValue.end.toISOString() - }); - } else { - Object.assign(timeRangeParams, { - timeRangeUnit: timeRangeValue.unit - }); - if (timeRangeValue.interval !== undefined) { - Object.assign(timeRangeParams, { - timeRangeInterval: timeRangeValue.interval - }); - } - } - Object.assign(newParams, timeRangeParams); - } - break; - default: - const customMethodName: string = 'get' + (key.charAt(0).toUpperCase()) + key.slice(1); - const valueGetter: Function = ( - this[customMethodName] || this.defaultValueGetterFromListItem - ); - const paramValue = valueGetter(filters[key]); - if (paramValue !== null && paramValue !== undefined && paramValue !== '') { - Object.assign(newParams, { - [key]: paramValue - }); - } - break; - } - } - return newParams; - }, {}); - } - - private getListItemsFromListParamValue = (value: string): ListItem[] => { - return value ? 
value.split(',').map(this.getListItemFromParamValue) : []; - } - - private getListItemFromParamValue = (value: string): ListItem => { - return Object.assign(this.utilsService.getListItemFromString(value), { - isChecked: true - }); - } - - getFilterFromParams(params: {[key: string]: string}, activeLogsType: LogsType): {[key: string]: any} { - const filter: {[key: string]: any} = {}; - const paramsKeys: string[] = Object.keys(params); - return paramsKeys.reduce((currentFilter, key) => { - let newFilter = {}; - switch (key) { - case 'clusters': - case 'components': - case 'hosts': - case 'levels': - case 'pageSize': - case 'users': - newFilter = { - [key]: this.getListItemsFromListParamValue(params[key]) - }; - break; - case 'page' : - newFilter = { - [key]: parseInt(params[key], 0) - }; - break; - case 'timeRangeType': - const type = params.timeRangeType || 'LAST'; - const interval = params.timeRangeInterval && parseInt(params.timeRangeInterval, 0); - const unit = params.timeRangeUnit; - const timeRangeFilterValue: {[key: string]: any} = {type, unit, interval}; - let timeRangeFilterLabel = 'filter.timeRange.'; - const timeRangeOption = timeRangeFilterOptions.find((option: any) => { - const value = option.value; - return value.type === type && value.unit === timeRangeFilterValue.unit && value.interval === timeRangeFilterValue.interval; - }); - if (timeRangeOption) { - timeRangeFilterLabel = timeRangeOption.label; - } else if (params.timeRangeType !== 'CUSTOM') { - Object.assign(timeRangeFilterValue, { - unit: params.timeRangeUnit, - interval: parseInt(params.timeRangeInterval, 0) - }); - timeRangeFilterLabel += `${timeRangeFilterValue.interval}${timeRangeFilterValue.unit}`; - } else { - Object.assign(timeRangeFilterValue, { - start: moment(params.timeRangeStart), - end: moment(params.timeRangeEnd) - }); - timeRangeFilterLabel += 'custom'; - } - newFilter = { - timeRange: { - label: timeRangeFilterLabel, - value: timeRangeFilterValue - } - }; - break; - case 'sortingKey' : - const sortingKey = `${activeLogsType}Sorting`; - newFilter = { - [sortingKey]: { - label: `sorting.time.${params.sortingType}`, - value: { - key: params.sortingKey, - type: params.sortingType - } - } - }; - break; - case 'query' : - newFilter = { - query: JSON.parse(params[key]) - }; - break; - } - return {...currentFilter, ...newFilter}; - }, filter); - } - - getNavigationForTab(tab: LogTypeTab): any[] { - const logsType = tab.appState && tab.appState.activeLogsType; - return [tab.id, this.getParamsFromActiveFilter(tab.activeFilters || {}, logsType)]; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/mock-api-data.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/mock-api-data.service.spec.ts deleted file mode 100644 index 5f6a2e139c4..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/mock-api-data.service.spec.ts +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
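// Illustrative sketch, not taken from the removed sources: the kind of round trip
// getParamsFromActiveFilter() and getFilterFromParams() above perform, with made-up
// values; the exact key types in the real params object may differ slightly.
const activeFilters = {
  timeRange: {label: 'filter.timeRange.1hr', value: {type: 'LAST', unit: 'h', interval: 1}},
  clusters: [{label: 'cl1', value: 'cl1', isChecked: true}]
};
// serialized (roughly) into route params for router.navigate(['/logs', tabId, params]):
const params = {timeRangeType: 'LAST', timeRangeUnit: 'h', timeRangeInterval: 1, clusters: 'cl1'};
// ...and getFilterFromParams(params, 'serviceLogs') rebuilds an equivalent filter
// object, resolving the 'filter.timeRange.1hr' label back from timeRangeFilterOptions.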
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {TestBed, inject} from '@angular/core/testing'; -import {MockApiDataService} from './mock-api-data.service'; - -describe('MockApiDataService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [MockApiDataService] - }); - }); - - it('should create service', inject([MockApiDataService], (service: MockApiDataService) => { - expect(service).toBeTruthy(); - })); - - describe('#parseUrl()', () => { - const cases = [ - { - url: 'root', - base: '/', - collectionName: 'root', - query: '', - title: 'one-level depth url, no query params' - }, - { - url: 'root?param0=1&param1=2', - base: '/', - collectionName: 'root', - query: 'param0=1&param1=2', - title: 'one-level depth url with query params' - }, - { - url: 'root/resources/collection', - base: 'root/resources/', - collectionName: 'collection', - query: '', - title: 'more than one-level depth url, no query params' - }, - { - url: 'root/resources/collection?param0=1&param1=2', - base: 'root/resources/', - collectionName: 'collection', - query: 'param0=1&param1=2', - title: 'more than one-level depth url with query params' - } - ]; - - cases.forEach(test => { - describe(test.title, () => { - it('base', inject([MockApiDataService], (service: MockApiDataService) => { - expect(service.parseUrl(test.url).base).toEqual(test.base); - })); - - it('collectionName', inject([MockApiDataService], (service: MockApiDataService) => { - expect(service.parseUrl(test.url).collectionName).toEqual(test.collectionName); - })); - - it('query', inject([MockApiDataService], (service: MockApiDataService) => { - expect(service.parseUrl(test.url).query.toString()).toEqual(test.query); - })); - }); - }); - }); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/mock-api-data.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/mock-api-data.service.ts deleted file mode 100644 index 92707d07de3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/mock-api-data.service.ts +++ /dev/null @@ -1,285 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
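// Sketch for orientation, not part of the removed service: the path splitting that
// the spec above exercises. The real parseUrl() delegates to
// InMemoryBackendService.getLocation() and returns an URLSearchParams instance for
// the query string; this standalone helper only mirrors the base/collection split.
function splitPath(pathname: string): {base: string, collectionName: string} {
  const parts = pathname.replace(/^\//, '').split('/');
  return {
    base: parts.slice(0, parts.length - 1).join('/') + '/',
    collectionName: parts[parts.length - 1]
  };
}
// splitPath('root')                => {base: '/', collectionName: 'root'}
// splitPath('api/v1/service/logs') => {base: 'api/v1/service/', collectionName: 'logs'}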
- */ - -import {URLSearchParams, Response, ResponseOptions} from '@angular/http'; -import {InMemoryDbService, InMemoryBackendService, createErrorResponse} from 'angular-in-memory-web-api'; -import {Observable} from 'rxjs/Observable'; -import {Subscriber} from 'rxjs/Subscriber'; -import 'rxjs/add/operator/delay'; -import * as moment from 'moment'; -import {mockDataGet} from '@mockdata/mock-data-get'; -import {mockDataPost} from '@mockdata/mock-data-post'; - -export class MockBackendService extends InMemoryBackendService { - getLocation(url: string): any { - return super.getLocation(url); - } -} - -export class MockApiDataService implements InMemoryDbService { - - private filterByMessage = (value: string, filterValue: string): boolean => { - return value.toLowerCase().indexOf(filterValue.toLowerCase()) > -1; - } - - private filterByStartTime = (value: number, filterValue: number | string | Date | moment.Moment): boolean => { - return value >= moment(filterValue).valueOf(); - } - - private filterByEndTime = (value: number, filterValue: number | string | Date | moment.Moment): boolean => { - return value <= moment(filterValue).valueOf(); - } - - private readonly filterMap = { - 'api/v1/service/logs': { - pathToCollection: 'logList', - totalCountKey: 'totalCount', - filters: { - clusters: { - key: 'cluster', - isValuesList: true - }, - mustBe: { - key: 'type', - isValuesList: true - }, - level: { - key: 'level', - isValuesList: true - }, - includeMessage: { - key: 'log_message', - filterFunction: this.filterByMessage - }, - from: { - key: 'logtime', - filterFunction: this.filterByStartTime - }, - to: { - key: 'logtime', - filterFunction: this.filterByEndTime - }, - hostList: { - key: 'host', - isValuesList: true - } - } - }, - 'api/v1/audit/logs': { - pathToCollection: 'logList', - totalCountKey: 'totalCount', - filters: { - clusters: { - key: 'cluster', - isValuesList: true - }, - includeMessage: { - key: 'log_message', - filterFunction: this.filterByMessage - }, - from: { - key: 'evtTime', - filterFunction: this.filterByStartTime - }, - to: { - key: 'evtTime', - filterFunction: this.filterByEndTime - }, - userList: { - key: 'reqUser', - isValuesList: true - } - } - } - }; - - parseUrl(url: string): any { - const urlLocation = MockBackendService.prototype.getLocation(url), - query = urlLocation.search && new URLSearchParams(urlLocation.search.substr(1), { - encodeKey: key => key, - encodeValue: value => value - }), - splitUrl = urlLocation.pathname.substr(1).split('/'), - urlPartsCount = splitUrl.length, - collectionName = splitUrl[urlPartsCount - 1], - base = splitUrl.slice(0, urlPartsCount - 1).join('/') + '/'; - return { - base: base, - collectionName: collectionName, - query: query - }; - } - - private findDataByUrlPatter(path: string, mockDataObj: {[key: string]: any}): {[key: string]: any} | undefined | Function { - const paths: string[] = Object.keys(mockDataObj); - const matchedPath: string = paths.find((key: string): boolean => { - const test: RegExp = new RegExp(key); - return test.test(path); - }); - return mockDataObj[matchedPath]; - } - - /** - * The goal here is to check if the given real api url should be always POST or not.\ - * See https://issues.apache.org/jira/browse/AMBARI-23779 - * @param {string} url The full url for the api end point. 
- * @returns {boolean} - */ - private shouldTurnGetToPost(url: string): boolean { - return /(audit|service)/.test(url); - } - - get(interceptorArgs: any): Observable { - const query = interceptorArgs.requestInfo.query; - const path = interceptorArgs.requestInfo.base + interceptorArgs.requestInfo.collectionName; - if (query && query.paramsMap.has('static') && interceptorArgs.passThruBackend) { - return interceptorArgs.passThruBackend.createConnection(interceptorArgs.requestInfo.req).response; - } else { - let allData = mockDataGet[path]; - if (!allData) { - allData = this.findDataByUrlPatter(path, mockDataGet); - } - if (typeof allData === 'function') { - try { - allData = allData(query, interceptorArgs.requestInfo.req); - } catch (error) { - return new Observable((subscriber: Subscriber) => subscriber.error( - new Response(createErrorResponse( - interceptorArgs.requestInfo.req, 500, error - ))) - ); - } - } - const is404 = !allData; - - if (is404) { - return new Observable((subscriber: Subscriber) => subscriber.error( - new Response(createErrorResponse( - interceptorArgs.requestInfo.req, 404, 'Not found' - ))) - ); - } else { - let filteredData; - const filterMapItem = this.filterMap[path]; - if (query && filterMapItem) { - filteredData = {}; - const pathToCollection = filterMapItem.pathToCollection, - collection = allData[pathToCollection]; - let filteredCollection = collection.filter(item => { - let result = true; - query.paramsMap.forEach((value, key) => { - const paramValue = decodeURIComponent(value[0]), - paramFilter = filterMapItem.filters[key], - paramValuesList = paramFilter && paramFilter.isValuesList && paramValue ? paramValue.split(',') : [], - currentValue = paramFilter && item[paramFilter.key]; - if ( - paramFilter && ((paramFilter.filterFunction && !paramFilter.filterFunction(currentValue, paramValue)) || - (!paramFilter.filterFunction && !paramFilter.isValuesList && currentValue !== paramValue) || - (!paramFilter.filterFunction && paramFilter.isValuesList && paramValuesList.indexOf(currentValue) === -1)) - ) { - result = false; - } - }); - return result; - }); - if (query.paramsMap.has('sortBy') && query.paramsMap.has('sortType')) { - const sortKey = query.paramsMap.get('sortBy')[0], - sortType = query.paramsMap.get('sortType')[0]; - filteredCollection.sort((a, b) => { - const itemA = a[sortKey], - itemB = b[sortKey]; - let ascResult; - if (itemA > itemB) { - ascResult = 1; - } else if (itemA < itemB) { - ascResult = -1; - } else { - ascResult = 0; - } - return ascResult * Math.pow(-1, Number(sortType === 'desc')); - }); - } - if (filterMapItem.totalCountKey) { - filteredData[filterMapItem.totalCountKey] = filteredCollection.length; - } - if (query && query.paramsMap.has('page') && query.paramsMap.has('pageSize')) { - const page = parseInt(query.paramsMap.get('page')[0], 0), - pageSize = parseInt(query.paramsMap.get('pageSize')[0], 0); - filteredCollection = filteredCollection.slice(page * pageSize, (page + 1) * pageSize); - } - filteredData[pathToCollection] = filteredCollection; - } else { - filteredData = allData; - } - return new Observable((subscriber: Subscriber) => subscriber.next( - new Response(new ResponseOptions({ - status: 200, - body: filteredData - }))) - ); - } - } - } - - post(interceptorArgs: any): Observable { - const query = interceptorArgs.requestInfo.query; - const path = interceptorArgs.requestInfo.base + interceptorArgs.requestInfo.collectionName; - if (query && query.paramsMap.has('static') && interceptorArgs.passThruBackend) { - return 
interceptorArgs.passThruBackend.createConnection(interceptorArgs.requestInfo.req).response; - } - let responseBody = mockDataPost[path]; - if (!responseBody) { - responseBody = this.findDataByUrlPatter(path, mockDataPost); - } - if (typeof responseBody === 'function') { - try { - responseBody = responseBody(query, interceptorArgs.requestInfo.req); - } catch (error) { - return new Observable((subscriber: Subscriber) => subscriber.error( - new Response(createErrorResponse( - interceptorArgs.requestInfo.req, 500, error - ))) - ); - } - } - const is404 = !responseBody; - - if (is404) { - return new Observable((subscriber: Subscriber) => subscriber.error( - new Response(createErrorResponse( - interceptorArgs.requestInfo.req, 404, 'Not found' - ))) - ); - } else { - return new Observable((subscriber: Subscriber) => subscriber.next( - new Response(new ResponseOptions({ - status: 200, - body: responseBody - }))) - ); - } - } - - put(interceptorArgs: any): Observable { - return this.post(interceptorArgs); - } - - createDb() { - return {}; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/routing-utils.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/routing-utils.service.spec.ts deleted file mode 100644 index ced86f7dac6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/routing-utils.service.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { TestBed, inject } from '@angular/core/testing'; - -import { RoutingUtilsService } from './routing-utils.service'; - -describe('RoutingUtilsService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [RoutingUtilsService] - }); - }); - - it('should be created', inject([RoutingUtilsService], (service: RoutingUtilsService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/routing-utils.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/routing-utils.service.ts deleted file mode 100644 index 21a5b776b49..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/routing-utils.service.ts +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
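The `get()` handler above turns a flat mock collection into something shaped like the real API response: it filters items according to the `filterMap` entry for the path, optionally sorts by `sortBy`/`sortType`, records a total count, and slices a `page`/`pageSize` window. A condensed synchronous sketch of that pipeline, with simplified field names and none of the Angular or RxJS wiring:

```typescript
// Condensed sketch of the filter -> sort -> count -> paginate pipeline used by the mock GET handler.
// Records and filter keys are invented; the real service drives this from its filterMap configuration.
interface LogEntry { level: string; host: string; logtime: number; log_message: string; }

interface Query {
  level?: string[];          // list-valued filter (like 'level' in filterMap)
  includeMessage?: string;   // substring filter (like filterByMessage)
  sortBy?: keyof LogEntry;
  sortType?: 'asc' | 'desc';
  page?: number;
  pageSize?: number;
}

function queryLogs(all: LogEntry[], q: Query): { totalCount: number; logList: LogEntry[] } {
  let result = all.filter(item =>
    (!q.level || q.level.indexOf(item.level) > -1) &&
    (!q.includeMessage || item.log_message.toLowerCase().indexOf(q.includeMessage.toLowerCase()) > -1)
  );
  if (q.sortBy && q.sortType) {
    const key = q.sortBy;
    const direction = q.sortType === 'desc' ? -1 : 1;
    result = result.slice().sort((a, b) => {
      const left = a[key] as any;
      const right = b[key] as any;
      return (left > right ? 1 : left < right ? -1 : 0) * direction;
    });
  }
  const totalCount = result.length; // counted before pagination, like the totalCountKey field
  if (q.page !== undefined && q.pageSize !== undefined) {
    result = result.slice(q.page * q.pageSize, (q.page + 1) * q.pageSize);
  }
  return { totalCount, logList: result };
}
```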
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { Injectable } from '@angular/core'; -import {ActivatedRouteSnapshot} from '@angular/router'; - -@Injectable() -export class RoutingUtilsService { - - constructor() { } - - getParamFromActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot, key: string) { - const params = routeSnapshot.params; - return params && params[key] !== undefined ? routeSnapshot.params[key] : ( - routeSnapshot.firstChild && this.getParamFromActivatedRouteSnapshot(routeSnapshot.firstChild, key) - ); - } - - getDataFromActivatedRouteSnapshot(routeSnapshot: ActivatedRouteSnapshot, key: string) { - const data = routeSnapshot && routeSnapshot.data; - return data && data[key] !== undefined ? routeSnapshot.data[key] : ( - routeSnapshot.firstChild && this.getDataFromActivatedRouteSnapshot(routeSnapshot.firstChild, key) - ); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/app-settings.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/app-settings.service.ts deleted file mode 100644 index cec2656a2fa..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/app-settings.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {defaultSettings} from '@app/classes/models/app-settings'; -import {AppStore, ObjectModelService, getObjectReducer} from '@app/classes/models/store'; - -export const modelName = 'appSettings'; - -@Injectable() -export class AppSettingsService extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const appSettings = getObjectReducer(modelName, defaultSettings); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/app-state.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/app-state.service.ts deleted file mode 100644 index df773fc11c2..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/app-state.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
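`RoutingUtilsService` above walks the `ActivatedRouteSnapshot` tree: if the requested key is missing from the current snapshot's `params` (or `data`), it recurses into `firstChild` until a value is found or the tree runs out. A framework-free sketch of the same traversal over a reduced snapshot shape (the interface below is a stand-in, not Angular's actual class):

```typescript
// Stand-in for the relevant part of Angular's ActivatedRouteSnapshot, for illustration only.
interface SnapshotLike {
  params: { [key: string]: any };
  firstChild: SnapshotLike | null;
}

// Mirrors getParamFromActivatedRouteSnapshot: prefer the current level, otherwise recurse into firstChild.
function getParam(snapshot: SnapshotLike, key: string): any {
  if (snapshot.params && snapshot.params[key] !== undefined) {
    return snapshot.params[key];
  }
  return snapshot.firstChild ? getParam(snapshot.firstChild, key) : undefined;
}

const root: SnapshotLike = {
  params: {},
  firstChild: { params: { activeTab: 'serviceLogs' }, firstChild: null }
};
console.log(getParam(root, 'activeTab')); // 'serviceLogs' (found one level down)
```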
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {initialState} from '@app/classes/models/app-state'; -import {AppStore, ObjectModelService, getObjectReducer} from '@app/classes/models/store'; - -export const modelName = 'appState'; - -@Injectable() -export class AppStateService extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const appState = getObjectReducer(modelName, initialState); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs-fields.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs-fields.service.ts deleted file mode 100644 index b061a53f77a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs-fields.service.ts +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, ObjectModelService, getObjectReducer} from '@app/classes/models/store'; -import {LogField} from "@app/classes/object"; -import {Observable} from "rxjs/Observable"; - -export const modelName = 'auditLogsFields'; - -export const enum ResponseRootProperties { - DEFAULTS = 'defaults', - OVERRIDES = 'overrides' -}; - -@Injectable() -export class AuditLogsFieldsService extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } - - /** - * The goal is to return with the proper fieldset for a given group/service and to return with the default fieldset - * when the group has no overrides. - * @param {string} group The name of the group/service - * @returns {Observable} - */ - getFieldSetForGroup(group: string): Observable { - return Observable.combineLatest(this.getParameter(ResponseRootProperties.DEFAULTS), this.getParameter(ResponseRootProperties.OVERRIDES)) - .map(([defaults, overrides]): LogField[] => { - return overrides[group] || defaults; - }); - } - - /** - * The goal is to update the given fieldset group with the given modifier function. It will map over the selected - * group. Right now we let to change the defaults fieldset. 
- * @param {Function} modifier Called by the map method. - * @param {string} group The service/group name owner of the fieldset - */ - mapFieldSetGroup(modifier: Function, group: string) { - Observable.combineLatest( - this.getParameter(ResponseRootProperties.DEFAULTS), - this.getParameter(ResponseRootProperties.OVERRIDES) - ).first().subscribe(([defaults, overrides]) => { - const fieldset = (overrides[group] || defaults).map(modifier); - const payload = group === ResponseRootProperties.DEFAULTS ? fieldset : Object.assign({}, overrides, { - [group]: fieldset - }); - this.setParameter( - group === ResponseRootProperties.DEFAULTS ? ResponseRootProperties.DEFAULTS : ResponseRootProperties.OVERRIDES, - payload - ); - }); - } - -} - -export const auditLogsFields = getObjectReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs-graph-data.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs-graph-data.service.ts deleted file mode 100644 index eeb2780dfbf..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs-graph-data.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'auditLogsGraphData'; - -@Injectable() -export class AuditLogsGraphDataService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const auditLogsGraphData = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs.service.ts deleted file mode 100644 index a467fc9f1ca..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/audit-logs.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
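`AuditLogsFieldsService` above keeps two branches of state, `defaults` and `overrides`, resolves the field set for a group by preferring `overrides[group]` and falling back to `defaults`, and `mapFieldSetGroup` applies a modifier to whichever set applies before writing it back to the right branch. Setting the observable plumbing aside, the core resolution can be sketched synchronously (the field shape and names here are simplified stand-ins):

```typescript
// Simplified, synchronous sketch of the defaults/overrides resolution in AuditLogsFieldsService.
// LogFieldSketch stands in for the real LogField interface.
interface LogFieldSketch { name: string; visible: boolean; }

interface FieldsState {
  defaults: LogFieldSketch[];
  overrides: { [group: string]: LogFieldSketch[] };
}

function fieldSetForGroup(state: FieldsState, group: string): LogFieldSketch[] {
  return state.overrides[group] || state.defaults;
}

function mapFieldSetGroup(
  state: FieldsState,
  group: string,
  modifier: (field: LogFieldSketch) => LogFieldSketch
): FieldsState {
  const mapped = fieldSetForGroup(state, group).map(modifier);
  return group === 'defaults'
    ? { ...state, defaults: mapped }                                   // changing the default field set itself
    : { ...state, overrides: { ...state.overrides, [group]: mapped } }; // per-service override
}
```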
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'auditLogs'; - -@Injectable() -export class AuditLogsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const auditLogs = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/cluster-selection.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/cluster-selection.service.ts deleted file mode 100644 index fb48e072cc7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/cluster-selection.service.ts +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import { - AppStore, getObjectReducer, - ObjectModelService -} from '@app/classes/models/store'; - -export const modelName = 'clusterSelections'; - -@Injectable() -export class ClusterSelectionService extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const clusterSelections = getObjectReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/clusters.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/clusters.service.ts deleted file mode 100644 index 35a07bec205..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/clusters.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'clusters'; - -@Injectable() -export class ClustersService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const clusters = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/components.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/components.service.ts deleted file mode 100644 index 1432f6afcbc..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/components.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'components'; - -@Injectable() -export class ComponentsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const components = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/graphs.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/graphs.service.ts deleted file mode 100644 index 8a5bb2b9b57..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/graphs.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'graphs'; - -@Injectable() -export class GraphsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const graphs = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/hosts.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/hosts.service.ts deleted file mode 100644 index acf7dda4113..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/hosts.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'hosts'; - -@Injectable() -export class HostsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const hosts = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/logs-state.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/logs-state.service.ts deleted file mode 100644 index f4dded87103..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/logs-state.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
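The storage services in this area (clusters, components, graphs, hosts, and the rest) all follow one pattern: a `modelName` constant, an `@Injectable` class that hands that name and the store to a shared `ObjectModelService`/`CollectionModelService` base, and a reducer produced by `getObjectReducer`/`getCollectionReducer` that is later registered in `reducers.service.ts`. Those factories are not part of this diff, so the sketch below only illustrates the idea of a name-keyed reducer factory; the action type strings and signature are assumptions, not the project's implementation:

```typescript
// Illustrative name-keyed reducer factory, echoing the getCollectionReducer(modelName, initialState?) usage above.
// This is not the project's implementation; it only shows why each model file exports both a service and a reducer.
interface Action { type: string; payload?: any; }

function getCollectionReducerSketch<T>(modelName: string, initialState: T[] = []) {
  return (state: T[] = initialState, action: Action): T[] => {
    switch (action.type) {
      case `ADD_${modelName}`:    // assumed action naming, for illustration only
        return [...state, ...action.payload];
      case `CLEAR_${modelName}`:
        return [];
      default:
        return state;
    }
  };
}

// One reducer per model, each scoped to its own slice by name:
export const clustersSketch = getCollectionReducerSketch<string>('clusters');
export const hostsSketch = getCollectionReducerSketch<{ name: string }>('hosts');
```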
- */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {defaultState} from '@app/classes/models/logs-state'; -import {AppStore, ObjectModelService, getObjectReducer} from '@app/classes/models/store'; - -export const modelName = 'logsState'; - -@Injectable() -export class LogsStateService extends ObjectModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const logsState = getObjectReducer(modelName, defaultState); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts deleted file mode 100644 index cd67461553f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {combineReducers} from '@ngrx/store'; -import {appSettings} from '@app/services/storage/app-settings.service'; -import {appState} from '@app/services/storage/app-state.service'; -import {auditLogs} from '@app/services/storage/audit-logs.service'; -import {clusters} from '@app/services/storage/clusters.service'; -import {components} from '@app/services/storage/components.service'; -import {graphs} from '@app/services/storage/graphs.service'; -import {hosts} from '@app/services/storage/hosts.service'; -import {serviceLogs} from '@app/services/storage/service-logs.service'; -import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service'; -import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {userConfigs} from '@app/services/storage/user-configs.service'; -import {tabs} from '@app/services/storage/tabs.service'; -import {clusterSelections} from '@app/services/storage/cluster-selection.service'; -import {logsState} from '@app/services/storage/logs-state.service'; -import {dataAvailabilityStates} from '@app/modules/app-load/stores/data-availability-state.store'; - -export const reducers = { - appSettings, - appState, - auditLogs, - auditLogsGraphData, - serviceLogs, - serviceLogsHistogramData, - serviceLogsTruncated, - graphs, - hosts, - userConfigs, - clusters, - components, - serviceLogsFields, - auditLogsFields, - tabs, - clusterSelections, - logsState, - dataAvailabilityStates -}; - -export function reducer(state: any, action: any) { - return (combineReducers(reducers))(state, action); -} diff --git 
a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-fields.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-fields.service.ts deleted file mode 100644 index 1440d8dfaf6..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-fields.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'serviceLogsFields'; - -@Injectable() -export class ServiceLogsFieldsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const serviceLogsFields = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-histogram-data.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-histogram-data.service.ts deleted file mode 100644 index 91ee94afb03..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-histogram-data.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
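`reducers.service.ts` above gathers every model reducer into a single map and passes it to ngrx's `combineReducers`, so each service owns exactly one slice of `AppStore`. A hand-rolled equivalent shows what that composition does (simplified; the real store uses `@ngrx/store` and typed state):

```typescript
// Hand-rolled illustration of what combineReducers does with the reducers map in reducers.service.ts.
interface Action { type: string; payload?: any; }
type Reducer<S> = (state: S | undefined, action: Action) => S;

function combineReducersSketch(reducerMap: { [slice: string]: Reducer<any> }): Reducer<{ [slice: string]: any }> {
  return (state = {}, action) => {
    const next: { [slice: string]: any } = {};
    Object.keys(reducerMap).forEach(slice => {
      // every slice reducer sees only its own part of the state
      next[slice] = reducerMap[slice](state[slice], action);
    });
    return next;
  };
}

// e.g. combineReducersSketch({ clusters: clustersReducer, hosts: hostsReducer })
// produces one root reducer over { clusters: ..., hosts: ... }, mirroring the AppStore shape.
```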
- */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'serviceLogsHistogramData'; - -@Injectable() -export class ServiceLogsHistogramDataService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const serviceLogsHistogramData = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts deleted file mode 100644 index 53b73bacaeb..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'serviceLogsTruncated'; - -@Injectable() -export class ServiceLogsTruncatedService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const serviceLogsTruncated = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs.service.ts deleted file mode 100644 index 0f4fa357dec..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'serviceLogs'; - -@Injectable() -export class ServiceLogsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const serviceLogs = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/tabs.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/tabs.service.ts deleted file mode 100644 index f3be0b74be0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/tabs.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {initialTabs} from '@app/classes/models/log-type-tab'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'tabs'; - -@Injectable() -export class TabsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const tabs = getCollectionReducer(modelName, initialTabs); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/user-configs.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/user-configs.service.ts deleted file mode 100644 index 3b6bb158912..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/user-configs.service.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -import {Injectable} from '@angular/core'; -import {Store} from '@ngrx/store'; -import {AppStore, CollectionModelService, getCollectionReducer} from '@app/classes/models/store'; - -export const modelName = 'userConfigs'; - -@Injectable() -export class UserConfigsService extends CollectionModelService { - constructor(store: Store) { - super(modelName, store); - } -} - -export const userConfigs = getCollectionReducer(modelName); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/tab.guard.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/tab.guard.spec.ts deleted file mode 100644 index 18b8a5897f1..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/tab.guard.spec.ts +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { TestBed, inject } from '@angular/core/testing'; - -import { TabGuard } from './tab.guard'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {tabs, TabsService} from '@app/services/storage/tabs.service'; -import {StoreModule} from '@ngrx/store'; -import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {serviceLogs} from '@app/services/storage/service-logs.service'; -import {hosts} from '@app/services/storage/hosts.service'; -import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {TranslationModules} from '@app/test-config.spec'; -import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service'; -import {clusters} from '@app/services/storage/clusters.service'; -import {auditLogs} from '@app/services/storage/audit-logs.service'; -import {appState} from '@app/services/storage/app-state.service'; -import {components} from '@app/services/storage/components.service'; -import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import {appSettings} from '@app/services/storage/app-settings.service'; -import { UtilsService } from '@app/services/utils.service'; - -describe('TabGuard', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs - }), - ...TranslationModules - ], - 
providers: [ - TabGuard, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - TabsService, - UtilsService - ] - }); - }); - - it('should ...', inject([TabGuard], (guard: TabGuard) => { - expect(guard).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/tab.guard.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/tab.guard.ts deleted file mode 100644 index 6b4801d0270..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/tab.guard.ts +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import {Injectable} from '@angular/core'; -import {CanActivate, ActivatedRouteSnapshot, RouterStateSnapshot, Router} from '@angular/router'; -import {Observable} from 'rxjs/Observable'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {TabsService} from '@app/services/storage/tabs.service'; -import {LogTypeTab} from '@app/classes/models/log-type-tab'; -import { LogsFilteringUtilsService } from '@app/services/logs-filtering-utils.service'; - -@Injectable() -export class TabGuard implements CanActivate { - - constructor ( - private routingUtilsService: RoutingUtilsService, - private router: Router, - private tabsStorageService: TabsService, - private logsFilteringUtilsService: LogsFilteringUtilsService - ) {} - - canActivate( - next: ActivatedRouteSnapshot, - state: RouterStateSnapshot): Observable | Promise | boolean { - const activeTabParam: string = this.routingUtilsService.getParamFromActivatedRouteSnapshot(state.root, 'activeTab'); - return this.tabsStorageService.getAll().switchMap((tabs: LogTypeTab[]) => { - if (!activeTabParam && tabs && tabs.length) { - const tab = tabs.find((currentTab: LogTypeTab) => currentTab.isActive); - if (tab) { - this.router.navigate(['/logs', ...this.logsFilteringUtilsService.getNavigationForTab(tab)]); - } - } - const canActivate: boolean = !!activeTabParam && !!tabs.find((tab: LogTypeTab) => tab.id === activeTabParam); - return Observable.of(canActivate); - }); - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/translate.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/translate.service.spec.ts deleted file mode 100644 index 1e999cfaace..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/translate.service.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
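`TabGuard` above makes two decisions: when the route carries no `activeTab` parameter it redirects to whichever stored tab is flagged active, and it only allows activation when the parameter matches a known tab id. Stripped of the Router and RxJS wiring, the decision itself reduces to the sketch below (the tab shape keeps only the fields the guard reads from `LogTypeTab`):

```typescript
// Decision logic of TabGuard without the Angular Router / RxJS plumbing.
interface TabSketch { id: string; isActive: boolean; }

interface GuardDecision {
  canActivate: boolean;
  redirectToTabId?: string; // the guard would navigate here instead of activating
}

function decide(activeTabParam: string | undefined, tabs: TabSketch[]): GuardDecision {
  if (!activeTabParam && tabs.length) {
    const active = tabs.find(tab => tab.isActive);
    return { canActivate: false, redirectToTabId: active && active.id };
  }
  const known = !!activeTabParam && tabs.some(tab => tab.id === activeTabParam);
  return { canActivate: known };
}

console.log(decide(undefined, [{ id: 'serviceLogs', isActive: true }]));  // redirect to 'serviceLogs'
console.log(decide('auditLogs', [{ id: 'serviceLogs', isActive: true }])); // canActivate: false (unknown tab)
```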
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { TestBed, inject } from '@angular/core/testing'; - -import { TranslateService } from './translate.service'; - -describe('TranslateService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [TranslateService] - }); - }); - - it('should be created', inject([TranslateService], (service: TranslateService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/translate.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/translate.service.ts deleted file mode 100644 index 77330b7a0cd..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/translate.service.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { Injectable } from '@angular/core'; -import {Http} from '@angular/http'; -import {TranslateHttpLoader} from '@ngx-translate/http-loader'; - -@Injectable() -export class TranslateService { - - static httpLoaderFactory(http: Http): TranslateHttpLoader { - // adding 'static' parameter to step over mock data request - return new TranslateHttpLoader(http, 'resources/assets/i18n/', '.json?static=true'); - } - - constructor() { } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/user-settings.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/user-settings.service.spec.ts deleted file mode 100644 index 8dce161f3a9..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/user-settings.service.spec.ts +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
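A small convention ties the two services together: `TranslateService.httpLoaderFactory` above appends `?static=true` to the i18n resource URL, and `MockApiDataService` checks `query.paramsMap.has('static')` to hand such requests to the pass-through backend instead of serving fixtures. An illustrative sketch of that convention (the handler names here are made up):

```typescript
// Illustration of the '?static=true' convention: requests flagged as static bypass the mock fixtures.
// serveMock and passThru are stand-ins for the mock handler and the in-memory-web-api pass-through backend.
function handleRequest(url: string, serveMock: (url: string) => any, passThru: (url: string) => any): any {
  const query = new URLSearchParams(url.split('?')[1] || '');
  return query.has('static') ? passThru(url) : serveMock(url);
}

// e.g. 'resources/assets/i18n/en.json?static=true' goes to the real HTTP backend,
// while 'api/v1/service/logs?level=ERROR' is answered from mock data.
```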
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {TestBed, inject} from '@angular/core/testing'; -import {MockHttpRequestModules, TranslationModules} from "@app/test-config.spec"; -import {StoreModule} from '@ngrx/store'; -import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service'; -import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service'; -import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service'; -import {AuditLogsGraphDataService, auditLogsGraphData} from '@app/services/storage/audit-logs-graph-data.service'; -import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service'; -import { - ServiceLogsHistogramDataService, serviceLogsHistogramData -} from '@app/services/storage/service-logs-histogram-data.service'; -import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service'; -import {AppStateService, appState} from '@app/services/storage/app-state.service'; -import {ClustersService, clusters} from '@app/services/storage/clusters.service'; -import {ComponentsService, components} from '@app/services/storage/components.service'; -import {HostsService, hosts} from '@app/services/storage/hosts.service'; -import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service'; -import {TabsService, tabs} from '@app/services/storage/tabs.service'; -import {LogsContainerService} from '@app/services/logs-container.service'; -import {UtilsService} from '@app/services/utils.service'; - -import {UserSettingsService} from './user-settings.service'; -import {ClusterSelectionService} from '@app/services/storage/cluster-selection.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {RoutingUtilsService} from '@app/services/routing-utils.service'; -import {LogsFilteringUtilsService} from '@app/services/logs-filtering-utils.service'; -import {LogsStateService} from '@app/services/storage/logs-state.service'; -import {NotificationsService} from 'angular2-notifications/src/notifications.service'; -import {NotificationService} from '@modules/shared/services/notification.service'; - -import { dataAvailabilityStates, DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; - -describe('UserSettingsService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - imports: [ - RouterTestingModule, - StoreModule.provideStore({ - auditLogs, - serviceLogs, - auditLogsFields, - auditLogsGraphData, - serviceLogsFields, - serviceLogsHistogramData, - appSettings, - appState, - clusters, - components, - hosts, - serviceLogsTruncated, - tabs, - dataAvailabilityStates - }), - ...TranslationModules - ], - providers: [ - ...MockHttpRequestModules, - UserSettingsService, - LogsContainerService, - UtilsService, - AuditLogsService, - ServiceLogsService, - AuditLogsFieldsService, - AuditLogsGraphDataService, - ServiceLogsFieldsService, - ServiceLogsHistogramDataService, - AppSettingsService, - AppStateService, - ClustersService, - ComponentsService, 
- HostsService, - ServiceLogsTruncatedService, - TabsService, - ClusterSelectionService, - RoutingUtilsService, - LogsFilteringUtilsService, - LogsStateService, - NotificationsService, - NotificationService, - DataAvailabilityStatesStore - ] - }); - }); - - it('should be created', inject([UserSettingsService], (service: UserSettingsService) => { - expect(service).toBeTruthy(); - })); -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/user-settings.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/user-settings.service.ts deleted file mode 100644 index 2b4de0aa3cf..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/user-settings.service.ts +++ /dev/null @@ -1,197 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { Injectable } from '@angular/core'; -import { FormGroup, FormControl } from '@angular/forms'; -import { Response } from '@angular/http'; -import { HomogeneousObject, LogLevelObject } from '@app/classes/object'; -import { LevelOverridesConfig, LogIndexFilterComponentConfig } from '@app/classes/settings'; -import { LogLevel } from '@app/classes/string'; -import { Filter } from '@app/classes/models/filter'; -import { LogsContainerService } from '@app/services/logs-container.service'; -import { HttpClientService } from '@app/services/http-client.service'; -import { UtilsService } from '@app/services/utils.service'; -import { AppSettingsService } from '@app/services/storage/app-settings.service'; -import { TranslateService } from '@ngx-translate/core'; -import { NotificationService } from '@modules/shared/services/notification.service'; -import { DataAvailabilityStatesStore } from '@app/modules/app-load/stores/data-availability-state.store'; -import { DataAvailabilityValues } from '@app/classes/string'; - -@Injectable() -export class UserSettingsService { - - settingsFormGroup: FormGroup = new FormGroup({ - logIndexFilter: new FormControl() - }); - - currentValues = { - logIndexFilter: {} - }; - - readonly levelNames = this.logsContainer.logLevels.map((level: LogLevelObject): LogLevel => level.name); - - constructor( - private logsContainer: LogsContainerService, - private httpClient: HttpClientService, - private utils: UtilsService, - private settingsStorage: AppSettingsService, - private translateService: TranslateService, - private notificationService: NotificationService, - private dataAvailablilityStore: DataAvailabilityStatesStore - ) { - this.dataAvailablilityStore.setParameter('logIndexFilter', DataAvailabilityValues.NOT_AVAILABLE); - settingsStorage.getParameter('logIndexFilters').subscribe((filters: HomogeneousObject>): void => { - const configs = this.parseLogIndexFilterObjects(filters); - 
this.settingsFormGroup.controls.logIndexFilter.setValue(configs); - }); - } - - loadIndexFilterConfig(clusterNames: string[]): void { - let processedRequests = 0; - const allFilters: HomogeneousObject = {}; - const totalCount = clusterNames.length; - this.dataAvailablilityStore.setParameter('logIndexFilter', DataAvailabilityValues.LOADING); - clusterNames.forEach((clusterName: string): void => { - this.httpClient.get('logIndexFilters', null, { - clusterName - }).subscribe((response: Response): void => { - const filters = response.json() && response.json().filter; - if (filters) { - Object.assign(allFilters, { - [clusterName]: filters - }); - if (++processedRequests === totalCount) { - this.settingsStorage.setParameter('logIndexFilters', allFilters); - this.dataAvailablilityStore.setParameter('logIndexFilter', DataAvailabilityValues.AVAILABLE); - this.currentValues.logIndexFilter = allFilters; - } - } - }); - }); - } - - handleLogIndexFilterUpdate = (response: Response, cluster?: string): void => { - const title: string = this.translateService.instant('logIndexFilter.update.title'); - const resultStr: string = response instanceof Response && response.ok ? 'success' : 'failed'; - const data: {[key: string]: any} = response instanceof Response && response.text() ? response.json() : {}; - const message: string = this.translateService.instant(`logIndexFilter.update.${resultStr}`, { - message: '', - cluster: cluster || '', - ...data - }); - this.notificationService.addNotification({ - type: resultStr, - title, - message - }); - } - - saveIndexFilterConfig(): void { - const savedValue = this.currentValues.logIndexFilter; - const newValue = this.settingsFormGroup.controls.logIndexFilter.value; - const clusters = Object.keys(newValue); - const storedValue = {}; - const addResponseHandler = (cluster: string) => { - return (response: Response) => { - this.handleLogIndexFilterUpdate(response, cluster); - }; - }; - clusters.forEach((clusterName: string): void => { - const savedConfig = savedValue[clusterName], - newConfig = this.getLogIndexFilterObject(newValue[clusterName]); - Object.assign(storedValue, { - [clusterName]: newConfig - }); - if (!this.utils.isEqual(savedConfig, newConfig)) { - this.httpClient.put('logIndexFilters', { - filter: newConfig - }, null, { - clusterName - }).subscribe(addResponseHandler(clusterName), addResponseHandler(clusterName)); - } - }); - this.settingsStorage.setParameter('logIndexFilters', storedValue); - } - - /** - * Convert log index filter data for usage in component - * @param {HomogeneousObject>} filters - * @returns {HomogeneousObject} - */ - parseLogIndexFilterObjects( - filters: HomogeneousObject> - ): HomogeneousObject { - const levels = this.levelNames; - return filters ? 
Object.keys(filters).reduce(( - clustersCurrent: HomogeneousObject, clusterName: string - ): HomogeneousObject => { - const clusterConfigs = filters[clusterName], - clusterParsedObject = Object.keys(clusterConfigs).map((componentName: string) => { - const componentConfigs = clusterConfigs[componentName], - levelProperties = levels.reduce(( - levelsCurrent: HomogeneousObject, levelName: LogLevel - ): LevelOverridesConfig => { - return Object.assign({}, levelsCurrent, { - [levelName]: { - defaults: componentConfigs.defaultLevels.indexOf(levelName) > -1, - overrides: componentConfigs.overrideLevels.indexOf(levelName) > -1 - } - }); - }, {}); - return Object.assign({ - name: componentName, - label: componentConfigs.label, - hasOverrides: false, - hosts: componentConfigs.hosts.join(), - expiryTime: componentConfigs.expiryTime - }, levelProperties); - }); - return Object.assign({}, clustersCurrent, { - [clusterName]: clusterParsedObject - }); - }, {}) : {}; - } - - /** - * Convert data from log index filter component to format for PUT API call - * @param configs - * @returns {HomogeneousObject} - */ - private getLogIndexFilterObject(configs): HomogeneousObject { - const levelNames = this.levelNames; - return configs.reduce(( - currentObject: HomogeneousObject, componentConfig: LogIndexFilterComponentConfig - ): HomogeneousObject => { - const hosts = componentConfig.hosts; - return Object.assign({}, currentObject, { - [componentConfig.name]: { - defaultLevels: levelNames.filter((levelName: LogLevel): boolean => componentConfig[levelName].defaults), - expiryTime: componentConfig.expiryTime, - hosts: hosts ? hosts.split(',') : [], - label: componentConfig.label, - overrideLevels: levelNames.filter((levelName: LogLevel): boolean => componentConfig[levelName].overrides) - } - }); - }, {}); - } - - setTimeZone(timeZone: string): void { - this.settingsStorage.setParameter('timeZone', timeZone); - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.spec.ts deleted file mode 100644 index 33b6b7c188a..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.spec.ts +++ /dev/null @@ -1,569 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
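For reference, a minimal standalone sketch of the two shapes that UserSettingsService converts between; the type names and sample values below are illustrative stand-ins, not part of the deleted file:

// API-side shape stored under the 'logIndexFilters' setting:
// cluster name -> component name -> filter with explicit level lists.
type Level = 'FATAL' | 'ERROR' | 'WARN' | 'INFO' | 'DEBUG' | 'TRACE';

interface ApiComponentFilter {
  label: string;
  hosts: string[];
  expiryTime: string;
  defaultLevels: Level[];
  overrideLevels: Level[];
}

const apiFilters: {[cluster: string]: {[component: string]: ApiComponentFilter}} = {
  cl0: {
    logsearch_app: {
      label: 'Logsearch App',
      hosts: ['c64001', 'c64002'],
      expiryTime: '2018-02-09T00:00:00.000Z',
      defaultLevels: ['FATAL', 'ERROR', 'WARN', 'INFO'],
      overrideLevels: ['DEBUG']
    }
  }
};

// Form-side shape produced by parseLogIndexFilterObjects and consumed by
// getLogIndexFilterObject: hosts joined into one string, each level expanded
// to {defaults, overrides} flags.
const formConfig = {
  name: 'logsearch_app',
  label: 'Logsearch App',
  hasOverrides: false,
  hosts: 'c64001,c64002',
  expiryTime: '2018-02-09T00:00:00.000Z',
  FATAL: {defaults: true, overrides: false},
  DEBUG: {defaults: false, overrides: true}
};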
- */ - -import {TestBed, inject} from '@angular/core/testing'; - -import {UtilsService} from './utils.service'; - -describe('UtilsService', () => { - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [UtilsService] - }); - }); - - it('should create service', inject([UtilsService], (service: UtilsService) => { - expect(service).toBeTruthy(); - })); - - describe('#isEqual()', () => { - const cases = [ - { - valueA: 1, - valueB: 1, - result: true, - title: 'same numbers' - }, - { - valueA: 1, - valueB: 2, - result: false, - title: 'different numbers' - }, - { - valueA: 'a', - valueB: 'a', - result: true, - title: 'same strings' - }, - { - valueA: 'a', - valueB: 'b', - result: false, - title: 'different strings' - }, - { - valueA: '1', - valueB: 1, - result: false, - title: 'different types' - }, - { - valueA: true, - valueB: true, - result: true, - title: 'same booleans' - }, - { - valueA: false, - valueB: true, - result: false, - title: 'different booleans' - }, - { - valueA: {}, - valueB: {}, - result: true, - title: 'empty objects' - }, - { - valueA: { - p0: 'v0' - }, - valueB: { - p0: 'v0' - }, - result: true, - title: 'same objects' - }, - { - valueA: { - p0: 'v0' - }, - valueB: { - p0: 'v1' - }, - result: false, - title: 'different objects' - }, - { - valueA: { - p0: { - p1: 'v1' - } - }, - valueB: { - p0: { - p1: 'v1' - } - }, - result: true, - title: 'same objects in depth' - }, - { - valueA: { - p0: { - p1: 'v1' - } - }, - valueB: { - p0: { - p1: 'v2' - } - }, - result: false, - title: 'different objects in depth' - }, - { - valueA: [], - valueB: [], - result: true, - title: 'empty arrays' - }, - { - valueA: [1, 'a'], - valueB: [1, 'a'], - result: true, - title: 'same arrays' - }, - { - valueA: [1, 'a'], - valueB: [1, 'b'], - result: false, - title: 'different arrays' - }, - { - valueA: [1, 1], - valueB: [1, 1, 1], - result: false, - title: 'arrays of different length' - }, - { - valueA: [{}], - valueB: [{}], - result: true, - title: 'arrays of empty objects' - }, - { - valueA: [ - { - p0: 'v0' - } - ], - valueB: [ - { - p0: 'v0' - } - ], - result: true, - title: 'arrays of same objects' - }, - { - valueA: [ - { - p0: 'v0' - } - ], - valueB: [ - { - p0: 'v1' - } - ], - result: false, - title: 'arrays of different objects' - }, - { - valueA: function() {}, - valueB: function() {}, - result: true, - title: 'same functions' - }, - { - valueA: function(a) { - return a; - }, - valueB: function(b) { - return !b; - }, - result: false, - title: 'different functions' - }, - { - valueA: new Date(1), - valueB: new Date(1), - result: true, - title: 'same dates' - }, - { - valueA: new Date(1), - valueB: new Date(2), - result: false, - title: 'different dates' - }, - { - valueA: new RegExp('a'), - valueB: new RegExp('a'), - result: true, - title: 'same regexps' - }, - { - valueA: new RegExp('a', 'i'), - valueB: new RegExp('a', 'g'), - result: false, - title: 'same regexps with different flags' - }, - { - valueA: new RegExp('a'), - valueB: new RegExp('b'), - result: false, - title: 'different regexps' - }, - { - valueA: new Number(1), - valueB: new Number(1), - result: true, - title: 'same number objects' - }, - { - valueA: new Number(1), - valueB: new Number(2), - result: false, - title: 'different number objects' - }, - { - valueA: new String('a'), - valueB: new String('a'), - result: true, - title: 'same string objects' - }, - { - valueA: new String('a'), - valueB: new String('b'), - result: false, - title: 'different string objects' - }, - { - valueA: new Boolean(true), - 
valueB: new Boolean(true), - result: true, - title: 'same boolean objects' - }, - { - valueA: new Boolean(true), - valueB: new Boolean(false), - result: false, - title: 'different boolean objects' - }, - { - valueA: null, - valueB: null, - result: true, - title: 'null values' - }, - { - valueA: undefined, - valueB: undefined, - result: true, - title: 'undefined values' - }, - { - valueA: undefined, - valueB: null, - result: false, - title: 'undefined vs null' - } - ]; - - cases.forEach(test => { - describe(test.title, () => { - it('equality', inject([UtilsService], (service: UtilsService) => { - expect(service.isEqual(test.valueA, test.valueB)).toEqual(test.result); - })); - it('symmetry', inject([UtilsService], (service: UtilsService) => { - expect(service.isEqual(test.valueA, test.valueB)).toEqual(service.isEqual(test.valueB, test.valueA)); - })); - }); - }); - }); - - describe('#isEmptyObject()', () => { - const cases = [ - { - obj: {}, - result: true, - title: 'empty object' - }, - { - obj: { - p: 'v' - }, - result: false, - title: 'not empty object' - }, - { - obj: null, - result: false, - title: 'null' - }, - { - obj: undefined, - result: false, - title: 'undefined' - }, - { - obj: '', - result: false, - title: 'empty string' - }, - { - obj: 0, - result: false, - title: 'zero' - }, - { - obj: false, - result: false, - title: 'false' - }, - { - obj: NaN, - result: false, - title: 'NaN' - }, - { - obj: [], - result: false, - title: 'empty array' - }, - { - obj: '123', - result: false, - title: 'not empty primitive' - } - ]; - - cases.forEach(test => { - it(test.title, inject([UtilsService], (service: UtilsService) => { - expect(service.isEmptyObject(test.obj)).toEqual(test.result); - })); - }); - }); - - describe('#getMaxNumberInObject()', () => { - const cases = [ - { - obj: { - a: 1, - b: -1, - c: 0 - }, - max: 1, - title: 'basic case' - }, - { - obj: { - a: 1 - }, - max: 1, - title: 'single-item object' - }, - { - obj: { - a: -Infinity, - b: 0, - c: 1 - }, - max: 1, - title: 'object with -Infinity' - }, - { - obj: { - a: Infinity, - b: 0, - c: 1 - }, - max: Infinity, - title: 'object with Infinity' - }, - { - obj: { - a: NaN, - b: 0, - c: 1 - }, - max: 1, - title: 'object with NaN' - } - ]; - - cases.forEach(test => { - it(test.title, inject([UtilsService], (service: UtilsService) => { - expect(service.getMaxNumberInObject(test.obj)).toEqual(test.max); - })); - }); - }); - - describe('#getListItemFromString()', () => { - it('should convert string to ListItem', inject([UtilsService], (service: UtilsService) => { - expect(service.getListItemFromString('customName')).toEqual({ - label: 'customName', - value: 'customName' - }); - })); - }); - - describe('#getListItemFromNode()', () => { - it('should convert NodeItem to ListItem', inject([UtilsService], (service: UtilsService) => { - expect(service.getListItemFromNode({ - name: 'customName', - value: '1', - isParent: true, - isRoot: true - })).toEqual({ - label: 'customName (1)', - value: 'customName' - }); - })); - }); - - describe('#pushUniqueValues()', () => { - const cases = [ - { - source: [1, 2, 3], - itemsToPush: [2, 4, 5, 1], - compareFunction: undefined, - result: [1, 2, 3, 4, 5], - title: 'primitives array' - }, - { - source: [ - { - p0: 'v0' - }, - { - p1: 'v1' - }, - { - p2: 'v2' - } - ], - itemsToPush: [ - { - p3: 'v3' - }, - { - p2: 'v2' - }, - { - p2: 'v3' - }, - { - p4: 'v4' - } - ], - compareFunction: undefined, - result: [ - { - p0: 'v0' - }, - { - p1: 'v1' - }, - { - p2: 'v2' - }, - { - p3: 'v3' - }, - { - p2: 'v3' - 
}, - { - p4: 'v4' - } - ], - title: 'objects array' - }, - { - source: [ - { - id: 0, - value: 'v0' - }, - { - id: 1, - value: 'v1' - }, - { - id: 2, - value: 'v2' - } - ], - itemsToPush: [ - { - id: 3, - value: 'v3' - }, - { - id: 4, - value: 'v4' - }, - { - id: 0, - value: 'v5' - }, - { - id: 1, - value: 'v6' - } - ], - compareFunction: (itemA: any, itemB: any): boolean => itemA.id === itemB.id, - result: [ - { - id: 0, - value: 'v0' - }, - { - id: 1, - value: 'v1' - }, - { - id: 2, - value: 'v2' - }, - { - id: 3, - value: 'v3' - }, - { - id: 4, - value: 'v4' - } - ], - title: 'custom comparison function' - } - ]; - - cases.forEach(test => { - it(test.title, inject([UtilsService], (service: UtilsService) => { - expect(service.pushUniqueValues(test.source, test.itemsToPush, test.compareFunction)).toEqual(test.result); - })); - }); - }); - -}); diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts deleted file mode 100644 index 1a2fd058754..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts +++ /dev/null @@ -1,166 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
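The comparison semantics pinned down by the spec above can be exercised directly; a minimal usage sketch, assuming the service is constructed by hand rather than injected through the Angular TestBed:

import {UtilsService} from '@app/services/utils.service';

const utils = new UtilsService();

// plain objects and arrays are compared structurally, not by reference
console.log(utils.isEqual({p0: {p1: 'v1'}}, {p0: {p1: 'v1'}})); // true
console.log(utils.isEqual([1, 'a'], [1, 'b']));                 // false

// Date, RegExp and wrapper objects are compared by value
console.log(utils.isEqual(new Date(1), new Date(1)));           // true
console.log(utils.isEqual(new RegExp('a', 'i'), new RegExp('a', 'g'))); // false

// null and undefined are each equal to themselves but not to each other
console.log(utils.isEqual(undefined, null));                    // false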
- */ - -import {Injectable} from '@angular/core'; -import * as moment from 'moment-timezone'; -import {ListItem} from '@app/classes/list-item'; -import {HomogeneousObject, LogField} from '@app/classes/object'; -import {NodeItem} from '@app/classes/models/node-item'; - -@Injectable() -export class UtilsService { - - /** - * Comparison of two instances of any data type be value instead of reference - * @param valueA - * @param valueB - * @returns {boolean} - */ - isEqual = (valueA: any, valueB: any): boolean => { - if (valueA === valueB) { - return true; - } - if (valueA instanceof Date && valueB instanceof Date) { - return valueA.valueOf() === valueB.valueOf(); - } - if ((typeof valueA === 'function' && typeof valueB === 'function') || - (valueA instanceof RegExp && valueB instanceof RegExp) || - (valueA instanceof String && valueB instanceof String) || - (valueA instanceof Number && valueB instanceof Number) || - (valueA instanceof Boolean && valueB instanceof Boolean)) { - return valueA.toString() === valueB.toString(); - } - if (!(valueA instanceof Object) || !(valueB instanceof Object)) { - return false; - } - if (valueA.constructor !== valueB.constructor) { - return false; - } - if (valueA.isPrototypeOf(valueB) || valueB.isPrototypeOf(valueA)) { - return false; - } - for (const key in valueA) { - if (!valueA.hasOwnProperty(key)) { - continue; - } - if (!valueB.hasOwnProperty(key)) { - return false; - } - if (valueA[key] === valueB[key]) { - continue; - } - if (typeof valueA[key] !== 'object' || !this.isEqual(valueA[key], valueB[key])) { - return false; - } - } - for (const key in valueB) { - if (valueB.hasOwnProperty(key) && !valueA.hasOwnProperty(key)) { - return false; - } - } - return true; - } - - isEnterPressed(event: KeyboardEvent): boolean { - return event.keyCode === 13; - } - - isBackSpacePressed(event: KeyboardEvent): boolean { - return event.keyCode === 8; - } - - isDifferentDates(dateA, dateB, timeZone): boolean { - const momentA = moment(dateA).tz(timeZone), - momentB = moment(dateB).tz(timeZone); - return !momentA.isSame(momentB, 'day'); - } - - fitIntegerDigitsCount(numberToFormat: number, minLength: number = 2): string { - return numberToFormat.toLocaleString(undefined, { - minimumIntegerDigits: minLength - }); - } - - isEmptyObject(obj: any): boolean { - return this.isEqual(obj, {}); - } - - getMaxNumberInObject(obj: HomogeneousObject): number { - const keys = Object.keys(obj); - return keys.reduce((currentMax: number, currentKey: string): number => { - return isNaN(obj[currentKey]) ? currentMax : Math.max(currentMax, obj[currentKey]); - }, 0); - } - - /** - * Get instance for dropdown list from string - * @param name {string} - * @returns {ListItem} - */ - getListItemFromString(name: string): ListItem { - return { - label: name, - value: name - }; - } - - /** - * Get instance for dropdown list from NodeItem object - * @param node {NodeItem} - * @param addGroup {boolean} - * @returns {ListItem} - */ - getListItemFromNode(node: NodeItem, addGroup: boolean = false): ListItem { - const group: string = addGroup && node.group ? `${node.group.label || node.group.name}: ` : ''; - return { - label: `${group}${node.label || node.name} (${node.value})`, - value: node.name - }; - } - - logFieldToListItemMapper(fields: FieldT[]): ListItem[] { - return fields ? 
fields.map((field: FieldT): ListItem => { - return { - value: field.name, - label: field.label || field.name, - isChecked: field.visible - }; - }) : []; - } - - /** - * Method that updates source array with only the values which aren't already present there - * @param {Array} sourceArray - * @param {Array} itemsToPush - * @param {Function} [compareFunction=this.isEqual] - custom comparison function; - * item is skipped if it returns true, and pushed - if false - * @returns {Array} - */ - pushUniqueValues = ( - sourceArray: any[], itemsToPush: any[], compareFunction: (x: any, y: any) => boolean = this.isEqual - ): any[] => { - itemsToPush.forEach((item: any) => { - const itemExists = sourceArray.some((sourceItem: any): boolean => compareFunction(item, sourceItem)); - if (!itemExists) { - sourceArray.push(item); - } - }); - return sourceArray; - } - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts deleted file mode 100644 index 9a53a3791c0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
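A short usage sketch for pushUniqueValues with a custom comparator, mirroring the spec case above; the sample records are illustrative:

import {UtilsService} from '@app/services/utils.service';

const utils = new UtilsService();

const source = [{id: 0, value: 'v0'}, {id: 1, value: 'v1'}];
const incoming = [{id: 1, value: 'v6'}, {id: 2, value: 'v2'}];

// an incoming item is skipped when the comparator matches an existing entry,
// so id 1 is not duplicated and only id 2 is appended; source is mutated and returned
const merged = utils.pushUniqueValues(source, incoming, (a, b) => a.id === b.id);
console.log(merged); // [{id: 0, value: 'v0'}, {id: 1, value: 'v1'}, {id: 2, value: 'v2'}]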
- */ - -import {HttpModule, Http, BrowserXhr, XSRFStrategy, ResponseOptions, XHRBackend} from '@angular/http'; -import {TranslateModule, TranslateLoader} from '@ngx-translate/core'; -import {TranslateHttpLoader} from '@ngx-translate/http-loader'; -import {Injector} from '@angular/core'; -import {InMemoryBackendService} from 'angular-in-memory-web-api'; -import {MockApiDataService} from '@app/services/mock-api-data.service'; -import {HttpClientService} from '@app/services/http-client.service'; -import {RouterTestingModule} from '@angular/router/testing'; -import {clusters, ClustersService} from '@app/services/storage/clusters.service'; -import {StoreModule} from '@ngrx/store'; -import {UtilsService} from '@app/services/utils.service'; -import {ComponentGeneratorService} from '@app/services/component-generator.service'; -import {HostsService} from '@app/services/storage/hosts.service'; -import {ComponentsService} from '@app/services/storage/components.service'; - -function HttpLoaderFactory(http: Http) { - return new TranslateHttpLoader(http, 'assets/i18n/', '.json'); -} - -export const TranslationModules = [ - HttpModule, - TranslateModule.forRoot({ - loader: { - provide: TranslateLoader, - useFactory: HttpLoaderFactory, - deps: [Http] - } - }) -]; - -export const MockHttpRequestModules = [ - HttpClientService, - { - provide: XHRBackend, - useFactory: getTestXHRBackend, - deps: [Injector, BrowserXhr, XSRFStrategy, ResponseOptions] - } -]; - -export const getCommonTestingBedConfiguration = ( - {declarations = [], imports = [], providers = []} = {} -) => ({ - imports: [ - ...TranslationModules, - RouterTestingModule, - StoreModule.provideStore({ - clusters - }), - ...imports - ], - providers: [ - ...MockHttpRequestModules, - ComponentGeneratorService, - ClustersService, - HostsService, - ComponentsService, - UtilsService, - ...providers - ], - declarations: [ - ...declarations - ] -}); - -export function getTestXHRBackend(injector: Injector, browser: BrowserXhr, xsrf: XSRFStrategy, options: ResponseOptions) { - return new InMemoryBackendService( - injector, - new MockApiDataService(), - { - passThruUnknownUrl: true, - rootPath: '' - } - ); -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json b/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json deleted file mode 100644 index a0796acb6fe..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json +++ /dev/null @@ -1,278 +0,0 @@ -{ - "common.title": "Log Search", - "common.serviceLogs": "Service Logs", - "common.auditLogs": "Audit Logs", - "common.summary": "Summary", - "common.logs": "Logs", - "common.name": "Name", - "common.value": "Value", - "common.settings": "Settings", - - "common.form.errors.required": "This field is required", - - "dropdown.selection": "Selected ({{total}})", - "dropdown.selection.clear": "(Clear)", - "dropdown.selection.clearToDefault": "(Set defaults)", - "dropdown.selection.all": "All {{listName}} ({{total}})", - - "modal.submit": "OK", - "modal.cancel": "Cancel", - "modal.apply": "Apply", - "modal.close": "Close", - "modal.save": "Save", - - "authorization.logout": "Logout", - "authorization.name": "Username", - "authorization.password": "Password", - "authorization.signIn": "Sign In", - "authorization.error.401": "Unable to sign in. 
Invalid username/password combination.", - - "login.title": "Login", - - "topMenu.undo": "Undo", - "topMenu.redo": "Redo", - "topMenu.refresh": "Refresh", - "topMenu.history": "History", - "topMenu.filter": "Filter", - "topMenu.shipperConfiguration": "Configuration Editor", - - "filter.all": "All", - - "filter.clusters": "Clusters", - "filter.components": "Components", - "filter.levels": "Levels", - "filter.include": "Include", - "filter.exclude": "Exclude", - "filter.hosts": "Hosts", - "filter.users": "Users", - - "filter.capture": "Capture", - "filter.captureSnapshot": "Snapshot", - "filter.refreshingLogListIn": "Refreshing log list in...", - "filter.capture.min": "Min", - "filter.capture.sec": "Sec", - "filter.capture.triggeringRefresh": "Triggering auto-refresh in {{remainingSeconds}} sec", - "filter.youAreInSnapshotView": "You are in snapshot view", - "filter.closeSnapshotView": "Close snapshot view", - - "filters.clear": "Clear", - - "filter.timeRange": "Time Range", - "filter.timeRange.quick": "Quick Ranges", - "filter.timeRange.7d": "Last 7 days", - "filter.timeRange.30d": "Last 30 days", - "filter.timeRange.60d": "Last 60 days", - "filter.timeRange.90d": "Last 90 days", - "filter.timeRange.6m": "Last 6 months", - "filter.timeRange.1y": "Last 1 year", - "filter.timeRange.2y": "Last 2 years", - "filter.timeRange.5y": "Last 5 years", - "filter.timeRange.yesterday": "Yesterday", - "filter.timeRange.beforeYesterday": "Day before yesterday", - "filter.timeRange.thisDayLastWeek": "This day last week", - "filter.timeRange.previousWeek": "Previous week", - "filter.timeRange.previousMonth": "Previous month", - "filter.timeRange.previousYear": "Previous year", - "filter.timeRange.today": "Today", - "filter.timeRange.todaySoFar": "Today so far", - "filter.timeRange.thisWeek": "This week", - "filter.timeRange.thisWeekSoFar": "This week so far", - "filter.timeRange.thisMonth": "This month", - "filter.timeRange.thisYear": "This year", - "filter.timeRange.5min": "Last 5 minutes", - "filter.timeRange.15min": "Last 15 minutes", - "filter.timeRange.30min": "Last 30 minutes", - "filter.timeRange.1hr": "Last 1 hour", - "filter.timeRange.3hr": "Last 3 hours", - "filter.timeRange.6hr": "Last 6 hours", - "filter.timeRange.12hr": "Last 12 hours", - "filter.timeRange.24hr": "Last 24 hours", - "filter.timeRange.custom": "Custom", - "filter.timeRange.from": "from", - "filter.timeRange.to": "to", - "filter.toggleTo.exclude": "Toggle to exclude", - "filter.toggleTo.include": "Toggle to include", - - "filter.timeRange.error.tooShort": "The selected time range is too short.", - - "levels.fatal": "Fatal", - "levels.error": "Error", - "levels.warn": "Warn", - "levels.info": "Info", - "levels.debug": "Debug", - "levels.trace": "Trace", - "levels.unknown": "Unknown", - - "sorting.title": "Sort By", - "sorting.time.asc": "Ascending Time", - "sorting.time.desc": "Descending Time", - - "pagination.title": "Rows per page:", - "pagination.numbers": "{{startIndex}}-{{endIndex}} of {{totalCount}}", - - "logs.title": "Logs", - "logs.serviceLogs.title": "Service Logs", - "logs.auditLogs.title": "Audit Logs", - "logs.columns": "Columns", - "logs.status": "Status", - "logs.details": "Details", - "logs.message": "Message", - "logs.bundleId": "Bundle Id", - "logs.caseId": "Case Id", - "logs.cluster": "Cluster", - "logs.eventCount": "Event Count", - "logs.file": "File", - "logs.host": "Host", - "logs.id": "Id", - "logs.ip": "Ip", - "logs.level": "Level", - "logs.lineNumber": "Line Number", - "logs.logType": "Log Type", - 
"logs.logfileLineNumber": "Logfile Line Number", - "logs.loggerName": "Logger Name", - "logs.method": "Method", - "logs.path": "Path", - "logs.rowType": "Row Type", - "logs.threadName": "Thread", - "logs.type": "Type", - "logs.enforcer": "Access Enforcer", - "logs.accessType": "Access Type", - "logs.action": "Action", - "logs.agent": "Agent", - "logs.agentHost": "Agent Host", - "logs.authType": "Auth Type", - "logs.clientIp": "Client Ip", - "logs.clientType": "Client Type", - "logs.dst": "DST", - "logs.eventTime": "Event Time", - "logs.logMessage": "Log Message", - "logs.logTime": "Log Time", - "logs.perm": "Perm", - "logs.policy": "Policy", - "logs.proxyUsers": "Proxy Users", - "logs.reason": "Reason", - "logs.repo": "Repo", - "logs.repoType": "Repo Type", - "logs.reqCallerId": "Req Caller Id", - "logs.reqContext": "Req Context", - "logs.reqData": "Req Data", - "logs.reqSelfId": "Req Self Id", - "logs.resType": "Res Type", - "logs.resource": "Resource", - "logs.result": "Result", - "logs.session": "Session", - "logs.text": "Text", - "logs.ugi": "UGI", - "logs.user": "User", - "logs.baseUrl": "Base URL", - "logs.command": "Command", - "logs.component": "Component", - "logs.displayName": "Display Name", - "logs.os": "OS", - "logs.repoId": "Repo Id", - "logs.repoVersion": "Repo Version", - "logs.repositories": "Repositories", - "logs.requestId": "Request Id", - "logs.resultStatus": "Result Status", - "logs.roles": "Roles", - "logs.stackVersion": "Stack Version", - "logs.stack": "Stack", - "logs.taskId": "Task Id", - "logs.versionNote": "Version Note", - "logs.versionNumber": "Version Number", - "logs.addToQuery": "Add to Query", - "logs.excludeFromQuery": "Exclude from Query", - "logs.copy": "Copy", - "logs.copy.title": "Copy", - "logs.copy.success": "The log has been copied to the clipboard.", - "logs.copy.failed": "Error at copying the log into the clipboard.", - "logs.copy.notSupported": "This function is not supported in this browser.", - "logs.open": "Open Log", - "logs.context": "Context", - "logs.loadMore": "Load more", - "logs.oneEventFound": "1 event found", - "logs.totalEventFound": "{{totalCount}} events found", - "logs.noEventFound": "No event found", - "logs.hideGraph": "Hide Graph", - "logs.showGraph": "Show Graph", - "logs.topUsers": "Top {{number}} Users", - "logs.topResources": "Top {{number}} Resources", - "logs.brokenListLayoutMessage": "Some information may not be visible.", - "logs.brokenListLayoutTooltip": "It seems that your screen is too narrow to display this number of columns.", - "logs.tableLayoutBtnTooltip": "Table layout. Optimal when you want to display only few columns.", - "logs.flexLayoutBtnTooltip": "Flexible layout. 
Optimal when your screen is narrow or you want to display more columns.", - "logs.toggleLabels": "Turn on/off the labels.", - "logs.duration": "Duration", - - "histogram.gap": "gap", - "histogram.gaps": "gaps", - "histogram.gap.second": "second", - "histogram.gap.seconds": "seconds", - "histogram.gap.minute": "minute", - "histogram.gap.minutes": "minutes", - "histogram.gap.hour": "hour", - "histogram.gap.hours": "hours", - "histogram.gap.day": "day", - "histogram.gap.days": "days", - "histogram.gap.week": "week", - "histogram.gap.weeks": "weeks", - - "logIndexFilter.title": "Log Index Filter", - "logIndexFilter.caption": "For each cluster, choose the components, hosts and log levels that will be indexed in the log feeder", - "logIndexFilter.select": "Select", - "logIndexFilter.selectCluster": "Select Cluster", - "logIndexFilter.override": "Override Hosts", - "logIndexFilter.addHosts": "Add Hosts", - "logIndexFilter.hostname": "Hostname", - "logIndexFilter.expiryDate": "Expiry Date", - "logIndexFilter.update.title": "Log Index Filter Update", - "logIndexFilter.update.success": "Log Index Filter for cluster {{cluster}} has been successfully updated.", - "logIndexFilter.update.error": "Error at updating Log Index Filter for cluster {{cluster}}. {{message}}", - - "shipperConfiguration.title": "All Configuration", - "shipperConfiguration.add": "Add", - "shipperConfiguration.edit": "Edit", - "shipperConfiguration.addConfigurationBtn": "Add Configuration", - "shipperConfiguration.breadcrumbs.title": "Log Feeder", - "shipperConfiguration.breadcrumbs.add": "Add Configuration", - "shipperConfiguration.breadcrumbs.update": "Edit Configuration", - - "shipperConfiguration.form.titleAdd": "Add Configuration", - "shipperConfiguration.form.titleEdit": "Edit Configuration", - "shipperConfiguration.form.serviceLabel": "Service Name", - "shipperConfiguration.form.configurationJSONLabel": "Configuration JSON", - "shipperConfiguration.form.saveBtn.label": "Save", - "shipperConfiguration.form.cancelBtn.label": "Cancel", - "shipperConfiguration.form.testBtn.label": "Test", - "shipperConfiguration.form.errors.configuration.invalidJSON": "Invalid JSON!", - "shipperConfiguration.form.errors.serviceName.exists": "This service name already exists.", - "shipperConfiguration.form.errors.componentNameField.serviceNameDoesNotExistInConfiguration": "This component is not in the configuration.", - "shipperConfiguration.form.leavingDirty.title": "You have unsaved changes", - "shipperConfiguration.form.leavingDirty.message": "Are you sure that you cancel the changes?", - - "shipperConfiguration.validator.title": "Validator", - "shipperConfiguration.validator.componentNameLabel": "Component Name", - "shipperConfiguration.validator.sampleDataLabel": "Sample Data", - "shipperConfiguration.validator.filters": "Filters", - "shipperConfiguration.validator.result": "Validation result", - - "shipperConfiguration.navigation.title": "Shipper Configuration", - "shipperConfiguration.navigation.invalidCluster": "The selected cluster name is invalid: {{cluster}}", - "shipperConfiguration.navigation.invalidService": "The selected service ({{service}}) is not on the selected cluster ({{cluster}}).", - - "shipperConfiguration.action.add.title": "New Configuration", - "shipperConfiguration.action.add.success.message": "New configuration has been added successfully.", - "shipperConfiguration.action.add.error.message": "Error at adding new configuration.
    Cluster: {{cluster}}
    Service: {{service}}
    ", - "shipperConfiguration.action.update.title": "Update Configuration", - "shipperConfiguration.action.update.success.message": "The configuration has been updated successfully.
    Cluster: {{cluster}}
    Service: {{service}}
    ", - "shipperConfiguration.action.update.error.message": "Error at updating the configuration.
    Cluster: {{cluster}}
    Service: {{service}}
    ", - "shipperConfiguration.action.validate.title": "Validate Configuration", - "shipperConfiguration.action.validate.success.message": "The configuration is valid.", - "shipperConfiguration.action.validate.error.message": "The configuration is not valid.
    Cluster: {{clusterName}}
    Service: {{componentName}}
    {{errorMessage}}
    ", - - "dataAvaibilityState.clustersDataState.label": "Loading clusters", - "dataAvaibilityState.hostsDataState.label": "Loading hosts", - "dataAvaibilityState.componentsDataState.label": "Loading components", - "dataAvaibilityState.hasError.message": "We were not able to load the data. Please check your internet connection and reload the page!" - -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/assets/images/ambari-logo.png b/ambari-logsearch/ambari-logsearch-web/src/assets/images/ambari-logo.png deleted file mode 100644 index 07d31ee4806..00000000000 Binary files a/ambari-logsearch/ambari-logsearch-web/src/assets/images/ambari-logo.png and /dev/null differ diff --git a/ambari-logsearch/ambari-logsearch-web/src/environments/environment.prod.ts b/ambari-logsearch/ambari-logsearch-web/src/environments/environment.prod.ts deleted file mode 100644 index 690ddb1fb40..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/environments/environment.prod.ts +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export const environment = { - production: true -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/environments/environment.ts b/ambari-logsearch/ambari-logsearch-web/src/environments/environment.ts deleted file mode 100644 index e34c67ca58d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/environments/environment.ts +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -export const environment = { - production: false -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/favicon.ico b/ambari-logsearch/ambari-logsearch-web/src/favicon.ico deleted file mode 100644 index 62604cb19f8..00000000000 Binary files a/ambari-logsearch/ambari-logsearch-web/src/favicon.ico and /dev/null differ diff --git a/ambari-logsearch/ambari-logsearch-web/src/index.html b/ambari-logsearch/ambari-logsearch-web/src/index.html deleted file mode 100644 index 0f8ff0958a7..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/index.html +++ /dev/null @@ -1,78 +0,0 @@ - - - - - - - LogSearch - - - - - - - - -
Loading Log Search...
    - - diff --git a/ambari-logsearch/ambari-logsearch-web/src/main.ts b/ambari-logsearch/ambari-logsearch-web/src/main.ts deleted file mode 100644 index 5e76e0d3cdc..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/main.ts +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {enableProdMode} from '@angular/core'; -import {platformBrowserDynamic} from '@angular/platform-browser-dynamic'; - -import {AppModule} from '@app/app.module'; -import {environment} from '@envs/environment'; - -if (environment.production) { - enableProdMode(); -} - -platformBrowserDynamic().bootstrapModule(AppModule); diff --git a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-common.ts b/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-common.ts deleted file mode 100644 index d830139e32d..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-common.ts +++ /dev/null @@ -1,186 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import * as moment from 'moment'; -import * as randomize from 'randomatic'; - -export const clusters: string[] = ['cl0', 'cl1', 'cl2']; - -export const hosts: string[] = ['c64001', 'c64002', 'c64003']; - -export const services: string[] = ['hdfs', 'ambari']; - -export const users: string[] = ['hdfs', 'admin', 'user']; - -export const components = [ - 'ambari_agent', - 'hdfs_secondarynamenode', - 'infra_solr', - 'logsearch_app', - 'logsearch_feeder' -]; - -export const levels = [ - 'INFO', - 'WARN', - 'ERROR', - 'FATAL', - 'DEBUG' -]; - -export function ucFirst(str) { - return str.charAt(0).toUpperCase() + str.slice(1); -} - -export function getRandomInt(max) { - return Math.floor(Math.random() * Math.floor(max)); -} - -export function getRandomElement(list: Array) { - return list[getRandomInt(list.length)]; -} - -export function generatePath( - c: number = 3, - addComponent: boolean | string = true, - addService: boolean | string = false, - folderNameMaxLength: number = 12 -): string { - let path = '/var/log'; - if (addService) { - path += ('/' + (addService === true ? getRandomElement(services) : addService)); - c -= 1; - } - if (addComponent) { - path += ('/' + (addComponent === true ? getRandomElement(components) : addComponent)); - c -= 1; - } - for (let i = 0; i < c; i += 1) { - path += ('/' + randomize('Aa0?', getRandomInt(folderNameMaxLength), {chars: '-_'})); - } - return path; -} - -export function generateServiceLog(defaults?: {[key:string]: any}) { - const component = (defaults && defaults.type) || getRandomElement(components); - const host = (defaults && defaults.host) || getRandomElement(hosts); - return Object.assign({ - 'id': randomize('a0', 32, {chars: '-'}), - 'bundle_id': null, - 'case_id': null, - 'cluster': getRandomElement(clusters), - 'seq_num': randomize('0', 5), - 'log_message': randomize('a0?a0', getRandomInt(1000), {chars: ' \n'}), - 'logfile_line_number': randomize('0', 4), - 'event_dur_ms': null, - 'file': randomize('a0?a0', 16, {chars: '-_'}) + '.java', - 'type': component, - 'event_count': getRandomInt(1000), - 'event_md5': randomize('a0', 32), - 'message_md5': randomize('a0', 32), - '_ttl_': `-${getRandomInt(30)}DAYS`, - '_expire_at_': 1518188622956, - '_version_': randomize('0', 20), - '_router_field_': null, - 'level': getRandomElement(levels), - 'line_number': getRandomInt(999), - 'logtime': moment().subtract(getRandomInt(14), 'days').valueOf(), - 'ip': `${getRandomInt(255)}.${getRandomInt(255)}.${getRandomInt(255)}.${getRandomInt(255)}`, - 'path': generatePath(3, component) + '.json', - 'host': host + '.ambari.apache.org', - 'group': host + '.ambari.apache.org' - }, defaults || {}); -} - -export function generateAuditLog(defaults?: {[key: string]: any}) { - const component: string = (defaults && defaults.component) || getRandomElement(components); // meta default - const service: string = (defaults && defaults.repo) || getRandomElement(services); - const time = moment().subtract(getRandomInt(14), 'days'); - return Object.assign({ - policy: 'policy', - reason: randomize('aA', { - length: getRandomInt(50), - chars: ' .:' - }), - result: 0, - text: randomize('aA', { - length: getRandomInt(50), - chars: ' .:' - }), - tags: [component], - resource: '/' + component, - sess: '0', - access: '0', - logType: ucFirst(service) + 'Audit', - tags_str: component, - resType: 'agent', - reqUser: 'admin', - reqData: 'data', - repoType: 1, - repo: service, - proxyUsers: ['admin'], - evtTime: time.valueOf(), - enforcer: service + '-acl', - reqContext: service, - 
cliType: getRandomElement(['GET', 'POST']), - cliIP: '192.168.0.1', - agent: 'agent', - agentHost: 'localhost', - action: 'SERVICE_CHECK', - type: service + '-audit', - _version_: 2, - id: 'id0', - file: component + '.log', - seq_num: 3, - bundle_id: 'b0', - case_id: 'c0', - log_message: `User(${getRandomElement(users)}), Operation(SERVICE_CHECK)`, - logfile_line_number: 4, - message_md5: randomize('a0', 20), - cluster: getRandomElement(clusters), - event_count: getRandomInt(100), - event_md5: randomize('0', 20), - event_dur_ms: getRandomInt(900), - _ttl_: '+7DAYS', - _expire_at_: time.format(), - _router_field_: getRandomInt(20) - }, defaults || {}); -} - -export function generateDataCount(from, to, unit, gap) { - let current = moment(from); - const end = moment(to); - const data = []; - while (current.isBefore(end)) { - data.push({ - name: current.toISOString(), - value: getRandomInt(9000) - }); - current = current.add(gap, unit); - } - return data; -} - -export function generateGraphData(from, to, unit, gap) { - return levels.map((level) => { - return { - dataCount: generateDataCount(from, to, unit, gap), - name: level - }; - }); -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-get.ts b/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-get.ts deleted file mode 100644 index 8511cc46c4e..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-get.ts +++ /dev/null @@ -1,2073 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
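A minimal sketch of how the generators above can be combined into a mock payload, in the same style as the mockDataGet handlers that follow; the page size and bucket size here are arbitrary:

import * as moment from 'moment';
import {generateServiceLog, generateGraphData} from './mock-data-common';

// a fixed-size page of service logs pinned to one component and host
const logList = Array.from({length: 5}, () => generateServiceLog({
  type: 'logsearch_app',
  host: 'c64001'
}));

// one series per log level, bucketed hourly over the last day
const graphData = generateGraphData(moment().subtract(1, 'd'), moment(), 'h', 1);

console.log(logList.length, graphData.map(series => series.name));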
- */ - -import * as moment from 'moment'; -import {Moment} from 'moment'; - -import { - clusters, - hosts, - services, - users, - components, - ucFirst, - getRandomInt, - getRandomElement, - generateServiceLog, - generateAuditLog, - generateGraphData -} from './mock-data-common'; -import Base = moment.unitOfTime.Base; - -const currentTime: Moment = moment(); - -export const mockDataGet = { - 'login': {}, - 'logout': {}, - - 'api/v1/audit/logs': function (query) { - const list = []; - const params = query.rawParams.split('&').reduce((currentObj, param) => { - let [key, value] = param.split('='); - switch (key) { - case 'page': - case 'pageSize': - case 'startIndex': - value = parseInt(value, 0); - break; - case 'from': - case 'to': - value = decodeURIComponent(value); - value = moment(value); - break; - case 'userList': - value = decodeURIComponent(value).split(','); - break; - } - currentObj[key] = value; - return currentObj; - }, {}); - const pageSize = params.pageSize || 50; - const intervalSteps = params.to.diff(params.from) / pageSize; - const startTime = params.from.valueOf(); - for (let i = 0; i < pageSize; i += 1) { - const defaults: {[key: string]: any} = {logtime: startTime + (i * intervalSteps)}; - list.push(generateAuditLog(defaults)); - } - return { - 'startIndex': params.startIndex, - 'pageSize': pageSize, - 'totalCount': 10 * pageSize, - 'resultSize': 10 * pageSize, - 'sortType': params.sortType, - 'sortBy': params.sortBy, - 'queryTimeMS': 1518013198573, - 'logList': list - }; - }, - 'api/v1/audit/logs/bargraph': { - graphData: [{ - dataCount: [ - { - name: currentTime.toISOString(), - value: '75' - }, - { - name: currentTime.clone().subtract(20, 'm').toISOString(), - value: '100' - }, - { - name: currentTime.clone().subtract(40, 'm').toISOString(), - value: '75' - }, - { - name: currentTime.clone().subtract(1, 'h').toISOString(), - value: '50' - } - ], - name: 'AMBARI' - }, { - dataCount: [ - { - name: currentTime.toISOString(), - value: '150' - }, - { - name: currentTime.clone().subtract(20, 'm').toISOString(), - value: '50' - }, - { - name: currentTime.clone().subtract(40, 'm').toISOString(), - value: '75' - }, - { - name: currentTime.clone().subtract(1, 'h').toISOString(), - value: '100' - } - ], - name: 'HDFS' - } - ]}, - 'api/v1/audit/logs/components': { - 'groups': {}, - 'metadata': components.map(comp => { - return { - name: comp, - label: comp.split('_').map(ucFirst).join(' '), - group: null - }; - }) - }, - 'api/v1/audit/logs/resources/\\d': function (query) { - const graphData = users.map((user: string) => { - return { - name: user, - dataCount: services.map((service: string) => { - return { - name: service, - value: getRandomInt(1000) - }; - }) - }; - }); - return { - graphData: graphData - }; - }, - 'api/v1/audit/logs/schema/fields': { - 'defaults': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'DST', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': 
false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Session', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': true - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'UGI', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': true - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': true - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Client Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': true - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'IP', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Event Time', - 'filterable': true, - 'visible': true - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Access Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Client Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ], - 'overrides': { - 'ambari': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 
'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'Dst', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Sess', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'Req User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'Ugi', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Cli I P', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Evt Time', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Cli Type', - 'filterable': 
true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ], - 'RangerAudit': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'Dst', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Sess', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'Req User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'Ugi', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Cli I P', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Evt Time', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 
'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Cli Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ], - 'hdfs': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'Dst', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Sess', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'Req User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'Ugi', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Cli I P', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 
'visible': false - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Evt Time', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Cli Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ] - } - }, - 'api/v1/audit/logs/serviceload': { - graphData: [ - { - dataCount: [ - { - name: 'n4', - value: 1 - }, - { - name: 'n5', - value: 2 - } - ], - name: 'graph2' - }, - { - dataCount: [ - { - name: 'n6', - value: 10 - }, - { - name: 'n7', - value: 20 - } - ], - name: 'graph3' - } - ] - }, - - 'api/v1/public/config': {}, - - 'api/v1/service/logs': function (query) { - const list = []; - const params = query.rawParams.split('&').reduce((currentObj, param) => { - let [key, value] = param.split('='); - switch (key) { - case 'page': - case 'pageSize': - case 'startIndex': - value = parseInt(value, 0); - break; - case 'from': - case 'to': - value = decodeURIComponent(value); - value = moment(value); - break; - case 'mustBe': - case 'hostList': - case 'level': - value = decodeURIComponent(value).split(','); - break; - } - currentObj[key] = value; - return currentObj; - }, {}); - const pageSize = params.pageSize || 50; - const intervalSteps = params.to.diff(params.from) / pageSize; - const startTime = params.from.valueOf(); - - for (let i = 0; i < pageSize; i += 1) { - const defaults: {[key: string]: any} = {logtime: startTime + (i * intervalSteps)}; - if (params.mustBe) { - defaults.type = getRandomElement(params.mustBe); - } - if (params.hostList) { - defaults.host = getRandomElement(params.hostList); - } - if (params.level) { - defaults.level = getRandomElement(params.level); - } - list.push(generateServiceLog(defaults)); - } - return { - 'startIndex': params.startIndex, - 'pageSize': pageSize, - 'totalCount': 10 * pageSize, - 'resultSize': 10 * pageSize, - 'sortType': params.sortType, - 'sortBy': params.sortBy, - 'queryTimeMS': 1518013198573, - 'logList': list - }; - }, - 'api/v1/service/logs/logList': (query) => { - const list = []; - const params = query.rawParams.split('&').reduce((currentObj, param) => { - let [key, value] = param.split('='); - switch (key) { - case 'page': - case 'pageSize': - case 'startIndex': - value = parseInt(value, 0); - break; - case 'from': - case 'to': - value = decodeURIComponent(value); - value = moment(value); - break; - case 'mustBe': - case 'hostList': - case 'level': - value = decodeURIComponent(value).split(','); - break; - } - currentObj[key] = value; - return currentObj; - }, {}); - const pageSize = params.pageSize || 50; - const intervalSteps = params.to.diff(params.from) / pageSize; - 
const startTime = params.from.valueOf(); - - for (let i = 0; i < pageSize; i += 1) { - const defaults: {[key: string]: any} = { - logtime: startTime + (i * intervalSteps), - event_dur_ms: getRandomInt(1000) - }; - if (params.mustBe) { - defaults.type = getRandomElement(params.mustBe); - } - if (params.hostList) { - defaults.host = getRandomElement(params.hostList); - } - if (params.level) { - defaults.level = getRandomElement(params.level); - } - list.push(generateServiceLog(defaults)); - } - return list; - }, - 'api/v1/service/logs/aggregated': { - graphData: [ - { - name: 'n0', - count: 100, - dataList: [ - { - name: 'n1', - count: 50, - dataList: null - }, - { - name: 'n2', - count: 200, - dataList: null - } - ] - }, - { - name: 'n3', - count: 10, - dataList: [ - { - name: 'n4', - count: 5, - dataList: null - }, - { - name: 'n5', - count: 20, - dataList: null - } - ] - } - ] - }, - 'api/v1/service/logs/components': { - 'groups': {}, - 'metadata': components.map(comp => { - return { - name: comp, - label: comp.split('_').map(ucFirst).join(' '), - group: null - }; - }) - }, - 'api/v1/service/logs/components/levels/counts': { - vNodeList: [ - { - name: 'ambari', - type: 0, - logLevelCount: [ - { - name: 'ERROR', - value: '10' - }, - { - name: 'WARN', - value: '50' - } - ], - childs: [ - { - name: 'hdfs', - type: 2, - logLevelCount: [ - { - name: 'ERROR', - value: '10' - }, - { - name: 'WARN', - value: '20' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'zookeeper', - type: 3, - logLevelCount: [ - { - name: 'ERROR', - value: '20' - }, - { - name: 'WARN', - value: '40' - } - ], - isParent: false, - isRoot: false - } - ], - isParent: true, - isRoot: false - }, - { - name: 'ambari_agent', - type: 1, - logLevelCount: [ - { - name: 'ERROR', - value: '100' - }, - { - name: 'WARN', - value: '500' - } - ], - isParent: false, - isRoot: false - } - ] - }, - 'api/v1/service/logs/files': { - hostLogFiles: { - clusters: clusters, - services: services - } - }, - 'api/v1/service/logs/histogram': (query: URLSearchParams) => { - const unitParam: string[] = decodeURIComponent(query.get('unit')).match(/(\d{1,})([a-zA-Z]{1,})/); - const unit: Base = unitParam[2]; - const amount: number = parseInt(unitParam[1], 0); - const from = moment(decodeURIComponent(query.get('from'))); - const to = moment(decodeURIComponent(query.get('to'))); - return { - graphData: generateGraphData(from, to, unit, amount) - }; - }, - 'api/v1/service/logs/hosts': { - groupList: hosts.map(host => Object.assign({}, {host})) - }, - 'api/v1/service/logs/schema/fields': [{ - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, { - 'name': 'key_log_message', - 'label': 'Key Log Message', - 'filterable': true, - 'visible': false - }, { - 'name': 'type', - 'label': 'Component', - 'filterable': true, - 'visible': true - }, { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, { - 'name': 'logtype', - 'label': 'Logtype', - 'filterable': true, - 'visible': false - }, { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, { - 'name': 'line_number', - 'label': 'Line Number', - 'filterable': true, - 'visible': true - }, { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, { - 'name': 'log_message', - 'label': 'Message', - 'filterable': true, - 'visible': true - }, { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, { - 'name': 'logfile_line_number', - 'label': 
'Logfile Line Number', - 'filterable': true, - 'visible': false - }, { - 'name': 'group', - 'label': 'Group', - 'filterable': true, - 'visible': false - }, { - 'name': 'method', - 'label': 'Method', - 'filterable': true, - 'visible': false - }, { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': true - }, { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, { - 'name': 'thread_name', - 'label': 'Thread', - 'filterable': true, - 'visible': false - }, { - 'name': 'logtime', - 'label': 'Log Time', - 'filterable': true, - 'visible': true - }], - 'api/v1/service/logs/serviceconfig': '', - 'api/v1/service/logs/tree': { - vNodeList: [ - { - name: hosts[0], - type: 'H', - value: '1', - childs: [ - { - name: 'ams_collector', - type: 'C', - value: '1', - logLevelCount: [ - { - name: 'WARN', - value: '1' - } - ], - isParent: false, - isRoot: false - } - ], - logLevelCount: [ - { - name: 'WARN', - value: '1' - } - ], - isParent: true, - isRoot: true - }, - { - name: hosts[1], - type: 'H', - value: '6', - childs: [ - { - name: 'ams_collector', - type: 'C', - value: '1', - logLevelCount: [ - { - name: 'ERROR', - value: '1' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'ambari_agent', - type: 'C', - value: '1', - logLevelCount: [ - { - name: 'FATAL', - value: '1' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'zookeeper_server', - type: 'C', - value: '2', - logLevelCount: [ - { - name: 'INFO', - value: '1' - }, - { - name: 'DEBUG', - value: '1' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'zookeeper_client', - type: 'C', - value: '2', - logLevelCount: [ - { - name: 'TRACE', - value: '1' - }, - { - name: 'UNKNOWN', - value: '1' - } - ], - isParent: false, - isRoot: false - } - ], - logLevelCount: [ - { - name: 'ERROR', - value: '1' - }, - { - name: 'FATAL', - value: '1' - }, - { - name: 'INFO', - value: '1' - }, - { - name: 'DEBUG', - value: '1' - }, - { - name: 'TRACE', - value: '1' - }, - { - name: 'UNKNOWN', - value: '1' - } - ], - isParent: true, - isRoot: true - } - ] - }, - 'api/v1/service/logs/truncated': { - logList: [ - { - path: '/var/log/ambari-metrics-collector/ambari-metrics-collector.log', - host: 'h0', - level: 'WARN', - logtime: '2017-05-28T11:30:22.531Z', - ip: '192.168.0.1', - logfile_line_number: 8, - type: 'ams_collector', - _version_: 9, - id: 'id2', - file: 'ambari-metrics-collector.log', - seq_num: 10, - bundle_id: 'b2', - case_id: 'c2', - log_message: 'Connection refused', - message_md5: '1357908642', - cluster: 'cl2', - event_count: 5, - event_md5: '1908755391', - event_dur_ms: 200, - _ttl_: '+5DAYS', - _expire_at_: '2017-05-29T11:30:22.531Z', - _router_field_: 20 - }, - { - path: '/var/log/ambari-metrics-collector/ambari-metrics-collector.log', - host: 'h1', - level: 'ERROR', - logtime: '2017-05-28T10:30:22.531Z', - ip: '192.168.0.2', - type: 'ams_collector', - _version_: 14, - id: 'id3', - file: 'ambari-metrics-collector.log', - seq_num: 15, - bundle_id: 'b3', - case_id: 'c3', - log_message: 'Connection refused', - logfile_line_number: 16, - message_md5: '1357908642', - cluster: 'cl3', - event_count: 2, - event_md5: '1029384756', - event_dur_ms: 700, - _ttl_: '+5DAYS', - _expire_at_: '2017-05-29T10:30:22.531Z', - _router_field_: 5 - } - ] - }, - 'api/v1/service/logs/clusters': clusters, - - 'api/v1/status': { - auditlogs: { - znodeReady: true, - solrCollectionReady: true, - solrAliasReady: false, - configurationUploaded: true - }, - servicelogs: { - znodeReady: true, - 
solrCollectionReady: true, - configurationUploaded: true - }, - userconfig: { - znodeReady: true, - solrCollectionReady: true, - configurationUploaded: true - } - }, - 'api/v1/shipper/filters/\[a-zA-Z0-9\]{1,}/level': { - 'filter': { - 'ambari_agent': { - 'label': 'ambari_agent', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'ambari_alerts': { - 'label': 'ambari_alerts', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'ambari_server': { - 'label': 'ambari_server', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'ams_collector': { - 'label': 'ams_collector', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'ams_hbase_master': { - 'label': 'ams_hbase_master', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'ams_monitor': { - 'label': 'ams_monitor', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'logsearch_app': { - 'label': 'logsearch_app', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - }, - 'logsearch_feeder': { - 'label': 'logsearch_feeder', - 'hosts': [], - 'defaultLevels': [ - 'FATAL', - 'ERROR', - 'WARN' - ], - 'overrideLevels': [], - 'expiryTime': null - } - } - }, - 'api/v1/shipper/input/[a-zA-Z0-9]{1,}/services$': [ - 'zookeeper', - 'ambari-infra', - 'logsearch', - 'ambari', - 'ambari-metrics', - 'hdfs' - ], - 'api/v1/shipper/input/[a-zA-Z0-9\\-]{1,}/services/[a-zA-Z0-9\\-]{1,}$': { - 'input': [ - { - 'type': 'hdfs_datanode', - 'rowtype': 'service', - 'path': '/var/log/hadoop/hdfs/hadoop-hdfs-datanode-*.log' - }, - { - 'type': 'hdfs_namenode', - 'rowtype': 'service', - 'path': '/var/log/hadoop/hdfs/hadoop-hdfs-namenode-*.log' - }, - { - 'type': 'hdfs_journalnode', - 'rowtype': 'service', - 'path': '/var/log/hadoop/hdfs/hadoop-hdfs-journalnode-*.log' - }, - { - 'type': 'hdfs_secondarynamenode', - 'rowtype': 'service', - 'path': '/var/log/hadoop/hdfs/hadoop-hdfs-secondarynamenode-*.log' - }, - { - 'type': 'hdfs_zkfc', - 'rowtype': 'service', - 'path': '/var/log/hadoop/hdfs/hadoop-hdfs-zkfc-*.log' - }, - { - 'type': 'hdfs_nfs3', - 'rowtype': 'service', - 'path': '/var/log/hadoop/hdfs/hadoop-hdfs-nfs3-*.log' - }, - { - 'type': 'hdfs_audit', - 'rowtype': 'audit', - 'path': '/var/log/hadoop/hdfs/hdfs-audit.log', - 'add_fields': { - 'logType': 'HDFSAudit', - 'enforcer': 'hadoop-acl', - 'repoType': '1', - 'repo': 'hdfs' - }, - 'is_enabled': true - } - ], - 'filter': [ - { - 'filter': 'grok', - 'conditions': { - 'fields': { - 'type': [ - 'hdfs_datanode', - 'hdfs_journalnode', - 'hdfs_secondarynamenode', - 'hdfs_namenode', - 'hdfs_zkfc', - 'hdfs_nfs3' - ] - } - }, - 'post_map_values': { - 'logtime': [ - { - 'map_date': { - 'target_date_pattern': 'yyyy-MM-dd HH:mm:ss,SSS' - } - } - ] - }, - 'log4j_format': '%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n', - 'multiline_pattern': '^(%{TIMESTAMP_ISO8601:logtime})', - 'message_pattern': '(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}' - }, - { - 'filter': 'grok', - 
'conditions': { - 'fields': { - 'type': [ - 'hdfs_audit' - ] - } - }, - 'post_map_values': { - 'evtTime': [ - { - 'map_date': { - 'target_date_pattern': 'yyyy-MM-dd HH:mm:ss,SSS' - } - } - ] - }, - 'log4j_format': '%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n', - 'multiline_pattern': '^(%{TIMESTAMP_ISO8601:evtTime})', - 'message_pattern': '(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}' - }, - { - 'filter': 'keyvalue', - 'conditions': { - 'fields': { - 'type': [ - 'hdfs_audit' - ] - } - }, - 'sort_order': 1, - 'source_field': 'log_message', - 'post_map_values': { - 'callerContext': [ - { - 'map_field_name': { - 'new_field_name': 'req_caller_id' - } - } - ], - 'src': [ - { - 'map_field_name': { - 'new_field_name': 'resource' - } - } - ], - 'allowed': [ - { - 'map_field_value': { - 'pre_value': 'true', - 'post_value': '1' - } - }, - { - 'map_field_value': { - 'pre_value': 'false', - 'post_value': '0' - } - }, - { - 'map_field_name': { - 'new_field_name': 'result' - } - } - ], - 'ip': [ - { - 'map_field_name': { - 'new_field_name': 'cliIP' - } - } - ], - 'proto': [ - { - 'map_field_name': { - 'new_field_name': 'cliType' - } - } - ], - 'cmd': [ - { - 'map_field_name': { - 'new_field_name': 'action' - } - } - ] - }, - 'field_split': '\t', - 'value_split': '=' - }, - { - 'filter': 'grok', - 'conditions': { - 'fields': { - 'type': [ - 'hdfs_audit' - ] - } - }, - 'sort_order': 2, - 'source_field': 'ugi', - 'remove_source_field': false, - 'post_map_values': { - 'k_authType': [ - { - 'map_field_name': { - 'new_field_name': 'proxyAuthType' - } - } - ], - 'p_authType': [ - { - 'map_field_name': { - 'new_field_name': 'authType' - } - } - ], - 'x_user': [ - { - 'map_field_name': { - 'new_field_name': 'reqUser' - } - } - ], - 'k_user': [ - { - 'map_field_name': { - 'new_field_name': 'proxyUsers' - } - } - ], - 'p_user': [ - { - 'map_field_name': { - 'new_field_name': 'reqUser' - } - } - ], - 'user': [ - { - 'map_field_name': { - 'new_field_name': 'reqUser' - } - } - ] - }, - 'message_pattern': '%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}' - } - ] - } -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-post.ts b/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-post.ts deleted file mode 100644 index 0bb4208d274..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-post.ts +++ /dev/null @@ -1,1677 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
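The GET mock handlers above (for example 'api/v1/service/logs' and 'api/v1/service/logs/logList') coerce the raw query string inline before generating rows: paging fields become integers, 'from'/'to' become moment instances, and the multi-value filters become arrays. The standalone sketch below shows the same coercion; the helper name parseLogQueryParams and the ParsedLogQuery interface are illustrative additions rather than part of the deleted file, and the moment dependency used by the original sources is assumed.

```typescript
import * as moment from 'moment';
import {Moment} from 'moment';

// Shape of the parameters the mock handlers work with (illustrative only).
interface ParsedLogQuery {
  page?: number;
  pageSize?: number;
  startIndex?: number;
  from?: Moment;
  to?: Moment;
  mustBe?: string[];
  hostList?: string[];
  level?: string[];
  [key: string]: any;
}

// Mirrors the inline `rawParams.split('&').reduce(...)` logic of the GET handlers.
function parseLogQueryParams(rawParams: string): ParsedLogQuery {
  return rawParams.split('&').reduce((parsed: ParsedLogQuery, param: string) => {
    const [key, rawValue] = param.split('=');
    let value: any = rawValue;
    switch (key) {
      case 'page':       // numeric paging parameters
      case 'pageSize':
      case 'startIndex':
        value = parseInt(rawValue, 10);
        break;
      case 'from':       // time range boundaries become moment instances
      case 'to':
        value = moment(decodeURIComponent(rawValue));
        break;
      case 'mustBe':     // comma-separated multi-value filters become arrays
      case 'hostList':
      case 'level':
        value = decodeURIComponent(rawValue).split(',');
        break;
    }
    parsed[key] = value;
    return parsed;
  }, {} as ParsedLogQuery);
}
```

With such a helper, the inline reduce in each handler collapses to a single call along the lines of parseLogQueryParams(query.rawParams).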
- */ - -import {Request} from '@angular/http'; - -import * as moment from 'moment'; -import {Moment} from 'moment'; - -import { - clusters, - hosts, - services, - users, - components, - ucFirst, - getRandomInt, - getRandomElement, - generateServiceLog, - generateAuditLog, - generateGraphData -} from './mock-data-common'; -import Base = moment.unitOfTime.Base; - -const currentTime: Moment = moment(); -export const mockDataPost = { - 'login': {}, - 'logout': {}, - - 'api/v1/shipper/[a-zA-Z0-9\\-]{1,}/services/[a-zA-Z0-9\\-]{1,}$': (query: any, request: Request) => { - return {}; - }, - 'api/v1/audit/logs': function (query, request: Request) { - const list = []; - const params = request.json(); - params.to = moment(params.to); - params.from = moment(params.from); - const pageSize = params.pageSize || 50; - const intervalSteps = params.to.diff(params.from) / pageSize; - const startTime = params.from.valueOf(); - for (let i = 0; i < pageSize; i += 1) { - const defaults: {[key: string]: any} = {logtime: startTime + (i * intervalSteps)}; - list.push(generateAuditLog(defaults)); - } - return { - 'startIndex': params.startIndex, - 'pageSize': pageSize, - 'totalCount': 10 * pageSize, - 'resultSize': 10 * pageSize, - 'sortType': params.sortType, - 'sortBy': params.sortBy, - 'queryTimeMS': 1518013198573, - 'logList': list - }; - }, - 'api/v1/audit/logs/bargraph': { - graphData: [{ - dataCount: [ - { - name: currentTime.toISOString(), - value: '75' - }, - { - name: currentTime.clone().subtract(20, 'm').toISOString(), - value: '100' - }, - { - name: currentTime.clone().subtract(40, 'm').toISOString(), - value: '75' - }, - { - name: currentTime.clone().subtract(1, 'h').toISOString(), - value: '50' - } - ], - name: 'AMBARI' - }, { - dataCount: [ - { - name: currentTime.toISOString(), - value: '150' - }, - { - name: currentTime.clone().subtract(20, 'm').toISOString(), - value: '50' - }, - { - name: currentTime.clone().subtract(40, 'm').toISOString(), - value: '75' - }, - { - name: currentTime.clone().subtract(1, 'h').toISOString(), - value: '100' - } - ], - name: 'HDFS' - } - ]}, - 'api/v1/audit/logs/components': { - 'groups': {}, - 'metadata': components.map(comp => { - return { - name: comp, - label: comp.split('_').map(ucFirst).join(' '), - group: null - }; - }) - }, - 'api/v1/audit/logs/resources/\\d': function (query, request: Request) { - const graphData = users.map((user: string) => { - return { - name: user, - dataCount: services.map((service: string) => { - return { - name: service, - value: getRandomInt(1000) - }; - }) - }; - }); - return { - graphData: graphData - }; - }, - 'api/v1/audit/logs/schema/fields': { - 'defaults': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'DST', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Session', - 'filterable': 
true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': true - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'UGI', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': true - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': true - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Client Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': true - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'IP', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Event Time', - 'filterable': true, - 'visible': true - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Access Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Client Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ], - 'overrides': { - 'ambari': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 
'access', - 'label': 'Access', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'Dst', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Sess', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'Req User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'Ugi', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Cli I P', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Evt Time', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Cli Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 
'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ], - 'RangerAudit': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'Dst', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Sess', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'Req User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'Ugi', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Cli I P', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Evt Time', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req 
Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Cli Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ], - 'hdfs': [ - { - 'name': 'logType', - 'label': 'Log Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reason', - 'label': 'Reason', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agent', - 'label': 'Agent', - 'filterable': true, - 'visible': false - }, - { - 'name': 'access', - 'label': 'Access', - 'filterable': true, - 'visible': false - }, - { - 'name': 'dst', - 'label': 'Dst', - 'filterable': true, - 'visible': false - }, - { - 'name': 'perm', - 'label': 'Perm', - 'filterable': true, - 'visible': false - }, - { - 'name': 'event_count', - 'label': 'Event Count', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repo', - 'label': 'Repo', - 'filterable': true, - 'visible': false - }, - { - 'name': 'sess', - 'label': 'Sess', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqUser', - 'label': 'Req User', - 'filterable': true, - 'visible': false - }, - { - 'name': 'task_id', - 'label': 'Task Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'type', - 'label': 'Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqData', - 'label': 'Req Data', - 'filterable': true, - 'visible': false - }, - { - 'name': 'result', - 'label': 'Result', - 'filterable': true, - 'visible': false - }, - { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, - { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, - { - 'name': 'ugi', - 'label': 'Ugi', - 'filterable': true, - 'visible': false - }, - { - 'name': 'case_id', - 'label': 'Case Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'action', - 'label': 'Action', - 'filterable': true, - 'visible': false - }, - { - 'name': 'log_message', - 'label': 'Log Message', - 'filterable': true, - 'visible': false - }, - { - 'name': 'agentHost', - 'label': 'Agent Host', - 'filterable': true, - 'visible': false - }, - { - 'name': 'id', - 'label': 'Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, - { - 'name': 'text', - 'label': 'Text', - 'filterable': true, - 'visible': false - }, - { - 'name': 'authType', - 'label': 'Auth Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, - { - 'name': 'policy', - 'label': 'Policy', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliIP', - 'label': 'Cli I P', - 'filterable': true, - 'visible': false - }, - { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resource', - 'label': 'Resource', - 'filterable': true, - 'visible': false - }, - { - 'name': 'resType', - 'label': 'Res Type', - 
'filterable': true, - 'visible': false - }, - { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, - { - 'name': 'evtTime', - 'label': 'Evt Time', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_self_id', - 'label': 'Req Self Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'repoType', - 'label': 'Repo Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'req_caller_id', - 'label': 'Req Caller Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'enforcer', - 'label': 'Enforcer', - 'filterable': true, - 'visible': false - }, - { - 'name': 'bundle_id', - 'label': 'Bundle Id', - 'filterable': true, - 'visible': false - }, - { - 'name': 'cliType', - 'label': 'Cli Type', - 'filterable': true, - 'visible': false - }, - { - 'name': 'reqContext', - 'label': 'Req Context', - 'filterable': true, - 'visible': false - }, - { - 'name': 'proxyUsers', - 'label': 'Proxy Users', - 'filterable': true, - 'visible': false - } - ] - } - }, - 'api/v1/audit/logs/serviceload': { - graphData: [ - { - dataCount: [ - { - name: 'n4', - value: 1 - }, - { - name: 'n5', - value: 2 - } - ], - name: 'graph2' - }, - { - dataCount: [ - { - name: 'n6', - value: 10 - }, - { - name: 'n7', - value: 20 - } - ], - name: 'graph3' - } - ] - }, - 'api/v1/service/logs': function (query, request: Request) { - const list = []; - const params = request.json(); - params.to = moment(params.to); - params.from = moment(params.from); - const pageSize = params.pageSize || 50; - const intervalSteps = params.to.diff(params.from) / pageSize; - const startTime = params.from.valueOf(); - - for (let i = 0; i < pageSize; i += 1) { - const defaults: {[key: string]: any} = {logtime: startTime + (i * intervalSteps)}; - if (params.mustBe) { - defaults.type = getRandomElement(params.mustBe); - } - if (params.hostList) { - defaults.host = getRandomElement(params.hostList); - } - if (params.level) { - defaults.level = getRandomElement(params.level); - } - list.push(generateServiceLog(defaults)); - } - return { - 'startIndex': params.startIndex, - 'pageSize': pageSize, - 'totalCount': 10 * pageSize, - 'resultSize': 10 * pageSize, - 'sortType': params.sortType, - 'sortBy': params.sortBy, - 'queryTimeMS': 1518013198573, - 'logList': list - }; - }, - 'api/v1/service/logs/logList': (query, request: Request) => { - const list = []; - const params = request.json(); - params.to = moment(params.to); - params.from = moment(params.from); - const pageSize = params.pageSize || 50; - const intervalSteps = params.to.diff(params.from) / pageSize; - const startTime = params.from.valueOf(); - - for (let i = 0; i < pageSize; i += 1) { - const defaults: {[key: string]: any} = { - logtime: startTime + (i * intervalSteps), - event_dur_ms: getRandomInt(1000) - }; - if (params.mustBe) { - defaults.type = getRandomElement(params.mustBe); - } - if (params.hostList) { - defaults.host = getRandomElement(params.hostList); - } - if (params.level) { - defaults.level = getRandomElement(params.level); - } - list.push(generateServiceLog(defaults)); - } - return list; - }, - 'api/v1/service/logs/aggregated': { - graphData: [ - { - name: 'n0', - count: 100, - dataList: [ - { - name: 'n1', - count: 50, - dataList: null - }, - { - name: 'n2', - count: 200, - dataList: null - } - ] - }, - { - name: 'n3', - count: 10, - dataList: [ - { - name: 'n4', - count: 5, - dataList: null - }, - { - name: 'n5', - count: 20, - dataList: null - } - ] - } - ] - }, - 'api/v1/service/logs/components': { - 'groups': {}, - 
'metadata': components.map(comp => { - return { - name: comp, - label: comp.split('_').map(ucFirst).join(' '), - group: null - }; - }) - }, - 'api/v1/service/logs/components/levels/counts': { - vNodeList: [ - { - name: 'ambari', - type: 0, - logLevelCount: [ - { - name: 'ERROR', - value: '10' - }, - { - name: 'WARN', - value: '50' - } - ], - childs: [ - { - name: 'hdfs', - type: 2, - logLevelCount: [ - { - name: 'ERROR', - value: '10' - }, - { - name: 'WARN', - value: '20' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'zookeeper', - type: 3, - logLevelCount: [ - { - name: 'ERROR', - value: '20' - }, - { - name: 'WARN', - value: '40' - } - ], - isParent: false, - isRoot: false - } - ], - isParent: true, - isRoot: false - }, - { - name: 'ambari_agent', - type: 1, - logLevelCount: [ - { - name: 'ERROR', - value: '100' - }, - { - name: 'WARN', - value: '500' - } - ], - isParent: false, - isRoot: false - } - ] - }, - 'api/v1/service/logs/files': { - hostLogFiles: { - clusters: clusters, - services: services - } - }, - 'api/v1/service/logs/histogram': (query: URLSearchParams, request: Request) => { - const body = request.json(); - const unitParam: string[] = decodeURIComponent(body.unit).match(/(\d{1,})([a-zA-Z]{1,})/); - const unit: Base = unitParam[2]; - const amount: number = parseInt(unitParam[1], 0); - const from = moment(decodeURIComponent(body.from)); - const to = moment(decodeURIComponent(body.to)); - return { - graphData: generateGraphData(from, to, unit, amount) - }; - }, - 'api/v1/service/logs/hosts': { - groupList: hosts.map(host => Object.assign({}, {host})) - }, - 'api/v1/service/logs/schema/fields': [{ - 'name': 'cluster', - 'label': 'Cluster', - 'filterable': true, - 'visible': false - }, { - 'name': 'key_log_message', - 'label': 'Key Log Message', - 'filterable': true, - 'visible': false - }, { - 'name': 'type', - 'label': 'Component', - 'filterable': true, - 'visible': true - }, { - 'name': 'path', - 'label': 'Path', - 'filterable': true, - 'visible': false - }, { - 'name': 'logtype', - 'label': 'Logtype', - 'filterable': true, - 'visible': false - }, { - 'name': 'file', - 'label': 'File', - 'filterable': true, - 'visible': false - }, { - 'name': 'line_number', - 'label': 'Line Number', - 'filterable': true, - 'visible': true - }, { - 'name': 'host', - 'label': 'Host', - 'filterable': true, - 'visible': false - }, { - 'name': 'log_message', - 'label': 'Message', - 'filterable': true, - 'visible': true - }, { - 'name': 'logger_name', - 'label': 'Logger Name', - 'filterable': true, - 'visible': false - }, { - 'name': 'logfile_line_number', - 'label': 'Logfile Line Number', - 'filterable': true, - 'visible': false - }, { - 'name': 'group', - 'label': 'Group', - 'filterable': true, - 'visible': false - }, { - 'name': 'method', - 'label': 'Method', - 'filterable': true, - 'visible': false - }, { - 'name': 'level', - 'label': 'Level', - 'filterable': true, - 'visible': true - }, { - 'name': 'ip', - 'label': 'Ip', - 'filterable': true, - 'visible': false - }, { - 'name': 'thread_name', - 'label': 'Thread', - 'filterable': true, - 'visible': false - }, { - 'name': 'logtime', - 'label': 'Log Time', - 'filterable': true, - 'visible': true - }], - 'api/v1/service/logs/serviceconfig': '', - 'api/v1/service/logs/tree': { - vNodeList: [ - { - name: hosts[0], - type: 'H', - value: '1', - childs: [ - { - name: 'ams_collector', - type: 'C', - value: '1', - logLevelCount: [ - { - name: 'WARN', - value: '1' - } - ], - isParent: false, - isRoot: false - } - ], - logLevelCount: [ - { 
- name: 'WARN', - value: '1' - } - ], - isParent: true, - isRoot: true - }, - { - name: hosts[1], - type: 'H', - value: '6', - childs: [ - { - name: 'ams_collector', - type: 'C', - value: '1', - logLevelCount: [ - { - name: 'ERROR', - value: '1' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'ambari_agent', - type: 'C', - value: '1', - logLevelCount: [ - { - name: 'FATAL', - value: '1' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'zookeeper_server', - type: 'C', - value: '2', - logLevelCount: [ - { - name: 'INFO', - value: '1' - }, - { - name: 'DEBUG', - value: '1' - } - ], - isParent: false, - isRoot: false - }, - { - name: 'zookeeper_client', - type: 'C', - value: '2', - logLevelCount: [ - { - name: 'TRACE', - value: '1' - }, - { - name: 'UNKNOWN', - value: '1' - } - ], - isParent: false, - isRoot: false - } - ], - logLevelCount: [ - { - name: 'ERROR', - value: '1' - }, - { - name: 'FATAL', - value: '1' - }, - { - name: 'INFO', - value: '1' - }, - { - name: 'DEBUG', - value: '1' - }, - { - name: 'TRACE', - value: '1' - }, - { - name: 'UNKNOWN', - value: '1' - } - ], - isParent: true, - isRoot: true - } - ] - }, - 'api/v1/service/logs/truncated': { - logList: [ - { - path: '/var/log/ambari-metrics-collector/ambari-metrics-collector.log', - host: 'h0', - level: 'WARN', - logtime: '2017-05-28T11:30:22.531Z', - ip: '192.168.0.1', - logfile_line_number: 8, - type: 'ams_collector', - _version_: 9, - id: 'id2', - file: 'ambari-metrics-collector.log', - seq_num: 10, - bundle_id: 'b2', - case_id: 'c2', - log_message: 'Connection refused', - message_md5: '1357908642', - cluster: 'cl2', - event_count: 5, - event_md5: '1908755391', - event_dur_ms: 200, - _ttl_: '+5DAYS', - _expire_at_: '2017-05-29T11:30:22.531Z', - _router_field_: 20 - }, - { - path: '/var/log/ambari-metrics-collector/ambari-metrics-collector.log', - host: 'h1', - level: 'ERROR', - logtime: '2017-05-28T10:30:22.531Z', - ip: '192.168.0.2', - type: 'ams_collector', - _version_: 14, - id: 'id3', - file: 'ambari-metrics-collector.log', - seq_num: 15, - bundle_id: 'b3', - case_id: 'c3', - log_message: 'Connection refused', - logfile_line_number: 16, - message_md5: '1357908642', - cluster: 'cl3', - event_count: 2, - event_md5: '1029384756', - event_dur_ms: 700, - _ttl_: '+5DAYS', - _expire_at_: '2017-05-29T10:30:22.531Z', - _router_field_: 5 - } - ] - }, - 'api/v1/service/logs/clusters': clusters, - '.*': {} -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-put.ts b/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-put.ts deleted file mode 100644 index 63caf4b79e3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/mockdata/mock-data-put.ts +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
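Both the GET 'api/v1/service/logs/histogram' handler shown earlier and its POST counterpart in the deleted mock-data-post.ts above split the histogram unit parameter with the same regular expression and spread the generated timestamps evenly between 'from' and 'to'. A minimal sketch of that shared logic follows; the function names are illustrative, only the parsing and spacing behaviour mirror the deleted handlers, and the moment dependency is again assumed.

```typescript
import * as moment from 'moment';

// e.g. '15MINUTE' -> { amount: 15, unit: 'MINUTE' } (purely what the regex extracts).
function parseHistogramUnit(unitParam: string): { amount: number, unit: moment.unitOfTime.Base } {
  const match = decodeURIComponent(unitParam).match(/(\d{1,})([a-zA-Z]{1,})/);
  if (!match) {
    throw new Error(`Unexpected unit parameter: ${unitParam}`);
  }
  return {
    amount: parseInt(match[1], 10),
    unit: match[2] as moment.unitOfTime.Base
  };
}

// Evenly spaced epoch-millisecond values across the requested range,
// matching the `startTime + (i * intervalSteps)` loop used by the handlers.
function spreadTimestamps(from: moment.Moment, to: moment.Moment, pageSize: number = 50): number[] {
  const step = to.diff(from) / pageSize;
  const start = from.valueOf();
  return Array.from({length: pageSize}, (_, i) => start + i * step);
}
```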
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import {Request} from "@angular/http"; - -import * as moment from 'moment'; -import {Moment} from "moment"; - -import { - clusters, - hosts, - services, - users, - components, - levels, - ucFirst, - getRandomInt, - getRandomElement, - generatePath, - generateServiceLog, - generateAuditLog -} from './mock-data-common'; - -export const mockDataPost = { - "login": {}, - "logout": {}, - - "api/v1/shipper/[a-zA-Z0-9\\-]{1,}/services/[a-zA-Z0-9\\-]{1,}$": (query: any, request: Request) => { - const body = request.getBody(); - return {}; - }, - ".*": {} -}; diff --git a/ambari-logsearch/ambari-logsearch-web/src/polyfills.ts b/ambari-logsearch/ambari-logsearch-web/src/polyfills.ts deleted file mode 100644 index 016ab77dca0..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/polyfills.ts +++ /dev/null @@ -1,86 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * This file includes polyfills needed by Angular and is loaded before the app. - * You can add your own extra polyfills to this file. - * - * This file is divided into 2 sections: - * 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers. - * 2. Application imports. Files imported after ZoneJS that should be loaded before your main - * file. - * - * The current setup is for so-called "evergreen" browsers; the last versions of browsers that - * automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera), - * Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile. - * - * Learn more in https://angular.io/docs/ts/latest/guide/browser-support.html - */ - -/*************************************************************************************************** - * BROWSER POLYFILLS - */ - -/** IE9, IE10 and IE11 requires all of the following polyfills. **/ -// import 'core-js/es6/symbol'; -import 'core-js/es6/object'; -// import 'core-js/es6/function'; -// import 'core-js/es6/parse-int'; -// import 'core-js/es6/parse-float'; -import 'core-js/es6/number'; -// import 'core-js/es6/math'; -import 'core-js/es6/string'; -// import 'core-js/es6/date'; -import 'core-js/es6/array'; -// import 'core-js/es6/regexp'; -// import 'core-js/es6/map'; -// import 'core-js/es6/set'; - -/** IE10 and IE11 requires the following for NgClass support on SVG elements */ -// import 'classlist.js'; // Run `npm install --save classlist.js`. - -/** IE10 and IE11 requires the following to support `@angular/animation`. */ -// import 'web-animations-js'; // Run `npm install --save web-animations-js`. - - -/** Evergreen browsers require these. 
**/ -import 'core-js/es6/reflect'; -import 'core-js/es7/reflect'; - - -/** ALL Firefox browsers require the following to support `@angular/animation`. **/ -// import 'web-animations-js'; // Run `npm install --save web-animations-js`. - - - -/*************************************************************************************************** - * Zone JS is required by Angular itself. - */ -import 'zone.js/dist/zone'; // Included with Angular CLI. - - - -/*************************************************************************************************** - * APPLICATION IMPORTS - */ - -/** - * Date, currency, decimal and percent pipes. - * Needed for: All but Chrome, Firefox, Edge, IE11 and Safari 10 - */ -// import 'intl'; // Run `npm install --save intl`. diff --git a/ambari-logsearch/ambari-logsearch-web/src/styles.less b/ambari-logsearch/ambari-logsearch-web/src/styles.less deleted file mode 100644 index e3ecbb77ad2..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/styles.less +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -@import './app/modules/shared/main'; -body { - background-color: @main-background-color; -} -.initial-color { - color: initial; -} - -/** Override Bootstrap rules **/ -.btn-link { - &:hover, &:focus { - text-decoration: none; - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/test.ts b/ambari-logsearch/ambari-logsearch-web/src/test.ts deleted file mode 100644 index 0fc13fb7636..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/test.ts +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// This file is required by karma.conf.js and loads recursively all the .spec and framework files - -import 'zone.js/dist/long-stack-trace-zone'; -import 'zone.js/dist/proxy.js'; -import 'zone.js/dist/sync-test'; -import 'zone.js/dist/jasmine-patch'; -import 'zone.js/dist/async-test'; -import 'zone.js/dist/fake-async-test'; -import {getTestBed} from '@angular/core/testing'; -import {BrowserDynamicTestingModule, platformBrowserDynamicTesting} from '@angular/platform-browser-dynamic/testing'; - -// Unfortunately there's no typing for the `__karma__` variable. Just declare it as any. -declare var __karma__: any; -declare var require: any; - -// Prevent Karma from running prematurely. -__karma__.loaded = function () {}; - -// First, initialize the Angular testing environment. -getTestBed().initTestEnvironment( - BrowserDynamicTestingModule, - platformBrowserDynamicTesting() -); -// Then we find all the tests. -const context = require.context('./', true, /\.spec\.ts$/); -// And load the modules. -context.keys().map(context); -// Finally, start Karma to run the tests. -__karma__.start(); diff --git a/ambari-logsearch/ambari-logsearch-web/src/tsconfig.app.json b/ambari-logsearch/ambari-logsearch-web/src/tsconfig.app.json deleted file mode 100644 index 7166e040435..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/tsconfig.app.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "../out-tsc/app", - "module": "es2015", - "baseUrl": ".", - "types": [] - }, - "exclude": [ - "test.ts", - "**/*.spec.ts" - ] -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/tsconfig.spec.json b/ambari-logsearch/ambari-logsearch-web/src/tsconfig.spec.json deleted file mode 100644 index 510e3f1fdae..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/tsconfig.spec.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "../out-tsc/spec", - "module": "commonjs", - "target": "es5", - "baseUrl": "", - "types": [ - "jasmine", - "node" - ] - }, - "files": [ - "test.ts" - ], - "include": [ - "**/*.spec.ts", - "**/*.d.ts" - ] -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/typings.d.ts b/ambari-logsearch/ambari-logsearch-web/src/typings.d.ts deleted file mode 100644 index e4a302bc5bf..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/typings.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
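The deleted test.ts above discovers specs via require.context('./', true, /\.spec\.ts$/), and the deleted tsconfig.spec.json supplies the jasmine and node typings for them. For illustration only (this is not one of the deleted files), any spec matching that pattern under src/ would be loaded and run by Karma:

```typescript
// Example only: a file named e.g. sample.spec.ts anywhere under src/ is matched by
// the /\.spec\.ts$/ pattern in test.ts; jasmine globals come from tsconfig.spec.json.
describe('sample spec', () => {
  it('is discovered via require.context in test.ts', () => {
    expect(1 + 1).toBe(2);
  });
});
```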
- */ - -/* SystemJS module definition */ -declare var module: NodeModule; -interface NodeModule { - id: string; -} diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/bootstrap-datetimepicker.min.css b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/bootstrap-datetimepicker.min.css deleted file mode 100644 index 5950ad272e3..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/bootstrap-datetimepicker.min.css +++ /dev/null @@ -1,5 +0,0 @@ -/*! - * Datetimepicker for Bootstrap 3 - * version : 4.17.47 - * https://github.com/Eonasdan/bootstrap-datetimepicker/ - */.bootstrap-datetimepicker-widget{list-style:none}.bootstrap-datetimepicker-widget.dropdown-menu{display:block;margin:2px 0;padding:4px;width:19em}@media (min-width:768px){.bootstrap-datetimepicker-widget.dropdown-menu.timepicker-sbs{width:38em}}@media (min-width:992px){.bootstrap-datetimepicker-widget.dropdown-menu.timepicker-sbs{width:38em}}@media (min-width:1200px){.bootstrap-datetimepicker-widget.dropdown-menu.timepicker-sbs{width:38em}}.bootstrap-datetimepicker-widget.dropdown-menu:before,.bootstrap-datetimepicker-widget.dropdown-menu:after{content:'';display:inline-block;position:absolute}.bootstrap-datetimepicker-widget.dropdown-menu.bottom:before{border-left:7px solid transparent;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-bottom-color:rgba(0,0,0,0.2);top:-7px;left:7px}.bootstrap-datetimepicker-widget.dropdown-menu.bottom:after{border-left:6px solid transparent;border-right:6px solid transparent;border-bottom:6px solid white;top:-6px;left:8px}.bootstrap-datetimepicker-widget.dropdown-menu.top:before{border-left:7px solid transparent;border-right:7px solid transparent;border-top:7px solid #ccc;border-top-color:rgba(0,0,0,0.2);bottom:-7px;left:6px}.bootstrap-datetimepicker-widget.dropdown-menu.top:after{border-left:6px solid transparent;border-right:6px solid transparent;border-top:6px solid white;bottom:-6px;left:7px}.bootstrap-datetimepicker-widget.dropdown-menu.pull-right:before{left:auto;right:6px}.bootstrap-datetimepicker-widget.dropdown-menu.pull-right:after{left:auto;right:7px}.bootstrap-datetimepicker-widget .list-unstyled{margin:0}.bootstrap-datetimepicker-widget a[data-action]{padding:6px 0}.bootstrap-datetimepicker-widget a[data-action]:active{box-shadow:none}.bootstrap-datetimepicker-widget .timepicker-hour,.bootstrap-datetimepicker-widget .timepicker-minute,.bootstrap-datetimepicker-widget .timepicker-second{width:54px;font-weight:bold;font-size:1.2em;margin:0}.bootstrap-datetimepicker-widget button[data-action]{padding:6px}.bootstrap-datetimepicker-widget .btn[data-action="incrementHours"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Increment Hours"}.bootstrap-datetimepicker-widget .btn[data-action="incrementMinutes"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Increment Minutes"}.bootstrap-datetimepicker-widget .btn[data-action="decrementHours"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Decrement Hours"}.bootstrap-datetimepicker-widget .btn[data-action="decrementMinutes"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Decrement Minutes"}.bootstrap-datetimepicker-widget 
.btn[data-action="showHours"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Show Hours"}.bootstrap-datetimepicker-widget .btn[data-action="showMinutes"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Show Minutes"}.bootstrap-datetimepicker-widget .btn[data-action="togglePeriod"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Toggle AM/PM"}.bootstrap-datetimepicker-widget .btn[data-action="clear"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Clear the picker"}.bootstrap-datetimepicker-widget .btn[data-action="today"]::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Set the date to today"}.bootstrap-datetimepicker-widget .picker-switch{text-align:center}.bootstrap-datetimepicker-widget .picker-switch::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Toggle Date and Time Screens"}.bootstrap-datetimepicker-widget .picker-switch td{padding:0;margin:0;height:auto;width:auto;line-height:inherit}.bootstrap-datetimepicker-widget .picker-switch td span{line-height:2.5;height:2.5em;width:100%}.bootstrap-datetimepicker-widget table{width:100%;margin:0}.bootstrap-datetimepicker-widget table td,.bootstrap-datetimepicker-widget table th{text-align:center;border-radius:4px}.bootstrap-datetimepicker-widget table th{height:20px;line-height:20px;width:20px}.bootstrap-datetimepicker-widget table th.picker-switch{width:145px}.bootstrap-datetimepicker-widget table th.disabled,.bootstrap-datetimepicker-widget table th.disabled:hover{background:none;color:#777;cursor:not-allowed}.bootstrap-datetimepicker-widget table th.prev::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Previous Month"}.bootstrap-datetimepicker-widget table th.next::after{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0;content:"Next Month"}.bootstrap-datetimepicker-widget table thead tr:first-child th{cursor:pointer}.bootstrap-datetimepicker-widget table thead tr:first-child th:hover{background:#eee}.bootstrap-datetimepicker-widget table td{height:54px;line-height:54px;width:54px}.bootstrap-datetimepicker-widget table td.cw{font-size:.8em;height:20px;line-height:20px;color:#777}.bootstrap-datetimepicker-widget table td.day{height:20px;line-height:20px;width:20px}.bootstrap-datetimepicker-widget table td.day:hover,.bootstrap-datetimepicker-widget table td.hour:hover,.bootstrap-datetimepicker-widget table td.minute:hover,.bootstrap-datetimepicker-widget table td.second:hover{background:#eee;cursor:pointer}.bootstrap-datetimepicker-widget table td.old,.bootstrap-datetimepicker-widget table td.new{color:#777}.bootstrap-datetimepicker-widget table td.today{position:relative}.bootstrap-datetimepicker-widget table td.today:before{content:'';display:inline-block;border:solid transparent;border-width:0 0 7px 7px;border-bottom-color:#337ab7;border-top-color:rgba(0,0,0,0.2);position:absolute;bottom:4px;right:4px}.bootstrap-datetimepicker-widget table td.active,.bootstrap-datetimepicker-widget table td.active:hover{background-color:#337ab7;color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25)}.bootstrap-datetimepicker-widget table td.active.today:before{border-bottom-color:#fff}.bootstrap-datetimepicker-widget table td.disabled,.bootstrap-datetimepicker-widget table td.disabled:hover{background:none;color:#777;cursor:not-allowed}.bootstrap-datetimepicker-widget table td span{display:inline-block;width:54px;height:54px;line-height:54px;margin:2px 1.5px;cursor:pointer;border-radius:4px}.bootstrap-datetimepicker-widget table td span:hover{background:#eee}.bootstrap-datetimepicker-widget table td span.active{background-color:#337ab7;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.bootstrap-datetimepicker-widget table td span.old{color:#777}.bootstrap-datetimepicker-widget table td span.disabled,.bootstrap-datetimepicker-widget table td span.disabled:hover{background:none;color:#777;cursor:not-allowed}.bootstrap-datetimepicker-widget.usetwentyfour td.hour{height:27px;line-height:27px}.bootstrap-datetimepicker-widget.wider{width:21em}.bootstrap-datetimepicker-widget .datepicker-decades .decade{line-height:1.8em !important}.input-group.date .input-group-addon{cursor:pointer}.sr-only{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0} \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/bootstrap-logsearch.min.css b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/bootstrap-logsearch.min.css deleted file mode 100644 index aabda78b20b..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/bootstrap-logsearch.min.css +++ /dev/null @@ -1,18 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -@font-face{font-family:Roboto;font-weight:400;font-style:normal;src:url(fonts/Roboto-Regular-webfont.eot);src:url(fonts/Roboto-Regular-webfont.eot?#iefix) format('embedded-opentype'),url(fonts/Roboto-Regular-webfont.woff) format('woff'),url(fonts/Roboto-Regular-webfont.ttf) format('truetype'),url(fonts/Roboto-Regular-webfont.svg#robotoregular) format('svg')}@font-face{font-family:Roboto;font-weight:700;font-style:normal;src:url(fonts/Roboto-Bold-webfont.eot);src:url(fonts/Roboto-Bold-webfont.eot?#iefix) format('embedded-opentype'),url(fonts/Roboto-Bold-webfont.woff) format('woff'),url(fonts/Roboto-Bold-webfont.ttf) format('truetype'),url(fonts/Roboto-Bold-webfont.svg#robotobold) format('svg')}.font-mixin{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333}.btn,.btn:focus{outline:0;font-family:Roboto,sans-serif;text-transform:uppercase;height:34px;font-size:14px;padding:10px 20px;line-height:14px}.btn .glyphicon,.btn:focus .glyphicon{top:-1px;float:left}.box-shadow{box-shadow:0 0 2px 0 #1391c1}.btn-disabled{opacity:.6;box-shadow:none}.btn-default-disabled{opacity:.6;box-shadow:none;color:#fff;background-color:#808793;border:none}.btn-default,.btn-default:focus{color:#666;background-color:#fff;border:1px solid #cfd3d7}.btn-default:focus:hover,.btn-default:hover{color:#fff;background-color:#808793}.btn-default:active,.btn-default:focus:active{color:#666;background-color:#fff;border:1px solid #cfd3d7;box-shadow:0 0 2px 0 #1391c1}.btn-default.disabled,.btn-default:focus.disabled,.btn-default:focus[disabled],.btn-default[disabled]{opacity:.6;box-shadow:none;color:#fff;background-color:#808793;border:none}.btn-default.disabled.active,.btn-default.disabled:active,.btn-default.disabled:hover,.btn-default:focus.disabled.active,.btn-default:focus.disabled:active,.btn-default:focus.disabled:hover,.btn-default:focus[disabled].active,.btn-default:focus[disabled]:active,.btn-default:focus[disabled]:hover,.btn-default[disabled].active,.btn-default[disabled]:active,.btn-default[disabled]:hover{opacity:.6;box-shadow:none;color:#fff;background-color:#808793;border:none}.btn-primary-disabled{opacity:.6;box-shadow:none;color:#d1e8d1;background-color:#3fae2a;border:1px solid #3fae2a}.btn-primary,.btn-primary:focus{color:#fff;background-color:#3fae2a;border:1px solid #3fae2a}.btn-primary:focus:hover,.btn-primary:hover{color:#fff;background-color:#429929;border:1px solid #429929}.btn-primary.active,.btn-primary:active,.btn-primary:focus.active,.btn-primary:focus:active{color:#fff;background-color:#3fae2a;border:1px solid #3fae2a;box-shadow:0 0 2px 0 #1391c1}.btn-primary.disabled,.btn-primary:focus.disabled,.btn-primary:focus[disabled],.btn-primary[disabled]{opacity:.6;box-shadow:none;color:#d1e8d1;background-color:#3fae2a;border:1px solid #3fae2a}.btn-primary.disabled.active,.btn-primary.disabled:active,.btn-primary.disabled:hover,.btn-primary:focus.disabled.active,.btn-primary:focus.disabled:active,.btn-primary:focus.disabled:hover,.btn-primary:focus[disabled].active,.btn-primary:focus[disabled]:active,.btn-primary:focus[disabled]:hover,.btn-primary[disabled].active,.btn-primary[disabled]:active,.btn-primary[disabled]:hover{opacity:.6;box-shadow:none;color:#d1e8d1;background-color:#3fae2a;border:1px solid #3fae2a}.btn-secondary-disabled{opacity:.6;box-shadow:none;color:#d1e8d1;background-color:#429929;border:1px solid #3fae2a}.btn-secondary,.btn-secondary:focus{color:#429929;background-color:#fff;border:1px solid 
#3fae2a}.btn-secondary:focus:hover,.btn-secondary:hover{color:#fff;background-color:#429929}.btn-secondary:active,.btn-secondary:focus:active{color:#429929;background-color:#fff;box-shadow:0 0 2px 0 #1391c1}.btn-secondary.disabled,.btn-secondary:focus.disabled,.btn-secondary:focus[disabled],.btn-secondary[disabled]{opacity:.6;box-shadow:none;color:#d1e8d1;background-color:#429929;border:1px solid #3fae2a}.btn-secondary.disabled.active,.btn-secondary.disabled:active,.btn-secondary.disabled:hover,.btn-secondary:focus.disabled.active,.btn-secondary:focus.disabled:active,.btn-secondary:focus.disabled:hover,.btn-secondary:focus[disabled].active,.btn-secondary:focus[disabled]:active,.btn-secondary:focus[disabled]:hover,.btn-secondary[disabled].active,.btn-secondary[disabled]:active,.btn-secondary[disabled]:hover{opacity:.6;box-shadow:none;color:#d1e8d1;background-color:#429929;border:1px solid #3fae2a}.btn-success{border:none}.btn-regular-default-state{background-color:#fff;color:#666;border:1px solid #cfd3d7}.btn-primary-default-state{background-color:#3fae2a;border:1px solid #3fae2a;color:#fff}.btn-group.open .btn.dropdown-toggle,.dropdown.open .btn.dropdown-toggle{box-shadow:inset 0 0 3px 0 #1391c1}.btn-group.open .btn.dropdown-toggle,.btn-group.open .btn.dropdown-toggle.btn-default,.dropdown.open .btn.dropdown-toggle,.dropdown.open .btn.dropdown-toggle.btn-default{background-color:#fff;color:#666;border:1px solid #cfd3d7}.btn-group.open .btn.dropdown-toggle.btn-default:hover,.btn-group.open .btn.dropdown-toggle:hover,.dropdown.open .btn.dropdown-toggle.btn-default:hover,.dropdown.open .btn.dropdown-toggle:hover{background-color:#fff;color:#666;border:1px solid #cfd3d7}.btn-group.open .btn.dropdown-toggle+.dropdown-menu>li>a:hover,.btn-group.open .btn.dropdown-toggle.btn-default+.dropdown-menu>li>a:hover,.dropdown.open .btn.dropdown-toggle+.dropdown-menu>li>a:hover,.dropdown.open .btn.dropdown-toggle.btn-default+.dropdown-menu>li>a:hover{background-color:#808793;color:#fff}.btn-group.open .btn.dropdown-toggle.btn-primary,.dropdown.open .btn.dropdown-toggle.btn-primary{background-color:#3fae2a;border:1px solid #3fae2a;color:#fff}.btn-group.open .btn.dropdown-toggle.btn-primary:hover,.dropdown.open .btn.dropdown-toggle.btn-primary:hover{background-color:#3fae2a;border:1px solid #3fae2a;color:#fff}.btn-group.open .btn.dropdown-toggle.btn-primary+.dropdown-menu>li>a:hover,.dropdown.open .btn.dropdown-toggle.btn-primary+.dropdown-menu>li>a:hover{background-color:#429929;color:#fff}.btn-group.open .dropdown-menu,.dropdown.open .dropdown-menu{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;border-radius:2px;font-size:14px;min-width:200px;background:#fff;color:#666;border:1px solid #cfd3d7}.btn-group.open .dropdown-menu>li,.dropdown.open .dropdown-menu>li{margin-bottom:1px}.btn-group.open .dropdown-menu>li>a,.dropdown.open .dropdown-menu>li>a{height:24px}.btn-group .btn.dropdown-toggle:first-child,.dropdown .btn.dropdown-toggle:first-child{min-width:80px}.btn-group .btn.dropdown-toggle.disabled,.btn-group .btn.dropdown-toggle[disabled],.dropdown .btn.dropdown-toggle.disabled,.dropdown .btn.dropdown-toggle[disabled]{opacity:.6}input.form-control{font-size:14px;border-radius:2px;color:#666;border:1px solid 
#cfd3d7;height:34px;padding:10px}input.form-control:focus{border-color:#1291c1;box-shadow:none}.help-block{color:#999;font-size:14px}.help-block.validation-block{color:#999;margin-top:10px}.help-block.validation-block::before{position:relative;top:2px;margin-right:5px;font-family:'Glyphicons Halflings'}.has-success input.form-control{color:#666;border:1px solid #1eb475}.has-success input.form-control:focus{border-color:#1eb475;box-shadow:none}.has-success .help-block.validation-block::before{content:'\e084';color:#1eb475}.has-error input.form-control{color:#666;border:1px solid #ef6162}.has-error input.form-control:focus{border-color:#ef6162;box-shadow:none}.has-error .help-block.validation-block::before{content:'\e083';color:#ef6162}.has-warning input.form-control{color:#666;border:1px solid #e98a40}.has-warning input.form-control:focus{border-color:#e98a40;box-shadow:none}.has-warning .help-block.validation-block::before{content:'\e101';color:#e98a40}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{color:#999;border-color:#ccc;background-color:#ddd}h2.table-title{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;margin-top:10px;font-size:20px}.table{color:#666;font-size:13px}.table tfoot,.table thead{color:#999}.table input[type=checkbox]+label{position:relative;line-height:1.3em;font-size:initial;top:4px;margin-bottom:0}.table thead>tr>th{border-bottom-color:#eee}.table tfoot>tr:first-of-type>td{border-top-width:2px;border-top-color:#eee}.table>tbody>tr>td{border-top-color:#eee}.table>tbody>tr.active{background-color:#eee}.table>tbody>tr.active>td{background-color:#eee}.table.table-hover .action{visibility:hidden;padding:0;line-height:1}.table.table-hover .action:hover{text-decoration:none}.table.table-hover>tbody>tr{border-width:0 1px 1px;border-style:solid;border-color:#eee transparent}.table.table-hover>tbody>tr>td{border-width:0}.table.table-hover>tbody>tr:hover{border-color:#a7dff2;background-color:#e7f6fc}.table.table-hover>tbody>tr:hover>td{border-top:1px solid #a7dff2;background-color:#e7f6fc}.table.table-hover>tbody>tr:hover>td .action{visibility:visible}.table.table-hover>tbody>tr:first-of-type>td{border-top:1px solid transparent}.table.table-hover>tbody>tr:first-of-type:hover>td{border-color:#a7dff2}.pagination-block .pagination-block-item{float:left;padding:0 5px}.pagination-block .pagination-block-item a,.pagination-block .pagination-block-item a:focus,.pagination-block .pagination-block-item a:visited{text-decoration:none}.pagination-block .pagination-block-item select{border:none;background-color:transparent;color:#1491c1}.nav.nav-tabs{border:none;margin-bottom:20px}.nav.nav-tabs li a{border-width:0;border-radius:0;border-bottom:3px solid transparent;color:#6b6c6c;text-transform:uppercase}.nav.nav-tabs li a:active,.nav.nav-tabs li a:focus,.nav.nav-tabs li a:hover{color:#333;border-top-width:0;border-left-width:0;border-right-width:0;background:0 0}.nav.nav-tabs li a .badge.badge-important{display:inline;vertical-align:baseline}.nav.nav-tabs li.active a{color:#333;border-bottom:3px solid #3fae2a;padding-bottom:2px}.nav-tabs-left li,.nav-tabs-right li{float:none;margin-bottom:3px}.nav-tabs-left li a,.nav-tabs-right li a{margin-right:0}.nav-tabs-left li{margin-right:-1px}.nav-tabs-left li a{border:3px solid transparent!important}.nav-tabs-left li.active a,.nav-tabs-left li.active a:active,.nav-tabs-left li.active a:focus,.nav-tabs-left li.active a:hover{border-right:3px solid 
#3fae2a!important}.nav-tabs-right li{margin-left:-1px}.nav-tabs-right li a{border:3px solid transparent!important}.nav-tabs-right li.active a,.nav-tabs-right li.active a:active,.nav-tabs-right li.active a:focus,.nav-tabs-right li.active a:hover{border-left:3px solid #3fae2a!important}.wizard{border:2px solid rgba(233,233,233,.5)}.wizard .wizard-header h3{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;font-size:20px;color:#333;margin:15px 20px}.wizard .wizard-body{overflow:hidden;margin:0}.wizard .wizard-body .wizard-content{background:rgba(233,233,233,.5);padding-top:15px;float:left;margin-bottom:-99999px;padding-bottom:99999px}.wizard .wizard-body .wizard-content .step-header{font-family:Roboto,sans-serif;font-weight:400;color:#666;font-size:14px;font-style:normal;line-height:1;margin-bottom:5px}.wizard .wizard-body .wizard-content .step-title{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:16px}.wizard .wizard-body .wizard-content .step-description{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:12px;line-height:1.4;color:#999}.wizard .wizard-body .wizard-content .panel.panel-default{border:none;box-shadow:none;margin-top:20px}.wizard .wizard-body .wizard-content .panel.panel-default .panel-body{padding:10px 20px}.wizard .wizard-body .wizard-nav{min-height:550px;padding-top:25px;background-color:#323544;float:left;margin-bottom:-99999px;padding-bottom:99999px}.wizard .wizard-body .wizard-nav .nav li{padding:0 15px}.wizard .wizard-body .wizard-nav .nav li a{height:48px;padding:0 5px;display:table-cell;vertical-align:middle}.wizard .wizard-body .wizard-nav .nav li .step-marker{position:absolute;top:9px;line-height:16px;text-align:center;width:23px;height:23px;border:2px solid #1eb475;border-radius:50%;font-size:12px;font-style:inherit;color:#1eb475;background-color:#323544}.wizard .wizard-body .wizard-nav .nav li .step-name{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:14px;color:#999;margin-left:30px;margin-bottom:5px}.wizard .wizard-body .wizard-nav .nav li .step-index{line-height:18px}.wizard .wizard-body .wizard-nav .nav li .step-description{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:12px;color:#999;margin-left:30px}.wizard .wizard-body .wizard-nav .nav li.completed .step-marker{background-color:#1eb475;color:#fff;font-size:10px;padding-left:2px}.wizard .wizard-body .wizard-nav .nav li.completed .step-marker .step-index{display:none}.wizard .wizard-body .wizard-nav .nav li.completed .step-marker:after{font-family:"Glyphicons Halflings";content:"\e013";position:relative;top:1px;left:-1px}.wizard .wizard-body .wizard-nav .nav li.completed:after{width:2px;height:100%;position:absolute;background-color:#1eb475;content:"";top:25px;left:31px}.wizard .wizard-body .wizard-nav .nav li.completed:last-child:after{content:none}.wizard .wizard-body .wizard-nav .nav li.active .step-name{font-weight:700}.wizard .wizard-body .wizard-nav .nav li.disabled .step-marker{color:#666;border-color:#666}.wizard .wizard-body .wizard-nav .nav li.disabled .step-description,.wizard .wizard-body .wizard-nav .nav li.disabled .step-name{color:#666}.wizard .wizard-body .wizard-nav .nav li.disabled.completed .step-marker{background-color:#1eb475;border:2px solid #1eb475;color:#fff}.wizard .wizard-body .wizard-nav .nav-pills>li.active>a,.wizard .wizard-body .wizard-nav 
.nav-pills>li.active>a:focus,.wizard .wizard-body .wizard-nav .nav-pills>li.active>a:hover,.wizard .wizard-body .wizard-nav .nav>li>a:focus,.wizard .wizard-body .wizard-nav .nav>li>a:hover{background-color:inherit}.wizard .wizard-body .wizard-footer{background:#fff;padding:15px 20px}.wizard .wizard-body .wizard-footer button{margin:0 10px}.checkbox-disabled-style{background-color:#b2b8c1;border-color:#b2b8c1}input[type=checkbox]:checked,input[type=checkbox]:not(:checked),input[type=radio]:checked,input[type=radio]:not(:checked){display:none}input[type=checkbox]:checked+label,input[type=checkbox]:not(:checked)+label,input[type=radio]:checked+label,input[type=radio]:not(:checked)+label{position:relative;padding-left:20px}input[type=checkbox]:checked+label:hover:before,input[type=checkbox]:not(:checked)+label:hover:before,input[type=radio]:checked+label:hover:before,input[type=radio]:not(:checked)+label:hover:before{border-color:#1491c1;background-color:#1491c1}input[type=checkbox]:checked+label:before,input[type=radio]:checked+label:before{background-color:#1491c1;border-color:#1491c1}input[type=checkbox].disabled+label:before,input[type=checkbox].disabled+label:hover:before,input[type=checkbox][disabled]+label:before,input[type=checkbox][disabled]+label:hover:before,input[type=radio].disabled+label:before,input[type=radio].disabled+label:hover:before,input[type=radio][disabled]+label:before,input[type=radio][disabled]+label:hover:before{background-color:#b2b8c1;border-color:#b2b8c1}input[type=checkbox]+label:before{content:'';position:absolute;left:0;top:4px;width:10px;height:10px;-moz-box-sizing:border-box;box-sizing:border-box;border-radius:2px;border-width:1px;border-style:solid;border-color:#ddd}input[type=checkbox]:checked+label:after{content:'\2714';color:#fff;position:absolute;top:0;left:2px;font-size:9px}input.radio+label:before,input[type=radio]+label:before{content:'';position:absolute;left:0;top:3px;width:12px;height:12px;-moz-box-sizing:border-box;box-sizing:border-box;border-radius:12px;border-width:1px;border-style:solid;border-color:#ddd}input.radio:checked+label:after,input[type=radio]:checked+label:after{content:'';background-color:#fff;position:absolute;top:6px;left:3px;width:6px;height:6px;border-radius:6px}.navigation-bar-container{height:auto;width:230px;background-color:#323544;padding:0;-ms-overflow-style:none;transition:width .5s ease-out;-webkit-font-smoothing:antialiased}.navigation-bar-container ul.nav.side-nav-header{width:230px;transition:width .5s ease-out}.navigation-bar-container ul.nav.side-nav-header li.navigation-header{background:#313d54;padding:15px 5px 15px 25px;height:55px}.navigation-bar-container ul.nav.side-nav-header li.navigation-header>a.ambari-logo{padding:0}.navigation-bar-container ul.nav.side-nav-header li.navigation-header>a.ambari-logo>img{height:25px;float:left;margin-left:-3px}.navigation-bar-container ul.nav.side-nav-header li.navigation-header .btn-group{cursor:pointer;margin-top:3px}.navigation-bar-container ul.nav.side-nav-header li.navigation-header .btn-group:hover span.ambari-header{color:#fff}.navigation-bar-container ul.nav.side-nav-header li.navigation-header .btn-group span.ambari-header{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:20px;width:55px;display:inline;color:#b8bec4;padding:0 8px 0 10px}.navigation-bar-container ul.nav.side-nav-header li.navigation-header .btn-group 
span.toggle-icon{margin-bottom:5px;font-size:13px;display:inline-block;vertical-align:middle;color:#43ad49}.navigation-bar-container ul.nav.side-nav-header li.navigation-header .btn-group.open .dropdown-toggle{box-shadow:none}.navigation-bar-container ul.nav.side-nav-header li.navigation-header ul.dropdown-menu{top:30px}.navigation-bar-container ul.nav.side-nav-header li.navigation-header ul.dropdown-menu li>a{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:14px;color:#666;line-height:1.42;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.navigation-bar-container ul.nav.side-nav-header li.navigation-header ul.dropdown-menu li>a:hover{background:#f5f5f5}.navigation-bar-container ul.nav.side-nav-footer,.navigation-bar-container ul.nav.side-nav-menu{background-color:#323544;width:230px;transition:width .5s ease-out}.navigation-bar-container ul.nav.side-nav-footer li,.navigation-bar-container ul.nav.side-nav-menu li{padding:0;margin:0}.navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li>a,.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer>a,.navigation-bar-container ul.nav.side-nav-footer li.submenu-li>a,.navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li>a,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer>a,.navigation-bar-container ul.nav.side-nav-menu li.submenu-li>a{display:table-cell;vertical-align:middle;width:230px;border-radius:0;-moz-border-radius:0;-webkit-border-radius:0;white-space:nowrap}.navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-footer li.submenu-li>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li.submenu-li>a .navigation-menu-item{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:14px;color:#b8bec4;padding-left:8px}.navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-footer li.submenu-li>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li.submenu-li>a .navigation-icon{line-height:18px;font-size:16px;color:#b8bec4}.navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-footer li.submenu-li>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li.submenu-li>a .toggle-icon{line-height:14px;font-size:14px;color:#b8bec4;padding:3px 5px 3px 10px}.navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li>a,.navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li>a{padding:10px 5px 10px 20px}.navigation-bar-container ul.nav.side-nav-footer 
li.navigation-footer>a,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer>a{padding:14px 5px 14px 20px}.navigation-bar-container ul.nav.side-nav-footer li.submenu-li>a,.navigation-bar-container ul.nav.side-nav-menu li.submenu-li>a{padding:10px 5px 10px 25px}.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer{background:#313d54;height:48px}.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer a .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer a .navigation-icon{color:#3fae2a;font-size:19px;position:relative;padding:0 15px;left:calc(30%)}.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer a .navigation-icon:hover,.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer a .navigation-icon:hover{color:#fff}.navigation-bar-container ul.nav.side-nav-footer li>ul>li,.navigation-bar-container ul.nav.side-nav-menu li>ul>li{background-color:#323544}.navigation-bar-container ul.nav.side-nav-footer li>ul>li a,.navigation-bar-container ul.nav.side-nav-menu li>ul>li a{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:14px;color:#999}.navigation-bar-container ul.nav.side-nav-footer li>ul>li a .submenu-icon,.navigation-bar-container ul.nav.side-nav-menu li>ul>li a .submenu-icon{line-height:14px;font-size:14px}.navigation-bar-container ul.nav.side-nav-footer li>a:hover,.navigation-bar-container ul.nav.side-nav-footer li>ul>li>a:hover,.navigation-bar-container ul.nav.side-nav-menu li>a:hover,.navigation-bar-container ul.nav.side-nav-menu li>ul>li>a:hover{background:#404351;cursor:pointer;color:#fff}.navigation-bar-container ul.nav.side-nav-footer li>a:hover .navigation-icon,.navigation-bar-container ul.nav.side-nav-footer li>a:hover .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-footer li>a:hover .submenu-item,.navigation-bar-container ul.nav.side-nav-footer li>a:hover .toggle-icon,.navigation-bar-container ul.nav.side-nav-footer li>ul>li>a:hover .navigation-icon,.navigation-bar-container ul.nav.side-nav-footer li>ul>li>a:hover .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-footer li>ul>li>a:hover .submenu-item,.navigation-bar-container ul.nav.side-nav-footer li>ul>li>a:hover .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li>a:hover .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li>a:hover .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li>a:hover .submenu-item,.navigation-bar-container ul.nav.side-nav-menu li>a:hover .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li>ul>li>a:hover .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li>ul>li>a:hover .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li>ul>li>a:hover .submenu-item,.navigation-bar-container ul.nav.side-nav-menu li>ul>li>a:hover .toggle-icon{color:#fff}.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu),.navigation-bar-container ul.nav.side-nav-menu li.active.collapsed,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu){background:#404351;cursor:pointer}.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed>a,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu)>a,.navigation-bar-container ul.nav.side-nav-menu 
li.active.collapsed>a,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu)>a{color:#fff}.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed>a .submenu-item,.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu)>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu)>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu)>a .submenu-item,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu)>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li.active.collapsed>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li.active.collapsed>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li.active.collapsed>a .submenu-item,.navigation-bar-container ul.nav.side-nav-menu li.active.collapsed>a .toggle-icon,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu)>a .navigation-icon,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu)>a .navigation-menu-item,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu)>a .submenu-item,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu)>a .toggle-icon{color:#fff}.navigation-bar-container ul.nav.side-nav-footer li.active.collapsed>a:after,.navigation-bar-container ul.nav.side-nav-footer li.active:not(.has-sub-menu)>a:after,.navigation-bar-container ul.nav.side-nav-menu li.active.collapsed>a:after,.navigation-bar-container ul.nav.side-nav-menu li.active:not(.has-sub-menu)>a:after{left:0;top:50%;border:solid transparent;border-width:10px 7px;content:" ";height:0;width:0;position:absolute;pointer-events:none;border-color:transparent;border-left-color:#3fae2a;margin-top:-12px}.navigation-bar-container ul.nav.side-nav-footer .more-actions,.navigation-bar-container ul.nav.side-nav-menu .more-actions{display:block;position:absolute;top:14px;right:33px;line-height:25px;width:20px;text-align:center;font-size:14px;cursor:pointer;vertical-align:middle;color:#fff}.navigation-bar-container ul.nav.side-nav-footer .more-actions .dropdown-menu>li>a,.navigation-bar-container ul.nav.side-nav-menu .more-actions .dropdown-menu>li>a{color:#333}.navigation-bar-container ul.nav.side-nav-footer .more-actions .dropdown-menu>li>a i,.navigation-bar-container ul.nav.side-nav-menu .more-actions .dropdown-menu>li>a i{color:#333}.navigation-bar-container ul.nav.side-nav-footer .more-actions .dropdown-menu>li>a:hover,.navigation-bar-container ul.nav.side-nav-menu .more-actions .dropdown-menu>li>a:hover{background:#f5f5f5}.navigation-bar-container ul.nav.side-nav-footer .more-actions .dropdown-menu>li>a.disabled,.navigation-bar-container ul.nav.side-nav-menu .more-actions .dropdown-menu>li>a.disabled{color:#666}.navigation-bar-container ul.nav.side-nav-footer .more-actions .dropdown-menu>li>a.disabled i,.navigation-bar-container ul.nav.side-nav-menu .more-actions .dropdown-menu>li>a.disabled i{color:#666}.navigation-bar-container ul.nav.side-nav-footer .more-actions .dropdown-menu>li>a.disabled:hover,.navigation-bar-container ul.nav.side-nav-menu .more-actions 
.dropdown-menu>li>a.disabled:hover{background:#f5f5f5}.navigation-bar-container ul.nav.side-nav-footer .menu-item-name,.navigation-bar-container ul.nav.side-nav-menu .menu-item-name{display:inline-block;vertical-align:bottom;max-width:100px;overflow:hidden;text-overflow:ellipsis;-o-text-overflow:ellipsis;-ms-text-overflow:ellipsis;white-space:nowrap}.navigation-bar-container .nav-pills>li.active>a,.navigation-bar-container .nav-pills>li.active>a:focus,.navigation-bar-container .nav-pills>li.active>a:hover,.navigation-bar-container .nav>li>a:focus,.navigation-bar-container .nav>li>a:hover{background-color:inherit}.navigation-bar-container.collapsed{width:50px}.navigation-bar-container.collapsed ul.nav.side-nav-header{width:50px}.navigation-bar-container.collapsed ul.nav.side-nav-header li.navigation-header{padding:15px 0 15px 15px}.navigation-bar-container.collapsed ul.nav.side-nav-header li.navigation-header .dropdown-menu,.navigation-bar-container.collapsed ul.nav.side-nav-header li.navigation-header span.ambari-header,.navigation-bar-container.collapsed ul.nav.side-nav-header li.navigation-header span.toggle-icon{display:none}.navigation-bar-container.collapsed ul.nav.side-nav-footer,.navigation-bar-container.collapsed ul.nav.side-nav-menu{width:50px}.navigation-bar-container.collapsed ul.nav.side-nav-footer li a,.navigation-bar-container.collapsed ul.nav.side-nav-menu li a{padding:15px 0 15px 15px;width:50px}.navigation-bar-container.collapsed ul.nav.side-nav-footer li a .navigation-menu-item,.navigation-bar-container.collapsed ul.nav.side-nav-footer li a .toggle-icon,.navigation-bar-container.collapsed ul.nav.side-nav-menu li a .navigation-menu-item,.navigation-bar-container.collapsed ul.nav.side-nav-menu li a .toggle-icon{display:none}.navigation-bar-container.collapsed ul.nav.side-nav-footer li a .navigation-icon,.navigation-bar-container.collapsed ul.nav.side-nav-menu li a .navigation-icon{font-size:19px}.navigation-bar-container.collapsed ul.nav.side-nav-footer li.navigation-footer a .navigation-icon,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.navigation-footer a .navigation-icon{padding:0 5px;left:0}.navigation-bar-container.collapsed ul.nav.side-nav-footer li ul.sub-menu,.navigation-bar-container.collapsed ul.nav.side-nav-menu li ul.sub-menu{display:none;width:230px;position:absolute;z-index:100;top:0;left:50px}.navigation-bar-container.collapsed ul.nav.side-nav-footer li.submenu-li>a,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.submenu-li>a{padding:10px 5px 10px 25px;width:230px}.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.active{background:#404351;cursor:pointer}.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active>a,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.active>a{color:#fff}.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active>a .navigation-icon,.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active>a .navigation-menu-item,.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active>a .submenu-item,.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active>a .toggle-icon,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.active>a .navigation-icon,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.active>a .navigation-menu-item,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.active>a .submenu-item,.navigation-bar-container.collapsed ul.nav.side-nav-menu 
li.active>a .toggle-icon{color:#fff}.navigation-bar-container.collapsed ul.nav.side-nav-footer li.active>a:after,.navigation-bar-container.collapsed ul.nav.side-nav-menu li.active>a:after{left:0;top:50%;border:solid transparent;border-width:12px 6px;content:" ";height:0;width:0;position:absolute;pointer-events:none;border-color:transparent;border-left-color:#3fae2a;margin-top:-12px}.navigation-bar-container.collapsed ul.nav.side-nav-footer .more-actions,.navigation-bar-container.collapsed ul.nav.side-nav-menu .more-actions{display:none}.navigation-bar-fit-height{position:fixed;top:0;bottom:0;left:0;z-index:2079}.navigation-bar-fit-height .side-nav-header{position:absolute;top:0}.navigation-bar-fit-height .side-nav-menu{position:absolute;top:55px;bottom:50px}.navigation-bar-fit-height .side-nav-footer{position:absolute;bottom:0}.navigation-bar-fit-height .more-actions .dropdown-menu{position:fixed;top:auto;left:auto}.navigation-bar-fit-height .navigation-bar-container{height:100%}.navigation-bar-fit-height .navigation-bar-container .side-nav-menu{overflow-y:auto}.notifications-group{position:relative;top:1px}#notifications-dropdown.dropdown-menu,.notifications-dropdown{min-width:400px;max-width:400px;min-height:150px;padding:0;z-index:1000;right:-50px;left:auto;top:260%;border:none;box-shadow:0 2px 10px 2px rgba(0,0,0,.29)}#notifications-dropdown.dropdown-menu .popup-arrow-up,.notifications-dropdown .popup-arrow-up{position:absolute;right:37px;top:-40px;width:40px;height:40px;overflow:hidden}#notifications-dropdown.dropdown-menu .popup-arrow-up:after,.notifications-dropdown .popup-arrow-up:after{content:"";position:absolute;width:20px;height:20px;background:#fff;-ms-transform:rotate(45deg);transform:rotate(45deg);top:30px;left:10px;box-shadow:-1px -1px 10px -2px rgba(0,0,0,.5)}#notifications-dropdown.dropdown-menu .notifications-header,.notifications-dropdown .notifications-header{border-bottom:1px solid #eee;padding:15px 20px}#notifications-dropdown.dropdown-menu .notifications-header .notifications-title,.notifications-dropdown .notifications-header .notifications-title{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:16px}#notifications-dropdown.dropdown-menu .notifications-body,.notifications-dropdown .notifications-body{padding:0 15px;overflow:auto;max-height:500px}#notifications-dropdown.dropdown-menu .notifications-body .no-alert-text,.notifications-dropdown .notifications-body .no-alert-text{padding:15px 5px}#notifications-dropdown.dropdown-menu .notifications-body .table-controls,.notifications-dropdown .notifications-body .table-controls{padding:10px 0;margin:0;border-bottom:1px solid #eee}#notifications-dropdown.dropdown-menu .notifications-body .table-controls .state-filter,.notifications-dropdown .notifications-body .table-controls .state-filter{padding:0;font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:12px;color:#666;position:relative}#notifications-dropdown.dropdown-menu .notifications-body .table-controls .state-filter .form-control.filter-select,.notifications-dropdown .notifications-body .table-controls .state-filter .form-control.filter-select{font-size:12px;color:#666;height:25px}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table,.notifications-dropdown .notifications-body .table.alerts-table{margin-top:0}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody tr,.notifications-dropdown .notifications-body 
.table.alerts-table tbody tr{cursor:pointer}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody tr.no-alert-tr:hover,.notifications-dropdown .notifications-body .table.alerts-table tbody tr.no-alert-tr:hover{cursor:default;border-color:transparent;border-bottom-color:#eee}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody tr.no-alert-tr:hover>td,.notifications-dropdown .notifications-body .table.alerts-table tbody tr.no-alert-tr:hover>td{border-color:transparent;background-color:#fff}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.status,.notifications-dropdown .notifications-body .table.alerts-table tbody td.status{width:9%;padding:15px 3px}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.status .alert-state-CRITICAL,.notifications-dropdown .notifications-body .table.alerts-table tbody td.status .alert-state-CRITICAL{color:#ef6162}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.status .alert-state-WARNING,.notifications-dropdown .notifications-body .table.alerts-table tbody td.status .alert-state-WARNING{color:#e98a40}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.content,.notifications-dropdown .notifications-body .table.alerts-table tbody td.content{width:90%;padding:15px 3px 10px 3px;font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;line-height:1.3}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.content .name,.notifications-dropdown .notifications-body .table.alerts-table tbody td.content .name{font-weight:700;font-size:14px;color:#333;margin-bottom:5px}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.content .description,.notifications-dropdown .notifications-body .table.alerts-table tbody td.content .description{font-size:12px;color:#666;margin-bottom:4px;display:block;display:-webkit-box;-webkit-line-clamp:3;max-height:47px;overflow:hidden;text-overflow:ellipsis;overflow-wrap:break-word;word-wrap:break-word;-ms-word-break:break-all;word-break:break-all;word-break:break-word;-ms-hyphens:auto;-moz-hyphens:auto;-webkit-hyphens:auto;hyphens:auto}#notifications-dropdown.dropdown-menu .notifications-body .table.alerts-table tbody td.content .timestamp,.notifications-dropdown .notifications-body .table.alerts-table tbody td.content .timestamp{text-align:right;font-size:11px;color:#999}#notifications-dropdown.dropdown-menu .notifications-footer,.notifications-dropdown .notifications-footer{border-top:1px solid #eee;padding:15px}.modal-backdrop{background-color:grey}.modal .modal-content{border-radius:2px}.modal .modal-content .modal-body,.modal .modal-content .modal-footer,.modal .modal-content .modal-header{padding-left:20px;padding-right:20px}.modal .modal-content .modal-header{border-bottom:none;padding-top:20px;color:#666;font-size:20px}.modal .modal-content .modal-header h4{margin:0;color:inherit;font-size:inherit}.modal .modal-content .modal-body{color:#666;font-size:12px}.modal .modal-content .modal-footer{border-top:none;padding-bottom:20px}.modal .modal-content .modal-footer .btn~.btn{margin-left:10px}.accordion .panel-group,.wizard .wizard-body .wizard-content .accordion .panel-group{margin-bottom:0}.accordion .panel-group .panel,.wizard .wizard-body .wizard-content .accordion .panel-group .panel{border-radius:0;border:none;margin-top:0;padding:0 
10px}.accordion .panel-group .panel .panel-heading,.wizard .wizard-body .wizard-content .accordion .panel-group .panel .panel-heading{height:50px;padding:15px 10px;border:1px solid;border-color:#ddd transparent;border-top:none;background:#fff}.accordion .panel-group .panel .panel-heading .panel-title,.wizard .wizard-body .wizard-content .accordion .panel-group .panel .panel-heading .panel-title{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333}.accordion .panel-group .panel .panel-heading .panel-title>a,.wizard .wizard-body .wizard-content .accordion .panel-group .panel .panel-heading .panel-title>a{font-size:18px;color:#333}.accordion .panel-group .panel .panel-heading .panel-title>i,.wizard .wizard-body .wizard-content .accordion .panel-group .panel .panel-heading .panel-title>i{font-size:20px;color:#1491c1}.accordion .panel-group .panel .panel-heading:hover,.wizard .wizard-body .wizard-content .accordion .panel-group .panel .panel-heading:hover{background:#f3faff;cursor:pointer}.accordion .panel-group .panel .panel-body,.wizard .wizard-body .wizard-content .accordion .panel-group .panel .panel-body{padding:15px 10px 20px 20px}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:Roboto,sans-serif}.h1,h1{font-size:24px}.h2,h2{font-size:18px}.body,body{font-family:Roboto,sans-serif;font-weight:400;font-style:normal;line-height:1;color:#333;font-size:14px}.description{font-family:Roboto,sans-serif;font-size:12px;color:#000}a,a:focus,a:visited{color:#1491c1;text-decoration:none}a:focus:hover,a:hover,a:visited:hover{text-decoration:underline}a:active,a:focus:active,a:visited:active{text-decoration:none}a.disabled,a:focus.disabled,a:focus[disabled],a:visited.disabled,a:visited[disabled],a[disabled]{cursor:not-allowed;color:#666;text-decoration:none}a.disabled:hover,a:focus.disabled:hover,a:focus[disabled]:hover,a:visited.disabled:hover,a:visited[disabled]:hover,a[disabled]:hover{text-decoration:none} diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.eot b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.eot deleted file mode 100644 index 94427c335ce..00000000000 Binary files a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.eot and /dev/null differ diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.svg b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.svg deleted file mode 100644 index 32edd3d563f..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.svg +++ /dev/null @@ -1,607 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
[Roboto-Bold-webfont.svg: remaining deleted SVG glyph markup omitted (607 lines total); tags not recoverable]
\ No newline at end of file
diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.ttf b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.ttf
deleted file mode 100644
index f5d90eca0e4..00000000000
Binary files a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.ttf and /dev/null differ
diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.woff b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.woff
deleted file mode 100644
index ee614ee1948..00000000000
Binary files a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Bold-webfont.woff and /dev/null differ
diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.eot b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.eot
deleted file mode 100644
index d4e185d153a..00000000000
Binary files a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.eot and /dev/null differ
diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.svg b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.svg
deleted file mode 100644
index 06824bf0908..00000000000
--- a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.svg
+++ /dev/null
@@ -1,7606 +0,0 @@
[Roboto-Regular-webfont.svg: deleted SVG glyph markup omitted (7606 lines total); tags not recoverable]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.ttf b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.ttf deleted file mode 100644 index 305f0d58cdf..00000000000 Binary files a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.ttf and /dev/null differ diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.woff b/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.woff deleted file mode 100644 index ac7452a6ddd..00000000000 Binary files 
a/ambari-logsearch/ambari-logsearch-web/src/vendor/css/fonts/Roboto-Regular-webfont.woff and /dev/null differ diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/js/WorldMapGenerator.min.js b/ambari-logsearch/ambari-logsearch-web/src/vendor/js/WorldMapGenerator.min.js deleted file mode 100644 index 0b540917db9..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/vendor/js/WorldMapGenerator.min.js +++ /dev/null @@ -1,11 +0,0 @@ -/** - * @version: 1.0.1 - * @author: Keval Bhatt - * @copyright: Copyright (c) 2015 Keval Bhatt. All rights reserved. - * @license: Licensed under the MIT license. See http://www.opensource.org/licenses/mit-license.php - * @website: http://kevalbhatt.github.io/WorldMapGenerator/ - */ -"use strict";!function(e,n){if("function"==typeof define&&define.amd)define(["moment","jquery"],function(o,t){e.worldMapTime={},n(o,t,e.worldMapTime)});else{if("undefined"==typeof e.moment){var o=!1;console.log("Day light feature requires moment.js")}else if(!e.moment.tz)throw new Error("moment-timezone dependency not found");if("undefined"==e.jQuery&&"undefined"==e.Zepto&&"undefined"==e.ender&&"undefined"==e.$)throw new Error("jQuery dependnecy not found");var t=e.worldMapTime={};n(e.moment||o,e.jQuery||e.Zepto||e.ender||e.$,t)}}(this,function(e,n,o){function t(e){return this.each(function(){var o=n(this),t=n.extend({},a.DEFAULTS,o.data(),"object"==typeof e&&e);o.data("WorldMapGenerator",new a(o,t)),o.trigger("map:loaded")})}var i=function(e,o){for(var t=[],i=(a.timeZoneValue.filter(function(i){return i[e]===o?(t.push(n.extend(!0,{},i)),i):void 0}),0);i0){for(var r in n.quickLink[0])i.push(this.genrateElement("span",{"data-select":n.quickLink[0][r]},r));var p=this.genrateElement("div",{"class":"quickLink"},i);s.push(p)}var z=this.genrateElement("svg",{"class":"timezone-map",viewBox:"0 0 "+n.width+" "+n.height},o,!0);if(s.length>0){var c=this.genrateElement("div",{"class":"Cbox"},s);this.$el.append(c)}if(this.$el.append(z),n.showHoverText){var u=this.genrateElement("span",{"class":"hoverZone"});this.$el.append(u)}n.defaultCss&&this.createCss(n),this.bindEvent(n)},bindEvent:function(){var e=this;this.$el.on("mouseenter","svg polygon",function(o){var t=n(this).data();n('.timezone-map polygon[data-zonename="'+t.zonename+'"]').attr("class","active"),e.$el.find(".hoverZone").text(t.timezone+" ("+t.zonename+")")}),this.$el.on("mouseleave","svg polygon",function(o){n(".timezone-map polygon").attr("class",""),e.$el.find(".hoverZone").text("")}),this.$el.on("click","svg polygon",function(){e.setValue(n(this).attr("data-timezone")),e.$el.trigger("map:clicked")}),this.$el.on("change","select",function(){e.setValue(n(this).val()),e.$el.trigger("map:clicked")}),this.$el.on("click",".quickLink span",function(){var o=n(this).data().select;o.search("/")>0?e.setValue(o,"timezone"):e.setValue(o,"zonename"),e.$el.trigger("map:clicked")})},genrateElement:function(e,n,o,t){if(t)var i=document.createElementNS("http://www.w3.org/2000/svg",e);else var i=document.createElement(e);if(n)for(var a in n)i.setAttribute(a,n[a]);if(o)if(o instanceof Array)for(var s in o)i.appendChild(o[s]);else"string"==typeof o?i.innerHTML=o:i.appendChild(o);return i},createCss:function(e){var n=document.createElement("style");n.type="text/css",n.innerHTML='.timezone-map polygon[data-selected="true"] {fill: '+e.selectedColor+"}.timezone-map polygon { fill: "+e.mapColor+";}.timezone-map polygon.active {fill: "+e.hoverColor+";}.timezone-map polygon:hover { cursor: pointer;}.Cbox .quickLink{width: 52%;float: 
right;padding-bottom: 11px;overflow-x: auto; white-space: nowrap;overflow-y: hidden;}.Cbox .quickLink span:hover {color:#FFF;background-color: #496A84; cursor: pointer;}.Cbox select{width: 45%;float: left;height: 27px; padding: 0px 0px 0px 10px;}.Cbox .quickLink span.active {color: #FFF; background-color: #496A84;}.Cbox .quickLink span{ font-weight: 300; border-radius: 3px; color: #000; background-color: #FFF; border: solid 1px #CCC;margin-left: 10px;font-size: 9px;padding: 4px 6px 4px 6px;}",document.getElementsByTagName("head")[0].appendChild(n)}},n.fn.WorldMapGenerator=t,a.timeZoneValue=[{timezone:"Africa/Abidjan",country:"CI",pin:"244,118",offset:0,points:"241,118,240,119,240,117,238,116,238,115,239,114,239,113,239,113,239,111,241,110,241,111,243,111,244,112,246,111,247,113,245,116,246,118,241,118",zoneName:"GMT"},{timezone:"Africa/Accra",country:"GH",pin:"250,117",offset:0,points:"251,117,247,118,246,118,246,118,246,116,247,114,246,110,250,110,251,113,251,116,252,116,251,117",zoneName:"GMT"},{timezone:"Africa/Addis_Ababa",country:"ET",pin:"304,112",offset:3,points:"313,118,311,118,308,120,307,119,305,120,300,119,299,116,296,114,296,113,297,113,298,110,299,110,300,107,300,107,301,105,302,105,303,104,303,105,307,105,309,108,308,110,310,110,309,110,310,112,317,114,313,118",zoneName:"EAT"},{timezone:"Africa/Algiers",country:"DZ",pin:"254,74",offset:1,points:"263,83,264,86,264,88,263,89,264,91,266,91,267,92,258,98,255,99,254,97,252,97,250,95,238,87,238,85,239,85,243,83,243,82,245,82,245,81,246,81,246,80,248,80,249,80,248,79,248,77,247,76,254,74,262,74,261,74,262,77,260,78,263,81,263,83",zoneName:"CET"},{timezone:"Africa/Asmara",country:"ER",pin:"304,104",offset:3,points:"306,104,310,107,309,108,306,105,303,105,303,104,302,105,301,105,301,101,304,100,305,104,305,103,306,104",zoneName:"EAT"},{timezone:"Africa/Bamako",country:"ML",pin:"239,107",offset:0,points:"244,107,244,108,242,109,242,111,241,111,241,110,240,111,239,110,239,111,238,110,238,110,238,109,237,108,235,108,234,108,234,107,233,105,234,103,235,104,237,103,242,103,241,90,243,90,252,96,252,97,254,97,255,99,256,98,256,101,255,104,249,104,246,105,245,107,244,106,244,107",zoneName:"GMT"},{timezone:"Africa/Bangui",country:"CF",pin:"276,119",offset:1,points:"284,118,283,119,282,118,281,119,277,118,276,119,276,120,273,120,272,122,270,119,270,117,272,115,276,114,276,113,278,112,280,110,281,110,283,111,283,113,284,113,284,113,285,114,288,118,285,118,285,118,284,118",zoneName:"WAT"},{timezone:"Africa/Banjul",country:"GM",pin:"227,106",offset:0,points:"231,106,227,106,229,106,231,106",zoneName:"GMT"},{timezone:"Africa/Bissau",country:"GW",pin:"228,109",offset:0,points:"228,109,228,109,228,109",zoneName:"GMT"},{timezone:"Africa/Blantyre",country:"MW",pin:"299,147",offset:2,points:"298,144,300,146,299,149,298,147,298,145,297,145,295,144,297,142,296,140,297,140,296,138,298,139,299,141,298,142,298,144",zoneName:"CAT"},{timezone:"Africa/Brazzaville",country:"CG",pin:"271,131",offset:1,points:"266,131,267,130,266,128,267,128,268,128,270,128,270,128,270,126,269,125,270,124,269,123,268,123,268,122,272,123,273,120,276,120,275,126,273,128,272,130,270,132,270,131,269,132,268,131,267,132,266,131",zoneName:"WAT"},{timezone:"Africa/Bujumbura",country:"BI",pin:"291,130",offset:2,points:"292,131,291,131,290,129,291,129,292,128,292,128,293,130,292,131",zoneName:"CAT"},{timezone:"Asia/Oral",country:"KZ",pin:"321,54",offset:5,points:"316,55,317,56,318,56,317,55,319,54,320,53,323,53,326,54,326,56,323,58,321,57,319,58,315,58,316,55",zoneName:"ORAT"},{timezone:"Af
rica/Cairo",country:"EG",pin:"293,83",offset:2,points:"294,94,285,94,284,83,285,81,290,82,293,81,294,81,295,82,298,82,298,84,298,86,296,85,295,83,295,84,300,92,299,92,299,93,297,95,294,94",zoneName:"EET"},{timezone:"Africa/Casablanca",country:"MA",pin:"239,78",offset:0,points:"242,84,240,84,238,85,238,87,232,87,236,84,237,83,236,81,237,80,241,78,242,75,243,75,244,76,246,76,248,77,248,80,246,80,246,81,245,81,245,82,243,82,242,84",zoneName:"WET"},{timezone:"Africa/Ceuta",country:"ES",pin:"243,75",offset:1,points:"243,75,243,75,243,75",zoneName:"CET"},{timezone:"Africa/Conakry",country:"GN",pin:"231,112",offset:0,points:"238,114,237,115,236,113,235,114,235,113,234,111,233,111,232,112,231,111,230,110,229,109,231,109,231,107,234,108,235,108,237,108,238,109,238,110,238,110,239,111,239,112,239,113,239,113,239,114,238,114",zoneName:"GMT"},{timezone:"Africa/Dakar",country:"SN",pin:"226,105",offset:0,points:"227,107,229,106,231,106,229,106,227,106,226,105,227,102,230,102,233,105,234,108,229,107,227,108,227,108,227,108,227,107",zoneName:"GMT"},{timezone:"Africa/Dar_es_Salaam",country:"TZ",pin:"305,134",offset:3,points:"306,139,306,139,306,140,302,141,299,141,297,138,293,137,291,134,291,132,293,130,292,129,293,128,292,126,297,126,302,129,302,130,304,131,304,134,305,135,305,137,306,139",zoneName:"EAT"},{timezone:"Asia/Yekaterinburg",country:"RU",pin:"334,46",offset:6,points:"333,53,335,54,335,54,333,55,331,54,329,54,327,55,326,54,326,55,324,53,321,53,322,51,322,50,324,50,325,49,324,48,325,48,324,47,326,47,325,46,326,46,325,44,325,43,324,43,325,42,323,41,322,40,326,40,329,40,332,39,333,35,342,31,341,31,342,31,341,31,341,30,340,29,342,29,342,29,341,29,345,30,346,29,345,28,343,28,344,28,344,27,343,27,345,25,346,24,351,24,350,26,351,27,351,27,351,29,352,30,350,32,346,32,346,33,350,33,354,31,353,30,356,29,357,30,357,31,358,31,361,32,358,31,359,30,358,29,353,29,352,28,353,27,351,26,354,25,354,24,355,25,354,26,355,26,359,27,356,25,358,25,357,25,358,25,362,25,360,26,362,26,362,27,360,28,365,29,365,30,364,31,364,31,364,32,366,32,366,34,367,34,367,35,369,35,369,36,368,37,369,37,367,39,369,40,369,40,364,41,357,40,356,42,353,44,349,44,348,44,348,45,350,46,348,47,348,48,346,48,341,50,335,50,335,51,336,51,335,51,334,52,335,52,333,53",zoneName:"YEKT"},{timezone:"Africa/Djibouti",country:"DJ",pin:"310,109",offset:3,points:"310,109,310,109,310,109",zoneName:"EAT"},{timezone:"Africa/Douala",country:"CM",pin:"263,119",offset:1,points:"270,117,270,119,272,121,272,123,270,122,264,122,263,119,263,120,262,119,262,117,264,116,265,115,266,116,266,115,269,110,270,109,270,107,271,107,271,110,272,111,269,112,272,114,270,117",zoneName:"WAT"},{timezone:"Africa/Freetown",country:"SL",pin:"232,113",offset:0,points:"235,115,234,115,232,114,232,112,233,111,234,111,235,112,235,114,236,114,235,115",zoneName:"GMT"},{timezone:"Africa/Gaborone",country:"BW",pin:"286,159",offset:2,points:"287,158,285,161,282,160,281,162,279,162,279,161,278,159,278,156,279,156,279,150,282,150,283,151,285,150,286,152,289,153,289,155,291,156,287,158",zoneName:"CAT"},{timezone:"Africa/Harare",country:"ZW",pin:"293,150",offset:2,points:"293,156,289,155,289,153,286,152,285,150,288,150,291,147,296,148,295,151,296,153,295,155,293,156",zoneName:"CAT"},{timezone:"Africa/El_Aaiun",country:"EH",pin:"232,87",offset:0,points:"233,89,233,92,232,93,232,95,227,95,226,96,226,95,228,92,228,92,229,91,230,89,231,88,232,87,238,87,238,89,233,89",zoneName:"WET"},{timezone:"Africa/Johannesburg",country:"ZA",pin:"289,161",offset:2,points:"283,172,278,173,276,172,276,173,275,171,275,17
0,275,169,273,165,274,164,274,165,275,165,278,164,278,159,279,161,279,162,280,162,282,160,285,161,288,158,290,156,293,156,294,159,294,161,294,161,293,162,294,163,294,163,294,162,296,162,295,165,289,171,286,172,283,172",zoneName:"SAST"},{timezone:"Africa/Juba",country:"SS",pin:"294,118",offset:3,points:"299,117,299,118,300,118,300,119,298,119,297,120,293,120,291,119,289,119,289,119,287,116,284,113,285,111,286,111,287,112,290,112,292,111,293,111,295,110,295,108,296,108,296,110,297,111,297,113,296,113,296,114,297,115,299,117",zoneName:"EAT"},{timezone:"Africa/Kampala",country:"UG",pin:"295,125",offset:3,points:"293,126,291,127,292,124,293,122,293,122,293,120,297,120,297,119,299,122,297,125,297,126,293,126",zoneName:"EAT"},{timezone:"Africa/Khartoum",country:"SD",pin:"295,103",offset:3,points:"300,107,299,110,298,110,297,112,296,110,296,108,295,108,295,108,295,110,293,111,292,111,290,112,287,112,286,111,285,111,284,113,283,113,283,111,282,110,281,107,280,107,282,103,283,103,283,97,285,97,285,94,294,94,297,95,299,93,301,94,302,96,302,99,304,100,301,101,301,106,300,107,300,107",zoneName:"EAT"},{timezone:"Africa/Kinshasa",country:"CD",pin:"271,131",offset:1,points:"271,131,272,130,273,128,275,126,276,119,277,118,277,118,279,119,283,120,281,120,283,122,281,122,281,123,283,125,282,126,283,126,282,126,284,127,281,128,281,128,281,128,279,128,279,131,278,131,278,135,277,135,277,136,274,136,273,133,267,133,268,131,269,132,270,131,270,132,271,131",zoneName:"WAT"},{timezone:"Africa/Lagos",country:"NG",pin:"255,116",offset:1,points:"261,119,258,119,256,116,254,116,254,112,255,110,255,109,256,106,258,106,260,107,261,106,263,107,265,106,267,107,269,106,270,108,270,109,269,110,266,115,266,116,264,115,262,117,262,118,261,119",zoneName:"WAT"},{timezone:"Africa/Libreville",country:"GA",pin:"263,124",offset:1,points:"269,125,270,126,270,128,267,128,267,128,266,128,267,130,266,131,263,128,262,126,263,126,263,125,264,125,263,124,263,124,266,124,266,122,268,122,268,123,270,123,270,124,269,125",zoneName:"WAT"},{timezone:"Africa/Lome",country:"TG",pin:"252,116",offset:0,points:"252,116,251,116,251,113,250,110,251,110,251,111,252,111,253,116,252,116",zoneName:"GMT"},{timezone:"Africa/Kigali",country:"RW",pin:"292,128",offset:2,points:"292,128,291,129,290,129,291,127,292,126,293,128,292,128",zoneName:"CAT"},{timezone:"Africa/Luanda",country:"AO",pin:"268,137",offset:1,points:"281,140,281,141,283,140,283,143,281,143,281,148,283,149,279,150,276,149,269,149,268,149,266,149,267,144,269,142,269,140,268,138,269,137,267,133,273,133,274,136,277,136,277,135,280,135,280,138,281,140",zoneName:"WAT"},{timezone:"Africa/Lubumbashi",country:"CD",pin:"288,141",offset:2,points:"291,132,291,134,293,136,290,137,289,138,290,139,289,141,290,142,291,142,291,144,290,144,288,141,287,142,286,142,285,141,284,141,283,140,281,141,280,135,278,135,277,134,278,133,278,131,279,131,279,128,281,128,281,128,281,128,284,127,282,126,283,126,282,126,283,125,281,123,281,122,283,122,281,120,283,120,281,119,282,118,285,118,285,118,288,118,289,119,291,119,293,120,293,122,293,122,292,124,291,127,290,128,291,132",zoneName:"CAT"},{timezone:"Africa/Lusaka",country:"ZM",pin:"289,146",offset:2,points:"290,147,290,148,287,150,284,149,283,149,281,148,281,143,283,143,283,140,284,141,285,141,286,142,287,142,288,141,290,144,291,144,291,142,290,142,289,141,290,139,289,138,290,137,293,136,296,138,297,140,296,140,297,142,295,144,296,144,292,146,292,147,290,147",zoneName:"CAT"},{timezone:"Africa/Malabo",country:"GQ",pin:"262,120",offset:1,points:"266,123,266,124,263,123,264,1
22,266,122,266,123",zoneName:"WAT"},{timezone:"Africa/Maputo",country:"MZ",pin:"295,161",offset:2,points:"296,160,295,161,295,162,296,161,296,162,295,162,294,159,293,156,295,155,296,153,295,151,296,148,292,147,292,146,296,144,297,145,298,145,298,146,298,147,299,149,300,146,298,144,298,141,302,141,306,140,306,146,305,148,303,149,300,151,298,152,298,154,299,156,299,158,296,160",zoneName:"CAT"},{timezone:"Africa/Mbabane",country:"SZ",pin:"293,162",offset:2,points:"294,161,295,162,294,163,293,162,294,161,294,161",zoneName:"SAST"},{timezone:"Africa/Mogadishu",country:"SO",pin:"313,122",offset:3,points:"310,125,308,127,307,126,307,121,308,119,312,118,317,114,311,113,309,110,310,109,312,111,321,109,321,111,321,111,321,111,321,112,317,119,310,125",zoneName:"EAT"},{timezone:"Africa/Monrovia",country:"LR",pin:"235,116",offset:0,points:"239,118,240,119,238,119,234,116,236,113,237,113,237,115,238,115,238,114,239,115,238,116,240,117,239,118",zoneName:"GMT"},{timezone:"Africa/Nairobi",country:"KE",pin:"301,127",offset:3,points:"308,127,306,129,304,131,302,130,302,129,297,126,297,125,299,122,297,119,298,119,300,119,305,120,307,119,308,119,307,121,307,126,308,127",zoneName:"EAT"},{timezone:"Africa/Maseru",country:"LS",pin:"288,166",offset:2,points:"289,167,289,167,288,166,290,165,291,166,289,167",zoneName:"SAST"},{timezone:"Africa/Ndjamena",country:"TD",pin:"271,108",offset:1,points:"278,112,276,113,276,114,272,115,269,112,270,111,272,111,271,110,271,107,270,107,269,105,272,102,272,97,271,95,271,93,272,92,283,98,283,103,282,103,280,107,281,107,282,110,280,110,279,112,278,112",zoneName:"WAT"},{timezone:"Africa/Niamey",country:"NE",pin:"253,106",offset:1,points:"256,106,255,108,255,109,254,108,253,108,253,107,251,107,252,106,251,106,250,104,255,104,256,101,256,98,258,98,267,92,270,94,271,93,271,95,272,97,272,102,269,105,269,106,267,107,265,106,263,107,261,106,260,107,258,106,256,106",zoneName:"WAT"},{timezone:"Africa/Nouakchott",country:"MR",pin:"228,100",offset:0,points:"234,103,233,105,230,102,227,102,227,103,228,100,227,98,228,97,227,95,226,96,232,95,232,93,233,92,233,89,238,89,238,87,243,90,241,90,242,103,237,103,235,104,234,103",zoneName:"GMT"},{timezone:"Africa/Ouagadougou",country:"BF",pin:"248,108",offset:0,points:"249,110,246,110,246,112,246,111,243,112,242,111,242,109,244,108,244,106,245,107,247,105,250,104,250,104,251,106,252,106,251,107,253,107,253,109,252,110,249,110",zoneName:"GMT"},{timezone:"Africa/Porto-Novo",country:"BJ",pin:"254,116",offset:1,points:"254,114,254,116,252,116,252,112,251,111,252,109,253,109,254,108,255,109,255,110,254,112,254,114",zoneName:"WAT"},{timezone:"Africa/Tunis",country:"TN",pin:"264,74",offset:1,points:"266,80,264,81,264,82,263,83,263,80,261,79,260,78,262,76,261,74,263,73,264,73,264,74,265,74,265,75,266,76,264,78,266,79,266,80",zoneName:"CET"},{timezone:"Africa/Sao_Tome",country:"ST",pin:"259,125",offset:0,points:"260,123,260,123,260,123",zoneName:"GMT"},{timezone:"Africa/Tripoli",country:"LY",pin:"268,79",offset:2,points:"285,88,285,97,283,97,283,98,272,92,270,94,264,91,263,89,264,88,264,86,263,83,264,82,264,81,266,80,266,79,271,80,272,81,276,83,278,82,278,80,281,79,282,80,285,81,284,83,285,88",zoneName:"EET"},{timezone:"Africa/Windhoek",country:"NA",pin:"274,156",offset:2,points:"278,163,278,164,277,165,274,165,274,164,273,165,272,164,271,162,270,156,267,151,266,149,268,149,269,149,276,149,279,150,284,149,285,150,283,151,282,150,279,150,279,156,278,156,278,163",zoneName:"WAST"},{timezone:"America/Adak",country:"US",pin:"5,53",offset:-10,points:"7,53,6,53,8,52,7,5
3",zoneName:"HST"},{timezone:"America/Argentina/Salta",country:"AR",pin:"159,159",offset:-3,points:"162,180,162,182,163,182,160,182,160,183,150,183,150,180,152,179,151,176,152,175,153,177,155,177,155,175,160,175,160,174,162,174,162,180",zoneName:"ART"},{timezone:"America/Argentina/Salta",country:"AR",pin:"159,159",offset:-3,points:"159,156,161,157,161,156,163,156,163,156,163,159,162,161,158,162,157,161,158,160,155,160,155,159,157,158,158,159,158,157,159,159,161,159,161,158,160,158,159,156",zoneName:"ART"},{timezone:"America/Anchorage",country:"US",pin:"42,40",offset:-9,points:"42,42,40,43,39,43,40,42,39,42,40,41,43,41,42,40,42,40,40,40,36,43,37,43,36,44,30,47,30,47,25,48,25,48,27,48,27,47,31,45,31,44,32,43,30,44,30,43,29,44,27,43,25,44,25,37,27,36,26,36,27,35,25,35,25,33,26,33,25,33,25,32,26,33,25,32,25,27,33,26,34,26,33,27,35,26,39,27,39,27,38,27,39,27,51,28,54,28,54,42,54,41,50,42,46,41,47,40,44,40,45,40,43,41,44,41,44,41,44,41,42,42",zoneName:"AKST"},{timezone:"America/Anguilla",country:"AI",pin:"162,100",offset:-4,points:"162,100,162,100,162,100",zoneName:"AST"},{timezone:"America/Antigua",country:"AG",pin:"164,101",offset:-4,points:"164,101,164,101,164,101",zoneName:"AST"},{timezone:"America/Araguaina",country:"BR",pin:"183,135",offset:-3,points:"185,136,185,136,185,138,186,139,185,141,186,141,186,143,184,143,182,143,182,142,181,143,180,143,180,142,180,143,180,140,182,137,182,135,183,134,183,133,183,132,184,133,184,135,185,136",zoneName:"BRT"},{timezone:"America/Argentina/Buenos_Aires",country:"AR",pin:"169,173",offset:-3,points:"167,171,169,172,169,173,171,174,170,175,171,176,169,178,165,179,163,179,164,180,163,180,163,181,164,181,163,182,162,182,162,173,164,173,165,172,167,171",zoneName:"ART"},{timezone:"America/Argentina/Catamarca",country:"AR",pin:"159,165",offset:-3,points:"159,188,157,188,156,189,151,189,151,188,150,187,151,187,150,187,151,186,150,184,160,183,161,184,161,183,162,184,160,184,161,185,159,186,159,188",zoneName:"ART"},{timezone:"America/Argentina/Catamarca",country:"AR",pin:"159,165",offset:-3,points:"160,167,158,164,154,164,155,162,155,160,158,160,157,161,159,162,158,163,159,164,159,164,160,167",zoneName:"ART"},{timezone:"America/Argentina/Cordoba",country:"AR",pin:"161,169",offset:-3,points:"163,173,162,173,162,174,160,174,160,170,159,169,159,168,160,166,159,164,161,161,162,161,163,159,163,156,165,158,170,160,169,163,173,163,174,162,174,161,175,161,175,163,173,164,170,167,169,172,166,171,164,173,163,173",zoneName:"ART"},{timezone:"America/Argentina/Jujuy",country:"AR",pin:"159,159",offset:-3,points:"157,157,158,155,159,156,159,157,160,158,161,158,161,159,160,159,159,159,158,157,158,159,157,158,157,157",zoneName:"ART"},{timezone:"America/Argentina/La_Rioja",country:"AR",pin:"157,166",offset:-3,points:"156,167,154,166,154,165,153,164,154,164,158,164,160,167,159,169,157,169,156,167",zoneName:"ART"},{timezone:"America/Argentina/Mendoza",country:"AR",pin:"154,171",offset:-3,points:"152,170,156,170,157,171,157,175,155,175,155,177,153,177,152,175,152,174,153,171,152,170",zoneName:"ART"},{timezone:"America/Argentina/Rio_Gallegos",country:"AR",pin:"154,197",offset:-3,points:"151,189,156,189,157,190,159,191,158,192,154,195,154,197,155,198,150,197,149,197,150,195,148,196,148,195,148,193,150,192,149,191,150,191,151,189",zoneName:"ART"},{timezone:"America/Argentina/San_Juan",country:"AR",pin:"155,169",offset:-3,points:"153,167,154,164,154,165,154,166,156,167,157,169,156,169,156,170,154,169,152,170,152,168,153,167",zoneName:"ART"},{timezone:"America/Argentina/San_Luis",country:"
AR",pin:"158,171",offset:-3,points:"159,169,160,170,160,175,157,175,156,169,159,169",zoneName:"ART"},{timezone:"America/Argentina/Tucuman",country:"AR",pin:"159,162",offset:-3,points:"158,161,160,161,160,164,159,164,158,163,159,162,158,161",zoneName:"ART"},{timezone:"America/Aruba",country:"AW",pin:"153,108",offset:-4,points:"153,108,153,108,153,108",zoneName:"AST"},{timezone:"America/Argentina/Ushuaia",country:"AR",pin:"155,201",offset:-3,points:"155,201,155,198,155,199,155,199,156,200,160,201,158,201,155,201",zoneName:"ART"},{timezone:"America/Asuncion",country:"PY",pin:"170,160",offset:-3,points:"174,161,173,163,169,163,170,160,165,158,163,156,164,152,167,152,169,152,169,156,172,156,173,158,175,158,174,161",zoneName:"PYST"},{timezone:"America/Bahia_Banderas",country:"MX",pin:"104,96",offset:-6,points:"104,96,103,96,104,96",zoneName:"CST"},{timezone:"America/Atikokan",country:"CA",pin:"123,57",offset:-5,points:"125,58,122,58,123,57,124,57,124,58,125,58",zoneName:"EST"},{timezone:"America/Bahia",country:"BR",pin:"197,143",offset:-2,points:"187,146,186,146,186,141,185,141,186,140,187,139,188,140,189,140,189,138,191,138,193,137,194,138,195,137,197,137,198,139,197,140,198,141,196,143,196,147,195,150,194,149,195,147,194,147,193,147,192,146,189,145,189,145,187,146",zoneName:"BRT"},{timezone:"America/Barbados",country:"BB",pin:"167,107",offset:-4,points:"167,106,167,107,167,106",zoneName:"AST"},{timezone:"America/Belem",country:"BR",pin:"183,127",offset:-3,points:"179,126,180,125,179,126,180,125,183,125,182,127,180,128,182,127,181,128,183,127,183,126,186,127,185,130,182,132,183,133,182,135,182,137,180,139,177,138,178,138,177,136,178,134,177,132,177,130,178,129,177,127,178,127,177,126,176,123,174,122,174,122,177,122,178,119,180,122,181,123,179,126",zoneName:"BRT"},{timezone:"America/Belize",country:"BZ",pin:"128,101",offset:-6,points:"128,101,128,101,128,101",zoneName:"CST"},{timezone:"America/Blanc-Sablon",country:"CA",pin:"171,54",offset:-4,points:"169,54,167,55,169,54",zoneName:"AST"},{timezone:"America/Boa_Vista",country:"BR",pin:"166,121",offset:-4,points:"167,118,166,119,167,120,167,121,167,122,168,123,168,125,167,125,166,126,165,126,164,127,163,126,163,122,161,122,160,119,163,120,163,119,165,119,166,118,167,118",zoneName:"AMT"},{timezone:"America/Bogota",country:"CO",pin:"147,119",offset:-5,points:"154,126,153,131,152,130,153,129,152,128,150,128,149,128,146,125,142,124,140,123,143,120,142,119,143,117,142,115,143,114,143,113,143,114,143,113,145,112,145,110,148,109,150,108,151,108,149,110,148,112,149,113,149,115,150,115,153,115,154,117,156,117,156,119,157,120,156,121,157,122,157,123,156,122,153,123,153,124,154,124,153,124,154,126",zoneName:"COT"},{timezone:"America/Boise",country:"US",pin:"89,64",offset:-7,points:"96,66,96,67,87,67,87,66,86,66,86,64,87,63,88,62,88,61,89,62,92,62,93,63,96,63,96,66",zoneName:"MST"},{timezone:"America/Cambridge_Bay",country:"CA",pin:"104,29",offset:-7,points:"99,18,100,19,99,19,101,19,100,20,102,19,104,20,103,21,97,21,97,20,99,20,97,20,98,19,97,19,99,18",zoneName:"MST"},{timezone:"America/Cambridge_Bay",country:"CA",pin:"104,29",offset:-7,points:"108,36,97,35,95,34,92,34,81,30,81,28,89,29,92,30,89,31,90,31,97,31,100,32,99,32,101,33,100,32,101,32,101,32,101,32,100,31,103,30,99,30,100,30,103,29,105,31,106,30,109,31,113,31,113,30,115,30,116,30,116,31,117,30,116,31,118,32,117,30,120,30,120,30,120,29,119,30,120,28,116,28,117,27,116,27,116,26,119,25,118,25,120,25,121,26,121,27,123,28,122,27,121,28,122,28,121,28,125,29,123,29,124,29,124,30,125,30,126,29,126,32,108,32,
108,36",zoneName:"MST"},{timezone:"America/Cambridge_Bay",country:"CA",pin:"104,29",offset:-7,points:"115,24,116,24,116,25,116,25,114,25,114,26,113,26,107,24,111,24,110,23,115,24",zoneName:"MST"},{timezone:"America/Cambridge_Bay",country:"CA",pin:"104,29",offset:-7,points:"100,23,103,24,105,26,105,26,110,27,110,28,106,28,107,28,107,29,108,28,109,29,107,29,104,29,104,29,102,28,99,30,93,30,92,29,93,29,88,29,87,28,97,28,97,24,99,24,100,26,101,25,100,23,100,23",zoneName:"MST"},{timezone:"America/Campo_Grande",country:"BR",pin:"174,153",offset:-3,points:"176,150,177,151,179,152,179,153,175,158,173,158,172,156,169,156,170,150,172,149,173,150,175,149,175,150,176,150",zoneName:"AMST"},{timezone:"America/Cancun",country:"MX",pin:"129,96",offset:-6,points:"128,95,130,96,128,98,129,98,128,100,128,99,127,100,126,100,126,98,128,96,128,95",zoneName:"EST"},{timezone:"America/Caracas",country:"VE",pin:"157,110",offset:-4.5,points:"163,111,165,112,165,113,167,113,166,115,166,115,165,116,165,117,166,118,163,119,163,120,160,119,161,122,162,122,159,124,159,124,158,124,157,123,157,122,156,121,157,120,156,119,156,116,154,117,153,115,150,115,149,112,148,112,149,110,151,109,150,109,151,110,150,111,150,112,151,112,151,112,151,110,153,109,153,108,153,109,155,109,155,110,158,110,160,111,162,110,161,110,164,110,163,110,163,111",zoneName:"VET"},{timezone:"America/Cayenne",country:"GF",pin:"177,118",offset:-3,points:"176,117,178,119,177,122,175,122,175,120,174,118,175,117,176,117",zoneName:"GFT"},{timezone:"America/Cayman",country:"KY",pin:"137,98",offset:-5,points:"139,98,139,98,139,98",zoneName:"EST"},{timezone:"America/Chicago",country:"US",pin:"128,67",offset:-6,points:"128,72,132,74,132,75,131,76,132,80,132,84,130,83,130,83,129,83,129,82,128,83,128,82,128,83,124,83,126,83,125,84,126,84,126,85,125,84,125,85,120,83,118,84,118,84,118,85,116,86,117,85,116,85,116,86,115,86,115,87,114,87,115,87,115,89,112,88,112,87,109,84,107,84,107,85,105,84,104,82,104,81,107,81,107,74,108,74,108,73,109,73,109,70,108,70,108,69,109,69,109,67,110,67,109,64,111,63,110,63,111,62,111,61,109,61,110,60,110,59,106,59,105,57,118,57,118,56,119,57,126,58,122,60,124,60,124,60,128,61,128,62,128,63,129,63,128,66,129,67,130,67,128,68,128,72",zoneName:"CST"},{timezone:"America/Chihuahua",country:"MX",pin:"103,85",offset:-7,points:"106,85,106,86,106,88,105,88,103,88,101,89,98,86,99,86,99,82,100,82,100,81,102,81,104,83,105,84,106,85",zoneName:"MST"},{timezone:"America/Coral_Harbour",country:"",pin:"250,125",offset:-5,points:"131,33,132,34,133,34,136,35,137,36,136,36,139,36,137,37,135,36,131,37,131,36,129,37,130,36,130,34,131,33",zoneName:"EST"},{timezone:"America/Costa_Rica",country:"CR",pin:"133,111",offset:-6,points:"132,112,131,111,131,109,134,110,135,112,135,112,135,114,131,111,132,112",zoneName:"CST"},{timezone:"America/Creston",country:"CA",pin:"88,57",offset:-7,points:"89,57,87,57,88,56,89,57",zoneName:"MST"},{timezone:"America/Cuiaba",country:"BR",pin:"172,147",offset:-3,points:"179,146,176,148,176,150,175,150,175,149,173,150,172,149,170,150,169,149,169,148,166,148,166,144,167,142,167,141,165,140,164,137,169,137,169,135,170,137,171,138,180,139,180,141,180,143,179,146",zoneName:"AMST"},{timezone:"America/Curacao",country:"CW",pin:"154,108",offset:-4,points:"154,108,154,108,154,108",zoneName:"AST"},{timezone:"America/Danmarkshavn",country:"GL",pin:"224,18",offset:0,points:"223,15,223,15,224,15,221,16,221,16,220,17,221,17,223,17,224,17,221,17,224,18,225,18,218,18,223,19,220,19,223,20,218,20,219,14,223,15",zoneName:"GMT"},{timezone:"America/Dawson",c
ountry:"CA",pin:"56,36",offset:-8,points:"56,36,56,36,56,36",zoneName:"PST"},{timezone:"America/Dawson_Creek",country:"CA",pin:"83,42",offset:-7,points:"83,45,83,50,79,48,78,46,83,45",zoneName:"MST"},{timezone:"America/Denver",country:"US",pin:"104,70",offset:-7,points:"93,63,92,62,91,62,91,60,89,59,89,57,105,57,106,59,108,59,108,60,110,61,109,61,111,61,111,62,110,63,111,63,109,64,110,67,109,67,109,69,108,69,108,70,109,70,109,73,108,73,108,74,107,74,107,81,104,81,104,82,102,81,99,81,99,76,96,76,95,74,95,74,92,74,92,67,96,67,96,63,93,63",zoneName:"MST"},{timezone:"America/Detroit",country:"US",pin:"135,66",offset:-5,points:"132,67,129,67,130,66,130,64,130,63,131,62,132,63,132,61,134,62,134,62,134,63,133,65,135,64,136,65,134,67,132,67",zoneName:"EST"},{timezone:"America/Detroit",country:"US",pin:"135,66",offset:-5,points:"125,60,127,59,127,60,130,61,132,60,134,61,131,61,130,62,129,61,128,62,129,62,128,61,126,61,125,60",zoneName:"EST"},{timezone:"America/Dominica",country:"DM",pin:"165,104",offset:-4,points:"165,103,165,104,165,103",zoneName:"AST"},{timezone:"America/Edmonton",country:"CA",pin:"92,51",offset:-7,points:"92,57,88,56,88,55,85,53,86,52,86,52,83,51,83,42,97,42,97,51,99,51,97,52,97,57,92,57",zoneName:"MST"},{timezone:"America/Eirunepe",country:"BR",pin:"153,134",offset:-4,points:"156,138,148,135,148,134,149,132,150,131,153,131,156,138",zoneName:"ACT"},{timezone:"America/El_Salvador",country:"SV",pin:"126,106",offset:-6,points:"126,106,125,106,126,105,128,106,128,107,126,106",zoneName:"CST"},{timezone:"America/Fortaleza",country:"BR",pin:"197,130",offset:-3,points:"201,134,202,135,201,135,199,137,198,136,198,135,197,136,194,135,194,136,192,138,189,138,189,140,187,140,186,139,185,138,185,136,184,135,184,133,182,132,185,130,186,126,187,127,187,127,188,127,188,128,188,130,188,128,189,129,190,128,191,129,194,129,198,132,201,132,201,134",zoneName:"BRT"},{timezone:"America/Glace_Bay",country:"CA",pin:"167,61",offset:-4,points:"167,61,166,61,166,61,167,61",zoneName:"AST"},{timezone:"America/Godthab",country:"GL",pin:"178,36",offset:-3,points:"188,42,187,42,188,41,187,41,188,41,187,41,187,41,186,41,187,40,186,41,187,40,183,41,184,40,183,40,183,40,182,40,183,39,182,39,182,39,181,39,182,39,181,39,182,39,180,38,181,37,180,38,180,38,180,37,180,37,179,37,180,37,179,37,180,37,179,37,179,36,178,36,180,36,178,36,180,35,179,35,180,35,181,36,179,34,180,35,178,36,178,35,179,35,177,35,178,34,177,34,180,34,177,34,177,34,176,34,178,33,176,33,180,32,175,33,176,33,175,33,177,33,176,32,177,32,175,32,180,32,175,31,181,31,177,31,178,31,175,31,176,30,178,31,176,30,180,31,179,30,179,30,176,30,179,30,180,29,179,30,179,29,180,29,180,29,180,29,180,29,179,29,180,28,179,28,180,28,174,27,180,27,179,27,179,27,178,26,179,26,177,26,178,26,177,26,179,26,176,26,178,25,176,25,175,25,175,26,173,26,172,25,174,25,173,25,174,25,173,24,174,24,173,23,174,23,172,23,173,22,172,22,172,22,170,22,172,21,168,20,169,20,169,20,161,19,158,15,160,15,160,14,162,14,156,13,162,12,163,13,162,12,165,12,165,12,166,12,165,11,166,11,172,12,167,11,169,11,174,11,176,11,175,12,176,11,181,12,179,11,181,11,179,10,180,10,188,11,189,11,188,11,188,11,188,11,191,11,186,10,195,10,187,10,196,10,196,10,199,10,196,9,202,9,214,9,201,10,215,9,216,10,214,10,220,10,204,11,216,11,213,12,213,12,221,11,220,12,217,13,224,12,223,12,228,11,234,12,229,13,221,13,228,13,221,14,222,14,226,14,225,14,219,14,218,20,223,20,223,21,219,20,220,20,219,20,222,21,219,21,224,22,219,22,219,22,219,22,220,23,220,22,222,22,221,23,219,23,217,23,219,23,214,22,216,23,212,23,213,23,212,23,212
,24,215,24,212,24,216,24,213,24,215,24,215,25,216,25,216,26,210,25,212,25,210,26,215,26,211,26,212,26,209,27,213,27,210,28,212,28,215,27,219,28,213,30,208,30,206,30,205,30,206,31,204,31,202,33,198,33,198,34,197,33,198,33,197,33,197,33,197,33,197,34,195,34,194,34,195,34,194,35,193,35,194,36,192,36,194,36,194,37,192,36,193,37,192,37,192,37,191,37,192,38,190,38,191,38,190,38,192,39,190,39,191,39,190,40,191,40,189,40,191,40,190,41,190,41,189,41,190,42,189,41,188,42", -zoneName:"WGT"},{timezone:"America/Goose_Bay",country:"CA",pin:"166,51",offset:-4,points:"171,53,161,53,161,52,162,52,162,52,161,52,160,53,158,53,158,51,156,52,157,51,156,50,157,49,156,49,157,49,157,48,159,49,161,49,162,48,161,48,161,47,162,47,161,47,162,45,160,44,162,43,160,43,161,43,161,42,160,42,160,41,162,43,161,43,163,43,162,44,163,44,162,44,164,45,163,45,165,46,163,46,164,46,164,47,166,48,166,48,166,49,167,48,167,48,168,48,167,49,168,48,168,49,170,49,167,50,169,50,166,51,170,50,171,50,170,51,171,53",zoneName:"AST"},{timezone:"America/Grand_Turk",country:"TC",pin:"151,95",offset:-5,points:"151,95,151,95,151,95",zoneName:"AST"},{timezone:"America/Grenada",country:"GD",pin:"164,108",offset:-4,points:"165,108,165,108,165,108",zoneName:"AST"},{timezone:"America/Guadeloupe",country:"GP",pin:"165,102",offset:-4,points:"165,102,165,102,165,102",zoneName:"AST"},{timezone:"America/Guatemala",country:"GT",pin:"124,105",offset:-6,points:"125,106,123,105,122,104,123,103,124,103,123,101,124,101,124,100,126,100,126,103,127,103,125,106",zoneName:"CST"},{timezone:"America/Guayaquil",country:"EC",pin:"139,128",offset:-5,points:"141,130,140,132,139,131,138,131,139,128,139,129,137,128,139,124,141,123,142,124,144,124,145,125,145,125,146,126,141,130",zoneName:"ECT"},{timezone:"America/Guyana",country:"GY",pin:"169,116",offset:-4,points:"170,116,171,118,170,118,169,119,172,122,170,122,168,123,167,122,167,121,167,120,166,119,167,118,165,117,165,116,166,115,166,115,167,114,167,113,169,115,169,116,169,115,170,116",zoneName:"GYT"},{timezone:"America/Halifax",country:"CA",pin:"162,63",offset:-4,points:"161,63,162,62,160,62,161,61,163,62,164,61,165,62,161,63,159,65,158,64,158,63,160,62,161,63",zoneName:"AST"},{timezone:"America/Havana",country:"CU",pin:"136,93",offset:-5,points:"146,96,147,97,142,97,143,96,141,96,141,95,136,94,136,94,137,94,136,93,132,95,134,93,138,93,146,96",zoneName:"CST"},{timezone:"America/Hermosillo",country:"MX",pin:"96,85",offset:-7,points:"90,81,91,80,96,81,99,81,99,86,98,86,99,88,98,88,96,87,97,86,94,85,93,82,90,81",zoneName:"MST"},{timezone:"America/Indiana/Petersburg",country:"US",pin:"129,72",offset:-5,points:"129,72,129,72,129,72",zoneName:"EST"},{timezone:"America/Indiana/Tell_City",country:"US",pin:"129,72",offset:-6,points:"130,72,130,72,130,72",zoneName:"CST"},{timezone:"America/Indiana/Vevay",country:"US",pin:"132,71",offset:-5,points:"132,71,132,71,132,71",zoneName:"EST"},{timezone:"America/Indiana/Indianapolis",country:"US",pin:"130,70",offset:-5,points:"132,71,130,72,130,71,128,71,128,68,130,68,130,67,132,67,132,70,132,71",zoneName:"EST"},{timezone:"America/Indiana/Knox",country:"US",pin:"130,68",offset:-6,points:"130,67,129,68,130,67",zoneName:"CST"},{timezone:"America/Indiana/Marengo",country:"US",pin:"130,72",offset:-5,points:"130,72,130,72,130,72,130,72",zoneName:"EST"},{timezone:"America/Indiana/Vincennes",country:"US",pin:"128,71",offset:-5,points:"128,72,128,71,130,71,130,72,128,72",zoneName:"EST"},{timezone:"America/Indiana/Winamac",country:"US",pin:"130,68",offset:-5,points:"130,68,129,68,130,68",zoneName:"
EST"},{timezone:"America/Inuvik",country:"CA",pin:"64,30",offset:-7,points:"65,30,64,30,65,30",zoneName:"MST"},{timezone:"America/Iqaluit",country:"CA",pin:"155,36",offset:-5,points:"139,20,138,20,140,20,138,21,140,21,132,21,132,20,139,20",zoneName:"EST"},{timezone:"America/Iqaluit",country:"CA",pin:"155,36",offset:-5,points:"139,10,145,10,144,11,145,10,142,10,156,10,155,10,156,10,156,12,154,12,156,12,152,12,156,12,156,13,152,13,153,14,149,14,152,14,151,14,147,14,146,14,148,15,142,15,147,15,142,15,146,16,143,16,145,16,145,17,141,17,142,17,141,18,136,18,142,18,141,19,137,19,138,19,135,18,136,19,132,19,132,18,134,18,134,17,136,17,132,17,132,15,136,16,134,16,137,15,132,15,134,15,132,15,132,14,138,14,139,14,134,13,144,13,140,13,143,12,140,12,140,12,139,13,132,13,132,12,136,12,132,12,132,11,133,11,132,10,140,11,135,11,138,10,137,10,141,10,138,10,139,10",zoneName:"EST"},{timezone:"America/Iqaluit",country:"CA",pin:"155,36",offset:-5,points:"132,30,133,29,132,29,132,28,136,28,134,28,137,29,136,29,137,30,135,30,137,31,134,33,133,32,132,32,132,30",zoneName:"EST"},{timezone:"America/Iqaluit",country:"CA",pin:"155,36",offset:-5,points:"144,24,140,24,138,23,142,23,144,24",zoneName:"EST"},{timezone:"America/Iqaluit",country:"CA",pin:"155,36",offset:-5,points:"133,23,137,23,139,24,138,24,138,24,138,25,139,24,142,25,140,25,142,25,141,24,142,24,146,24,146,25,144,25,147,25,145,26,146,25,146,26,148,25,147,26,148,26,147,26,149,26,148,26,149,25,151,26,149,27,150,27,149,27,152,26,150,27,151,27,151,28,153,27,152,27,155,27,152,28,155,28,153,28,156,27,155,28,153,28,156,29,154,29,155,29,154,29,155,29,155,29,156,29,154,29,156,30,156,33,155,33,156,34,156,37,154,36,156,37,156,39,151,37,150,37,151,37,149,36,148,36,148,35,148,36,148,35,147,36,147,35,146,35,146,35,147,36,142,36,141,35,142,34,148,34,147,33,150,32,148,30,148,30,147,30,148,30,145,29,144,30,145,29,143,28,143,28,143,28,143,28,142,28,142,27,140,27,139,27,140,27,141,28,136,28,138,28,135,27,137,28,132,28,132,26,133,25,132,25,132,25,133,25,132,25,132,24,133,24,132,23,133,24,132,23,133,23",zoneName:"EST"},{timezone:"America/La_Paz",country:"BO",pin:"155,148",offset:-4,points:"164,154,163,156,161,156,161,157,160,156,158,155,157,157,156,157,155,153,155,152,153,149,154,148,154,147,155,142,153,140,155,140,157,139,159,138,160,142,166,144,166,148,169,148,169,149,170,150,170,153,168,152,164,152,164,154",zoneName:"BOT"},{timezone:"America/Jamaica",country:"JM",pin:"143,100",offset:-5,points:"144,100,143,100,141,100,143,99,144,100",zoneName:"EST"},{timezone:"America/Juneau",country:"US",pin:"63,44",offset:-9,points:"59,43,62,42,66,45,65,46,65,45,64,45,65,45,64,45,65,45,64,44,64,44,63,44,62,42,62,42,62,44,61,44,61,43,61,43,61,43,60,43,61,44,60,44,58,43,59,43",zoneName:"AKST"},{timezone:"America/Kentucky/Louisville",country:"US",pin:"131,72",offset:-5,points:"130,72,130,72,131,71,130,72",zoneName:"EST"},{timezone:"America/Kentucky/Monticello",country:"US",pin:"132,74",offset:-5,points:"132,74,133,74,132,74,132,74",zoneName:"EST"},{timezone:"America/Kralendijk",country:"BQ",pin:"155,108",offset:-4,points:"162,101,162,100,162,101",zoneName:"AST"},{timezone:"America/Lima",country:"PE",pin:"143,142",offset:-5,points:"153,149,153,150,152,150,151,149,146,146,144,145,144,144,139,135,137,133,138,133,137,131,138,130,139,130,138,131,140,132,141,130,145,127,146,126,145,125,146,125,149,128,150,128,153,129,152,130,153,131,152,131,149,132,148,134,147,135,149,137,148,138,150,138,150,139,152,138,152,140,153,140,155,142,154,147,154,148,153,149",zoneName:"PET"},{timezone:"America/Managua",cou
ntry:"NI",pin:"130,108",offset:-6,points:"132,110,131,110,128,107,129,107,129,106,131,106,132,104,135,104,134,110,132,110",zoneName:"CST"},{timezone:"America/Manaus",country:"BR",pin:"167,129",offset:-4,points:"167,125,168,125,169,127,172,128,169,134,169,135,169,137,164,137,163,136,162,136,161,137,160,138,158,138,157,139,156,138,153,131,154,127,153,124,154,124,153,124,153,123,156,122,157,123,158,124,159,124,159,124,161,122,162,122,163,124,163,126,164,127,165,126,166,126,167,125",zoneName:"AMT"},{timezone:"America/Los_Angeles",country:"US",pin:"86,78",offset:-8,points:"91,62,88,62,88,61,88,62,87,63,86,64,86,66,87,66,87,67,92,67,92,75,91,75,91,77,91,79,91,79,87,80,85,78,82,77,82,76,80,73,80,72,79,72,78,71,77,69,78,67,77,66,78,61,78,61,78,61,78,60,78,60,78,60,78,60,77,58,80,58,79,59,80,58,80,59,79,60,80,59,80,57,89,57,89,59,91,60,91,62",zoneName:"PST"},{timezone:"America/Lower_Princes",country:"SX",pin:"162,100",offset:-4,points:"162,100,162,100,162,100",zoneName:"AST"},{timezone:"America/Maceio",country:"BR",pin:"200,138",offset:-3,points:"199,140,198,141,197,140,198,139,197,138,198,137,199,138,201,137,199,140",zoneName:"BRT"},{timezone:"America/Marigot",country:"MF",pin:"162,100",offset:-4,points:"162,100,162,100,162,100",zoneName:"AST"},{timezone:"America/Martinique",country:"MQ",pin:"165,105",offset:-4,points:"165,104,165,104,165,104",zoneName:"AST"},{timezone:"America/Matamoros",country:"MX",pin:"115,89",offset:-6,points:"113,89,112,89,109,84,108,84,107,85,107,85,107,84,109,84,112,87,112,88,115,89,115,90,113,89",zoneName:"CST"},{timezone:"America/Mazatlan",country:"MX",pin:"102,93",offset:-7,points:"104,93,106,95,105,96,104,96,104,95,103,94,100,90,98,89,98,88,100,88,101,90,104,93",zoneName:"MST"},{timezone:"America/Mazatlan",country:"MX",pin:"102,93",offset:-7,points:"91,86,93,86,98,93,97,93,94,91,94,89,93,88,92,88,90,86,92,87,91,86",zoneName:"MST"},{timezone:"America/Menominee",country:"US",pin:"128,62",offset:-6,points:"128,62,128,61,124,60,128,61,129,62,128,62",zoneName:"CST"},{timezone:"America/Mexico_City",country:"MX",pin:"112,98",offset:-6,points:"119,103,118,102,116,103,114,103,104,98,103,97,104,96,105,96,106,95,105,94,106,92,108,91,107,90,110,91,111,93,114,94,117,99,119,100,122,99,122,100,124,100,124,101,123,101,124,102,123,103,122,105,119,102,119,103",zoneName:"CST"},{timezone:"America/Merida",country:"MX",pin:"126,96",offset:-6,points:"124,96,125,95,128,95,128,96,126,98,126,100,122,100,122,99,123,99,124,96",zoneName:"CST"},{timezone:"America/Metlakatla",country:"US",pin:"67,48",offset:-8,points:"67,48,67,48,67,48",zoneName:"PST"},{timezone:"America/Miquelon",country:"PM",pin:"172,60",offset:-3,points:"172,60,172,60,172,60",zoneName:"PMST"},{timezone:"America/Moncton",country:"CA",pin:"160,61",offset:-4,points:"157,58,160,59,159,60,160,60,160,61,161,61,158,62,156,62,156,62,156,60,154,59,155,58,157,58",zoneName:"AST"},{timezone:"America/Monterrey",country:"MX",pin:"111,89",offset:-6,points:"107,90,108,91,106,92,105,94,104,94,101,90,103,88,105,88,106,88,106,86,106,85,108,84,109,84,112,89,115,89,115,90,115,90,114,90,114,94,111,93,110,91,107,90",zoneName:"CST"},{timezone:"America/Montevideo",country:"UY",pin:"172,173",offset:-2,points:"172,173,170,173,169,172,170,167,171,167,172,168,173,168,176,170,175,173,172,173",zoneName:"UYT"},{timezone:"America/Montreal",country:"",pin:"250,125",offset:-5,points:"146,63,147,62,144,62,141,61,139,59,140,53,140,54,140,53,141,52,140,51,140,50,139,49,142,48,144,47,143,44,141,44,143,42,142,41,143,41,142,41,142,40,141,41,142,40,142,39,142,38,146,39,14
8,38,150,39,150,39,151,39,150,40,151,40,153,40,153,42,151,42,153,42,153,43,154,43,152,44,154,43,155,44,154,45,156,44,155,44,156,44,156,45,158,43,158,44,158,43,159,43,159,43,160,43,159,42,160,41,160,42,161,42,161,43,160,43,162,43,160,44,162,45,161,47,162,47,161,47,161,48,162,48,161,49,159,49,157,48,157,49,156,49,157,49,156,50,157,51,156,52,158,51,158,53,160,53,161,52,162,52,162,52,161,52,161,53,171,53,171,53,168,54,167,55,164,55,158,55,158,55,156,57,155,57,152,60,149,61,157,57,160,57,161,58,159,58,155,58,153,60,152,62,151,63,146,63",zoneName:"EST"},{timezone:"America/Montserrat",country:"MS",pin:"164,102",offset:-4,points:"164,102,164,102,164,102",zoneName:"AST"},{timezone:"America/Nassau",country:"BS",pin:"143,90",offset:-5,points:"143,90,143,90,143,90",zoneName:"EST"},{timezone:"America/New_York",country:"US",pin:"147,68",offset:-5,points:"132,82,131,76,132,75,133,74,130,72,132,71,132,71,132,67,136,67,140,66,140,65,144,65,144,64,146,63,152,62,154,59,156,60,156,62,157,63,155,64,154,63,154,64,152,64,151,66,152,67,153,67,153,67,151,67,151,67,151,68,148,68,146,71,145,70,146,72,145,73,145,72,144,71,145,70,144,70,144,72,143,72,144,72,143,72,144,74,143,73,144,74,145,75,144,74,145,75,143,75,145,76,143,76,144,76,143,76,144,76,144,77,138,80,137,82,139,88,138,90,137,90,136,88,136,88,136,88,135,87,136,86,135,86,135,84,133,83,131,84,132,82",zoneName:"EST"},{timezone:"America/Nipigon",country:"CA",pin:"127,57",offset:-5,points:"127,57,127,57,127,57",zoneName:"EST"},{timezone:"America/Nome",country:"US",pin:"20,35",offset:-9,points:"25,32,23,32,22,31,18,30,19,29,23,29,25,27,25,32",zoneName:"AKST"},{timezone:"America/Nome",country:"US",pin:"20,35",offset:-9,points:"25,35,24,36,19,35,18,35,19,34,16,34,23,33,22,33,25,33,25,35",zoneName:"AKST"},{timezone:"America/Nome",country:"US",pin:"20,35",offset:-9,points:"25,42,22,42,20,41,21,40,19,39,22,37,23,37,25,37,25,42",zoneName:"AKST"},{timezone:"America/Noronha",country:"BR",pin:"205,130",offset:-2,points:"205,130,205,130,205,130",zoneName:"FNT"},{timezone:"America/North_Dakota/Beulah",country:"US",pin:"109,59",offset:-6,points:"109,59,108,60,108,59,109,59",zoneName:"CST"},{timezone:"America/North_Dakota/Center",country:"US",pin:"109,60",offset:-6,points:"110,60,109,60,110,60",zoneName:"CST"},{timezone:"America/North_Dakota/New_Salem",country:"US",pin:"109,60",offset:-6,points:"110,60,110,60,110,61,108,60,110,60",zoneName:"CST"},{timezone:"America/Ojinaga",country:"MX",pin:"105,84",offset:-7,points:"102,81,106,85,105,84,104,83,102,81,99,82,100,81,100,81,102,81",zoneName:"MST"},{timezone:"America/Panama",country:"PA",pin:"140,113",offset:-5,points:"142,113,143,114,142,115,141,114,141,113,142,114,140,112,138,114,139,115,138,115,138,115,137,114,137,114,136,113,135,114,135,113,135,112,137,113,140,112,142,113",zoneName:"EST"},{timezone:"America/Pangnirtung",country:"CA",pin:"159,33",offset:-5,points:"156,10,160,10,165,11,156,12,156,10",zoneName:"EST"},{timezone:"America/Pangnirtung",country:"CA",pin:"159,33",offset:-5,points:"156,34,157,34,157,35,157,35,160,35,159,36,160,36,160,36,160,37,159,36,160,38,159,37,159,38,158,37,158,37,156,37,156,34",zoneName:"EST"},{timezone:"America/Pangnirtung",country:"CA",pin:"159,33",offset:-5,points:"156,30,157,30,156,30,158,30,157,31,158,31,158,31,158,31,160,31,159,31,160,31,161,31,160,31,161,32,160,32,162,31,161,32,163,32,163,32,164,32,165,32,163,33,164,33,162,33,163,33,163,33,163,34,162,33,162,35,159,34,161,33,159,33,159,33,158,33,156,32,157,33,156,33,157,33,156,33,156,30",zoneName:"EST"},{timezone:"America/Paramaribo",country:"S
R",pin:"173,117",offset:-3,points:"175,118,175,120,174,122,172,121,172,122,171,122,169,119,170,118,171,118,171,117,175,117,175,118",zoneName:"SRT"},{timezone:"America/Phoenix",country:"US",pin:"94,79",offset:-7,points:"99,80,99,81,96,81,91,80,91,77,91,75,91,75,92,74,95,74,95,74,96,76,99,76,99,80",zoneName:"MST"},{timezone:"America/Port-au-Prince",country:"HT",pin:"150,99",offset:-5,points:"148,99,149,99,148,99",zoneName:"EST"},{timezone:"America/Port_of_Spain",country:"TT",pin:"165,110",offset:-4,points:"164,110,164,110,164,110",zoneName:"AST"},{timezone:"America/Porto_Velho",country:"BR",pin:"161,137",offset:-4,points:"167,142,166,144,164,144,160,142,159,138,157,139,158,138,160,138,161,137,162,136,163,136,165,137,165,140,167,140,167,142",zoneName:"AMT"},{timezone:"America/Puerto_Rico",country:"PR",pin:"158,99",offset:-4,points:"158,99,158,99,158,99",zoneName:"AST"},{timezone:"America/Rainy_River",country:"CA",pin:"119,57",offset:-6,points:"119,57,119,57,119,57",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"132,12,125,12,125,12,129,12,122,12,132,11,132,12",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"113,19,114,19,114,20,115,20,111,21,110,20,111,20,108,20,108,20,109,20,108,19,111,20,111,19,112,19,110,19,113,19",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"125,16,126,16,122,16,121,16,123,16,119,15,124,15,118,15,117,15,119,14,116,14,119,14,118,14,119,14,116,13,120,13,117,13,121,12,119,12,120,12,127,14,128,13,128,14,129,14,129,15,132,15,128,16,128,15,127,16,128,16,127,16,127,16,125,16",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"132,21,122,21,122,21,122,20,121,19,118,19,118,19,115,18,117,18,121,18,120,19,124,19,124,19,123,19,126,19,123,19,126,20,132,20,132,21",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"132,19,125,19,130,18,127,17,129,17,132,18,132,19",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"132,28,127,27,126,26,129,26,125,26,125,25,126,25,125,24,128,23,132,23,130,24,130,24,130,25,132,26,129,26,132,26,132,28",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"125,36,124,37,119,36,123,37,124,37,122,38,122,38,120,39,120,39,119,40,120,40,118,42,108,42,108,32,126,32,126,29,128,30,128,30,127,31,129,32,130,31,131,30,132,30,132,33,130,33,131,33,129,34,123,33,129,35,128,36,125,36",zoneName:"CST"},{timezone:"America/Rankin_Inlet",country:"CA",pin:"122,38",offset:-6,points:"118,22,120,22,125,22,122,24,119,24,120,24,120,25,117,24,117,23,119,23,118,22",zoneName:"CST"},{timezone:"America/Recife",country:"BR",pin:"202,136",offset:-3,points:"196,136,197,136,198,135,199,137,201,135,202,136,201,137,199,138,198,137,197,138,195,137,194,138,193,137,194,136,194,135,196,136",zoneName:"BRT"},{timezone:"America/Regina",country:"CA",pin:"105,55",offset:-6,points:"104,57,97,57,97,52,99,51,97,51,97,42,108,42,108,48,108,49,109,50,109,57,104,57",zoneName:"CST"},{timezone:"America/Resolute",country:"CA",pin:"118,21",offset:-6,points:"118,20,120,20,120,21,116,21,118,20",zoneName:"CST"},{timezone:"America/Rio_Branco",country:"BR",pin:"156,139",offset:-4,points:"148,135,152,136,157,139,155,140,152,140,152,138,150,139,150,138,148,138,149,137,147,135,148,135",zoneName:"ACT"},{timezone:"America/Santa_Isabel",country:"MX",pin:"90,83",offset:-8,points:"91,80,91,83,93,86,91,86,92,85,89,84,88,81,88,80,9
1,80",zoneName:"PST"},{timezone:"America/Santarem",country:"BR",pin:"174,128",offset:-3,points:"176,125,177,126,178,127,177,127,178,129,177,130,177,132,178,134,177,136,178,138,177,138,171,138,170,137,169,134,172,128,168,126,168,123,170,122,172,122,172,121,174,121,174,123,176,123,176,125",zoneName:"BRT"},{timezone:"America/Santiago",country:"CL",pin:"152,171",offset:-3,points:"152,199,154,198,155,198,155,201,150,201,153,201,152,200,154,201,154,201,154,201,153,200,154,199,152,199,152,199",zoneName:"CLT"},{timezone:"America/Santiago",country:"CL",pin:"152,171",offset:-3,points:"148,193,148,195,148,196,150,195,149,197,150,197,155,198,152,198,151,200,149,199,150,199,151,198,151,198,149,198,149,199,148,199,149,198,148,198,148,197,149,198,149,197,149,198,149,198,149,198,149,197,148,196,148,197,149,197,148,197,149,197,148,197,148,197,148,197,147,197,148,196,147,196,148,195,148,195,147,196,147,195,146,195,147,195,147,194,148,194,147,193,147,194,147,193,147,193,147,193,147,192,146,192,148,192,148,191,146,191,147,191,147,191,147,191,147,190,145,190,146,189,147,189,147,190,148,188,148,189,148,188,149,188,148,188,149,187,148,186,149,184,149,184,149,184,149,183,149,183,150,183,148,183,147,182,148,180,148,177,148,177,151,172,151,165,152,162,152,157,153,155,152,150,154,149,155,152,155,153,156,157,157,157,156,158,155,159,155,162,153,164,153,167,152,168,153,173,152,174,152,175,151,176,152,179,150,180,150,183,150,184,151,186,150,187,151,187,150,187,151,188,150,188,150,190,149,191,149,193,148,193",zoneName:"CLT"},{timezone:"America/Santo_Domingo",country:"DO",pin:"153,99",offset:-4,points:"151,100,150,100,150,99,150,97,153,98,154,98,153,98,155,99,155,100,151,100",zoneName:"AST"},{timezone:"America/Scoresbysund",country:"GL",pin:"219,27",offset:-1,points:"216,25,219,25,218,26,220,25,219,26,220,26,220,26,219,26,220,26,219,26,220,27,217,27,216,26,216,25",zoneName:"EGT"},{timezone:"America/Sao_Paulo",country:"BR",pin:"185,158",offset:-2,points:"181,166,179,168,178,170,180,167,179,167,179,168,176,172,176,171,176,170,175,170,173,168,172,168,171,167,170,167,173,164,175,162,175,161,174,161,175,159,176,157,178,155,179,152,177,151,176,149,179,146,180,142,181,143,182,142,182,143,184,143,186,143,186,146,189,145,195,147,194,149,195,150,195,152,193,155,193,156,192,157,188,157,183,160,182,160,183,161,182,161,183,161,182,164,181,166",zoneName:"BRST"},{timezone:"America/Sitka",country:"US",pin:"62,46",offset:-9,points:"66,45,67,46,65,46,66,45,66,45",zoneName:"AKST"},{timezone:"America/St_Barthelemy",country:"BL",pin:"163,100",offset:-4,points:"163,100,163,100,163,100",zoneName:"AST"},{timezone:"America/St_Johns",country:"CA",pin:"177,59",offset:-3.5,points:"173,53,172,54,173,54,172,54,171,56,172,55,173,56,172,56,173,56,173,57,174,56,176,57,175,58,176,57,175,58,175,59,177,58,176,59,177,59,176,60,176,60,176,59,175,60,175,59,175,58,173,60,174,59,172,59,173,59,173,58,171,59,168,59,169,58,168,58,169,57,170,57,169,57,170,56,169,56,170,55,173,53",zoneName:"NST"},{timezone:"America/Thule",country:"GL",pin:"154,19",offset:-4,points:"161,19,155,19,153,19,156,18,151,18,158,17,153,17,153,17,149,16,158,15,161,19",zoneName:"AST"},{timezone:"America/St_Kitts",country:"KN",pin:"163,101",offset:-4,points:"163,101,163,101,163,101",zoneName:"AST"},{timezone:"America/St_Lucia",country:"LC",pin:"165,106",offset:-4,points:"165,105,165,106,165,105",zoneName:"AST"},{timezone:"America/St_Thomas",country:"VI",pin:"160,100",offset:-4,points:"160,99,160,99,160,99",zoneName:"AST"},{timezone:"America/St_Vincent",country:"VC",pin:"165,107",offset:-4,points:
"165,106,165,107,165,106",zoneName:"AST"},{timezone:"America/Swift_Current",country:"CA",pin:"100,55",offset:-6,points:"100,55,100,55,100,55",zoneName:"CST"},{timezone:"America/Tegucigalpa",country:"HN",pin:"129,105",offset:-6,points:"129,106,129,107,128,106,126,105,126,104,128,103,131,103,133,103,135,104,132,104,131,106,129,106",zoneName:"CST"},{timezone:"America/Thunder_Bay",country:"CA",pin:"126,58",offset:-5,points:"126,58,126,58,126,58",zoneName:"EST"},{timezone:"America/Tijuana",country:"MX",pin:"87,80",offset:-8,points:"87,80,87,80,87,80",zoneName:"PST"},{timezone:"America/Toronto",country:"CA",pin:"140,64",offset:-5,points:"146,62,147,62,143,64,142,63,143,64,143,64,140,64,139,65,140,65,135,67,137,65,137,63,137,62,138,63,139,63,138,61,137,61,136,62,133,61,133,60,132,58,131,58,130,57,127,57,128,58,126,58,126,58,124,58,124,57,125,56,125,55,124,53,125,53,125,52,126,52,127,51,126,50,125,50,125,47,127,46,128,47,132,48,136,48,136,51,138,54,140,54,140,60,141,61,144,62,146,62",zoneName:"EST"},{timezone:"America/Tortola",country:"VG",pin:"160,99",offset:-4,points:"160,99,160,99,160,99",zoneName:"AST"},{timezone:"America/Vancouver",country:"CA",pin:"79,57",offset:-8,points:"72,54,76,55,79,58,76,57,77,57,76,57,75,56,74,56,75,56,72,55,73,55,72,54",zoneName:"PST"},{timezone:"America/Vancouver",country:"CA",pin:"79,57",offset:-8,points:"63,43,62,42,59,43,57,42,83,42,83,45,78,46,79,48,86,52,86,52,85,53,88,55,88,57,79,57,81,56,81,57,80,56,81,57,80,57,80,56,79,57,79,56,79,56,79,55,79,56,78,56,78,56,78,56,78,55,77,56,77,55,77,55,76,55,77,54,76,54,76,55,75,55,76,54,75,55,74,54,75,54,73,54,74,54,73,54,74,53,73,53,72,53,73,52,74,53,74,52,75,52,73,52,73,52,73,52,72,53,72,52,72,52,72,52,71,51,73,51,71,51,71,50,70,51,69,50,70,50,69,49,69,50,69,49,70,48,69,49,70,48,70,48,69,49,69,47,67,46,65,44,63,43",zoneName:"PST"},{timezone:"America/Whitehorse",country:"CA",pin:"62,41",offset:-8,points:"57,29,60,29,61,32,64,32,64,33,66,33,66,35,69,36,70,37,69,37,70,37,70,39,73,40,74,41,77,40,78,42,54,41,54,28,57,29",zoneName:"PST"},{timezone:"America/Winnipeg",country:"CA",pin:"115,56",offset:-6,points:"122,58,119,57,118,56,118,57,109,57,109,50,108,49,108,48,108,42,118,42,118,43,121,43,122,45,121,46,124,45,126,46,125,47,125,50,126,50,127,51,126,52,125,52,125,53,124,53,125,55,125,56,123,57,122,58",zoneName:"CST"},{timezone:"America/Yakutat",country:"US",pin:"56,42",offset:-9,points:"56,42,56,42,56,42",zoneName:"AKST"},{timezone:"America/Yellowknife",country:"CA",pin:"91,38",offset:-7,points:"83,31,92,34,95,34,97,35,108,36,108,42,78,42,77,40,74,41,73,40,70,39,70,37,69,37,70,37,69,36,66,35,66,33,64,33,64,32,61,32,60,29,62,30,61,29,63,28,64,28,64,29,70,28,67,28,67,29,68,28,68,29,69,28,73,27,72,27,76,29,77,28,78,28,77,29,79,28,82,28,81,28,81,30,83,31",zoneName:"MST"},{timezone:"America/Yellowknife",country:"CA",pin:"91,38",offset:-7,points:"88,17,90,18,88,18,89,18,87,19,86,19,86,18,83,20,79,19,84,18,88,17",zoneName:"MST"},{timezone:"America/Yellowknife",country:"CA",pin:"91,38",offset:-7,points:"84,22,87,22,90,23,85,24,83,25,83,26,79,26,75,25,78,23,77,22,81,21,84,22",zoneName:"MST"},{timezone:"America/Yellowknife",country:"CA",pin:"91,38",offset:-7,points:"91,19,97,20,97,21,93,22,91,21,96,20,87,20,90,20,87,20,91,20,88,20,91,19",zoneName:"MST"},{timezone:"America/Yellowknife",country:"CA",pin:"91,38",offset:-7,points:"90,23,92,23,91,24,93,24,96,24,95,25,97,24,97,28,87,28,87,28,87,28,95,27,87,27,86,26,90,26,86,26,87,25,85,25,86,25,86,24,90,23",zoneName:"MST"},{timezone:"Antarctica/Macquarie",country:"AU",pin:"471,201",offset:11,
points:"471,201,471,201,471,201",zoneName:"MIST"},{timezone:"Arctic/Longyearbyen",country:"SJ",pin:"272,17",offset:1,points:"275,14,275,14,275,15,275,14,280,16,276,16,276,17,274,18,274,19,273,19,272,18,273,18,269,17,273,17,270,17,274,17,269,17,269,17,274,16,273,16,273,16,271,16,271,15,270,16,271,16,268,16,267,16,268,16,266,15,267,15,266,15,267,15,266,15,265,15,269,14,267,14,269,15,269,15,270,14,273,15,272,14,275,14",zoneName:"CET"},{timezone:"Arctic/Longyearbyen",country:"SJ",pin:"272,17",offset:1,points:"285,14,288,14,283,15,275,14,277,14,276,13,278,14,277,13,281,14,282,13,282,13,282,14,285,14",zoneName:"CET"},{timezone:"Asia/Aden",country:"YE",pin:"313,107",offset:3,points:"313,107,310,107,309,104,310,101,314,102,314,103,318,100,322,99,324,102,323,102,322,103,313,107",zoneName:"AST"},{timezone:"Asia/Almaty",country:"KZ",pin:"357,65",offset:6,points:"348,66,345,68,343,68,342,66,344,65,345,64,344,63,343,61,337,60,339,58,339,58,340,57,342,57,345,55,344,54,344,52,342,52,342,51,342,51,342,51,342,49,346,48,348,48,349,50,352,50,352,51,357,49,356,50,358,51,361,55,362,54,363,55,366,54,368,56,371,56,371,57,369,58,369,60,365,59,364,62,365,62,363,62,361,63,362,63,362,65,361,66,359,65,355,66,353,65,352,65,352,66,350,66,348,66",zoneName:"ALMT"},{timezone:"Asia/Amman",country:"JO",pin:"300,81",offset:2,points:"302,82,303,83,302,83,300,84,299,84,299,80,301,80,304,79,305,80,301,81,302,82",zoneName:"EET"},{timezone:"Asia/Anadyr",country:"RU",pin:"497,35",offset:12,points:"10,32,14,33,13,34,12,34,12,34,10,34,11,34,11,35,9,35,11,36,10,36,6,35,6,34,2,34,2,33,2,33,0,33,1,34,0,35,0,29,7,31,8,33,9,33,7,32,10,32",zoneName:"ANAT"},{timezone:"Asia/Anadyr",country:"RU",pin:"497,35",offset:12,points:"497,35,493,35,498,36,499,38,499,39,496,38,492,39,491,38,487,39,484,37,486,37,484,36,473,35,471,34,471,33,470,33,471,32,469,31,470,30,476,30,476,29,475,28,476,28,483,28,486,30,488,29,486,28,487,28,495,28,500,29,500,35,498,35,497,35",zoneName:"ANAT"},{timezone:"Asia/Aqtau",country:"KZ",pin:"320,63",offset:5,points:"328,63,328,68,325,66,323,67,323,66,321,65,320,63,322,63,321,63,321,62,324,62,323,60,321,60,319,61,317,60,318,60,318,59,315,58,319,58,325,57,325,57,326,57,327,60,329,61,329,62,328,63",zoneName:"AQTT"},{timezone:"Asia/Aqtobe",country:"KZ",pin:"329,55",offset:5,points:"326,55,326,54,327,55,328,54,331,54,333,55,335,54,336,54,337,56,337,57,339,58,337,60,335,59,331,62,329,62,328,60,326,59,326,57,324,57,326,56,326,55",zoneName:"AQTT"},{timezone:"Asia/Ashgabat",country:"TM",pin:"331,72",offset:5,points:"340,74,340,75,337,76,335,75,335,74,334,74,332,73,329,72,325,73,325,71,324,71,324,70,325,70,323,69,324,68,323,67,325,66,327,68,329,68,329,67,331,66,331,66,331,66,331,66,333,66,334,68,336,68,337,70,343,72,342,73,341,73,340,74",zoneName:"TMT"},{timezone:"Asia/Baghdad",country:"IQ",pin:"312,79",offset:3,points:"315,83,315,85,312,84,308,82,304,80,304,79,307,77,307,74,309,73,312,73,313,75,314,75,313,78,314,79,316,80,316,81,316,82,317,83,315,83",zoneName:"AST"},{timezone:"Asia/Bahrain",country:"BH",pin:"320,88",offset:3,points:"320,89,320,89,320,89",zoneName:"AST"},{timezone:"Asia/Bangkok",country:"TH",pin:"390,106",offset:7,points:"391,116,392,117,390,117,390,116,389,116,389,116,386,113,387,110,388,109,388,106,386,104,387,102,385,99,386,99,386,97,387,98,389,97,390,97,390,98,391,98,390,101,392,100,393,100,393,99,394,100,397,103,396,105,394,105,392,106,393,109,391,107,390,107,390,106,389,107,388,112,389,112,390,115,391,116",zoneName:"ICT"},{timezone:"Asia/Baku",country:"AZ",pin:"319,69",offset:4,points:"314,67,316,68,317,67,320,
69,319,69,318,72,317,71,317,70,317,70,315,71,315,70,313,69,314,69,313,68,315,68,314,67",zoneName:"AZT"},{timezone:"Asia/Chongqing",country:"",pin:"250,125",offset:8,points:"403,95,402,95,401,95,401,95,399,95,398,94,398,93,396,93,394,94,394,93,393,94,392,93,391,94,391,96,391,96,390,95,389,95,389,94,388,94,388,93,387,93,387,91,385,92,386,90,387,89,387,87,386,86,387,86,387,85,388,84,387,81,385,80,386,79,385,78,387,78,388,77,387,77,388,77,387,75,388,73,388,72,389,72,385,71,386,70,384,70,385,68,386,68,385,66,390,66,396,67,399,66,403,66,404,65,407,67,409,66,408,67,408,69,406,69,404,70,403,72,404,73,403,77,404,79,402,79,403,80,402,80,403,82,401,83,401,84,402,85,402,87,401,87,402,87,402,88,402,89,405,89,404,90,405,90,406,91,405,93,406,92,407,95,405,95,405,94,405,94,404,93,403,95",zoneName:"CST"},{timezone:"Asia/Beirut",country:"LB",pin:"299,78",offset:2,points:"300,79,299,79,300,77,301,77,301,77,300,79",zoneName:"EET"},{timezone:"Asia/Bishkek",country:"KG",pin:"354,65",offset:6,points:"354,69,353,69,353,70,350,71,346,70,352,68,350,67,349,68,347,67,349,66,348,66,349,66,352,66,352,65,353,65,355,66,359,65,361,66,359,68,357,68,356,69,354,69",zoneName:"KGT"},{timezone:"Asia/Brunei",country:"BN",pin:"410,118",offset:8,points:"410,118,410,119,410,118",zoneName:"BNT"},{timezone:"Asia/Choibalsan",country:"MN",pin:"409,58",offset:8,points:"412,61,411,62,409,62,408,63,406,62,405,60,405,59,407,59,406,56,409,55,412,56,410,58,411,59,415,58,416,59,416,61,415,60,412,61",zoneName:"CHOT"},{timezone:"Asia/Colombo",country:"LK",pin:"361,115",offset:5.5,points:"363,116,362,117,361,116,361,114,361,111,363,113,364,115,363,116",zoneName:"IST"},{timezone:"Asia/Damascus",country:"SY",pin:"300,78",offset:2,points:"302,80,301,80,299,80,299,79,301,77,300,76,301,75,301,74,304,74,309,73,307,75,307,77,302,80",zoneName:"EET"},{timezone:"Asia/Dhaka",country:"BD",pin:"376,92",offset:6,points:"375,94,375,94,375,95,374,95,373,91,372,91,374,90,372,89,373,88,374,89,375,89,375,90,378,90,377,92,377,93,378,92,379,95,378,95,378,96,378,94,378,94,377,94,377,93,376,93,376,92,375,92,376,93,374,92,375,92,376,94,375,94",zoneName:"BDT"},{timezone:"Asia/Dili",country:"TL",pin:"424,137",offset:9,points:"424,137,427,137,424,138,424,137",zoneName:"TLT"},{timezone:"Asia/Dubai",country:"AE",pin:"327,90",offset:4,points:"328,91,328,92,327,92,327,94,323,93,322,92,325,91,328,89,328,90,328,91",zoneName:"GST"},{timezone:"Asia/Dushanbe",country:"TJ",pin:"346,71",offset:5,points:"346,73,344,73,345,72,344,70,345,70,346,69,345,69,348,68,348,68,348,69,349,69,347,69,346,70,349,70,350,71,352,70,352,71,354,72,354,73,352,73,350,74,349,72,349,72,346,73",zoneName:"TJT"},{timezone:"Asia/Gaza",country:"PS",pin:"298,81",offset:2,points:"298,81,298,82,298,81",zoneName:"EET"},{timezone:"Asia/Harbin",country:"",pin:"250,125",offset:8,points:"424,68,423,65,423,66,421,65,421,63,420,64,420,62,419,61,420,61,421,60,422,60,420,59,423,58,423,58,425,54,424,53,421,54,420,53,423,51,425,52,427,56,431,57,432,59,437,58,437,59,435,62,433,62,432,63,432,65,431,66,431,66,430,65,430,66,428,67,428,68,426,67,424,68",zoneName:"CST"},{timezone:"Asia/Hebron",country:"PS",pin:"299,81",offset:2,points:"299,81,299,80,299,81,298,81,299,81",zoneName:"EET"},{timezone:"Asia/Ho_Chi_Minh",country:"VN",pin:"398,110",offset:7,points:"397,112,395,113,396,111,395,111,396,110,397,110,397,109,399,108,400,104,396,99,394,98,396,97,395,97,395,96,393,96,393,95,392,94,392,93,394,94,396,93,398,93,398,94,400,95,398,96,398,96,397,99,401,104,402,107,401,109,398,110,398,112,397,112",zoneName:"ICT"},{timezone:"Asia/Hong_Ko
ng",country:"HK",pin:"409,94",offset:8,points:"409,94,408,94,409,94",zoneName:"HKT"},{timezone:"Asia/Hovd",country:"MN",pin:"377,58",offset:7,points:"387,63,386,64,386,66,384,66,383,63,376,62,376,60,375,59,372,58,372,57,378,54,381,55,381,56,385,56,385,56,386,57,388,57,388,58,386,58,387,59,385,60,387,61,387,63",zoneName:"HOVT"},{timezone:"Asia/Irkutsk",country:"RU",pin:"395,52",offset:9,points:"411,46,410,46,411,48,412,48,412,49,408,51,409,51,408,52,406,52,404,53,401,53,400,55,401,55,400,56,396,55,393,55,392,55,392,54,387,52,388,51,384,50,383,50,384,49,384,48,386,47,385,46,386,45,389,44,390,45,390,44,392,43,392,43,395,44,396,43,396,42,397,42,396,41,395,41,398,39,398,38,397,37,398,37,398,36,400,36,401,36,400,37,402,37,402,38,402,38,403,38,402,39,402,40,404,40,402,42,402,43,406,42,406,43,411,41,413,42,413,42,415,43,415,44,413,44,413,45,414,45,413,46,411,46", -zoneName:"IRKT"},{timezone:"Asia/Jakarta",country:"ID",pin:"398,134",offset:7,points:"406,135,407,136,409,136,409,137,400,136,396,134,397,133,400,134,401,134,403,135,404,134,406,135",zoneName:"WIB"},{timezone:"Asia/Jakarta",country:"ID",pin:"398,134",offset:7,points:"397,131,397,133,396,133,396,133,395,133,395,133,392,131,389,125,388,125,387,123,383,119,382,117,385,118,386,119,393,124,393,125,394,125,394,126,395,126,395,129,397,128,397,129,397,131",zoneName:"WIB"},{timezone:"Asia/Jayapura",country:"ID",pin:"445,129",offset:9,points:"438,131,437,131,437,130,436,130,436,129,435,130,435,131,434,130,433,129,435,128,436,129,436,128,434,128,433,127,432,127,432,126,434,125,436,126,436,128,438,130,441,127,446,129,446,138,444,136,445,136,443,136,443,136,443,135,444,135,442,135,443,135,441,132,438,131",zoneName:"WIT"},{timezone:"Asia/Kabul",country:"AF",pin:"346,77",offset:4.5,points:"338,84,335,84,336,82,335,81,334,79,335,78,334,78,335,76,337,76,341,73,344,74,346,73,349,72,349,72,350,74,352,73,354,73,349,75,350,76,349,78,347,78,348,79,347,79,346,81,345,81,344,81,343,82,342,84,338,84",zoneName:"AFT"},{timezone:"Asia/Jerusalem",country:"IL",pin:"299,81",offset:2,points:"298,81,299,79,300,79,299,80,299,80,299,81,298,81,299,81,298,84,298,82,298,81",zoneName:"IST"},{timezone:"Asia/Kamchatka",country:"RU",pin:"470,51",offset:12,points:"476,45,477,47,475,47,475,48,475,49,472,50,472,51,470,51,470,52,468,54,466,47,468,45,468,45,471,44,475,41,477,41,477,40,478,40,478,39,480,38,477,38,476,39,477,39,476,39,475,39,476,38,475,37,476,37,476,36,477,36,477,35,484,36,486,37,484,37,486,38,491,38,492,39,489,40,487,42,484,41,481,42,481,41,479,42,477,42,477,43,475,44,477,45,476,45",zoneName:"PETT"},{timezone:"Asia/Karachi",country:"PK",pin:"343,90",offset:5,points:"348,86,347,88,348,89,349,91,348,91,346,91,344,92,344,91,343,91,342,89,336,90,336,89,338,88,338,87,337,87,337,86,336,85,335,84,339,84,342,84,343,82,346,81,347,79,348,79,347,78,349,78,349,77,350,76,349,75,350,74,354,74,355,74,356,75,358,76,355,77,353,77,353,79,355,80,354,81,354,82,350,86,348,86",zoneName:"PKT"},{timezone:"Asia/Kashgar",country:"",pin:"250,125",offset:6,points:"361,83,359,82,359,80,360,80,360,79,359,78,360,77,359,77,358,76,356,75,355,74,354,74,354,73,354,72,352,71,352,70,354,69,356,69,357,68,358,68,362,67,361,66,362,65,362,63,361,63,363,62,365,63,363,63,368,65,365,64,366,65,365,65,365,67,364,68,364,70,365,70,364,74,365,75,364,76,365,76,364,77,366,80,365,81,365,81,366,81,366,82,365,82,363,83,361,83",zoneName:"XJT"},{timezone:"Asia/Kathmandu",country:"NP",pin:"368,87",offset:5.8,points:"368,86,372,86,372,88,369,88,367,87,366,87,361,85,362,84,363,83,368,85,368,86",zoneName:"NPT"},{timezone:"Asia/K
olkata",country:"IN",pin:"373,94",offset:5.5,points:"362,106,361,109,361,111,360,111,360,112,360,112,359,112,358,113,357,114,356,113,352,103,351,97,351,96,351,95,352,95,351,95,351,94,351,94,350,96,348,96,346,94,347,94,348,93,346,93,345,92,346,92,345,92,346,91,349,91,347,87,348,86,350,86,354,82,354,81,355,80,353,79,353,77,355,77,358,76,359,77,360,77,359,78,360,79,360,80,359,80,359,82,363,83,361,85,369,88,372,88,372,86,373,86,374,88,378,88,378,87,377,86,379,86,381,84,384,84,384,85,384,85,384,86,385,86,385,87,385,87,384,87,382,88,381,92,380,92,379,95,378,92,377,93,377,92,378,90,375,90,375,89,374,89,373,88,372,89,374,90,372,91,373,91,374,95,373,94,373,95,372,94,371,95,371,96,370,97,368,98,364,101,364,102,362,103,362,106",zoneName:"IST"},{timezone:"Asia/Krasnoyarsk",country:"RU",pin:"379,47",offset:8,points:"396,16,388,17,392,15,393,15,392,16,394,15,396,16,396,16",zoneName:"KRAT"},{timezone:"Asia/Krasnoyarsk",country:"RU",pin:"379,47",offset:8,points:"385,14,386,14,384,14,389,14,389,15,388,15,389,15,382,15,380,14,379,14,385,14",zoneName:"KRAT"},{timezone:"Asia/Krasnoyarsk",country:"RU",pin:"379,47",offset:8,points:"383,12,386,13,384,13,385,14,380,14,377,14,383,12",zoneName:"KRAT"},{timezone:"Asia/Krasnoyarsk",country:"RU",pin:"379,47",offset:8,points:"384,51,388,51,386,54,386,55,385,56,381,56,381,55,378,54,374,56,374,55,375,55,374,54,372,53,374,52,373,51,374,50,373,49,373,49,373,48,374,47,373,46,374,45,372,44,373,43,373,43,371,42,367,42,368,41,367,40,369,40,367,39,369,37,368,37,369,36,369,35,367,35,367,34,366,34,366,32,364,32,364,31,364,31,365,30,365,29,360,28,362,27,360,26,362,25,359,24,366,25,364,26,364,27,364,27,364,27,364,28,367,28,365,27,366,27,366,26,366,26,362,24,362,24,362,23,362,23,362,23,371,22,371,22,369,22,371,22,369,21,371,21,372,21,371,21,374,20,381,20,379,19,380,19,388,19,387,19,390,19,391,18,395,17,398,18,395,18,399,18,398,19,404,18,406,19,405,19,408,20,408,20,406,20,408,20,407,21,400,23,397,23,399,23,396,24,404,23,404,23,403,23,404,24,404,24,406,25,406,26,407,26,403,27,402,28,397,29,398,29,398,31,397,32,397,32,398,34,399,34,397,35,400,36,398,36,398,37,397,37,398,38,398,39,396,40,395,41,397,41,397,42,396,42,396,43,396,43,393,43,390,45,390,45,389,44,386,45,385,46,386,47,384,48,384,49,383,50,384,51",zoneName:"KRAT"},{timezone:"Asia/Kuala_Lumpur",country:"MY",pin:"391,121",offset:8,points:"394,121,395,123,394,123,391,121,390,119,389,116,390,116,390,117,392,116,393,117,394,121",zoneName:"MYT"},{timezone:"Asia/Kuching",country:"MY",pin:"403,123",offset:8,points:"410,118,412,115,412,116,413,115,413,116,414,117,416,118,414,118,415,119,411,119,409,123,406,123,405,124,404,124,402,123,402,122,405,123,404,123,405,121,407,121,408,119,409,119,410,118,410,119,410,118",zoneName:"MYT"},{timezone:"Asia/Kuwait",country:"KW",pin:"317,84",offset:3,points:"317,83,317,83,317,83",zoneName:"AST"},{timezone:"Asia/Macau",country:"MO",pin:"408,94",offset:8,points:"408,94,408,94,408,94",zoneName:"CST"},{timezone:"Asia/Magadan",country:"RU",pin:"459,42",offset:12,points:"464,42,464,42,466,43,460,43,460,43,462,43,458,42,457,43,455,43,454,43,454,42,452,41,454,40,453,39,450,39,445,38,444,37,445,36,444,35,445,34,444,33,445,32,443,32,446,31,446,30,447,30,447,29,448,29,447,28,448,28,446,28,450,25,457,25,458,25,457,25,461,27,470,26,472,27,472,28,474,28,474,29,476,28,476,29,476,30,470,30,469,31,471,32,470,33,471,33,471,34,477,35,477,36,476,36,476,37,475,37,476,38,475,39,476,39,472,41,473,40,472,40,473,39,469,39,464,42",zoneName:"MAGT"},{timezone:"Asia/Makassar",country:"ID",pin:"416,132",offset:8,points:"412,128,411
,130,409,131,409,129,411,127,410,125,410,125,410,124,408,124,410,122,411,119,413,119,414,120,413,120,413,120,413,120,414,122,414,122,415,124,414,123,413,126,411,127,412,128",zoneName:"WITA"},{timezone:"Asia/Makassar",country:"ID",pin:"416,132",offset:8,points:"420,129,420,130,421,131,419,132,418,130,418,129,417,129,417,133,416,133,416,131,416,130,415,130,415,129,416,127,416,126,417,124,418,123,422,124,424,123,423,124,417,124,417,125,417,126,418,127,419,126,421,126,421,126,418,128,420,129",zoneName:"WITA"},{timezone:"Asia/Manila",country:"PH",pin:"418,105",offset:8,points:"423,117,422,116,423,115,422,114,421,115,420,114,420,115,419,115,420,114,421,113,422,113,422,114,423,113,423,112,424,112,424,111,425,112,426,115,425,116,425,115,424,117,424,117,423,117",zoneName:"PHT"},{timezone:"Asia/Manila",country:"PH",pin:"418,105",offset:8,points:"421,105,421,106,421,105,422,106,422,106,422,107,422,108,420,106,420,107,419,106,418,106,418,105,417,104,417,105,417,104,416,102,417,103,418,99,420,99,420,102,419,104,419,106,421,105",zoneName:"PHT"},{timezone:"Asia/Muscat",country:"OM",pin:"331,92",offset:4,points:"328,100,327,100,326,101,324,102,322,99,326,97,327,94,327,93,327,92,328,92,328,90,329,92,332,92,333,94,331,97,330,97,330,99,329,99,328,100",zoneName:"GST"},{timezone:"Asia/Nicosia",country:"CY",pin:"296,76",offset:2,points:"296,77,295,76,298,75,297,76,297,76,296,77",zoneName:"EET"},{timezone:"Asia/Novokuznetsk",country:"RU",pin:"371,50",offset:7,points:"373,46,374,47,373,48,373,49,373,49,374,50,374,50,373,51,374,52,373,53,371,52,370,51,371,51,368,49,367,47,373,46",zoneName:"KRAT"},{timezone:"Asia/Novosibirsk",country:"RU",pin:"365,49",offset:7,points:"367,47,368,49,367,50,366,50,365,51,362,49,358,51,356,50,357,49,355,50,355,49,354,48,356,47,355,47,356,46,354,44,356,42,357,40,364,41,366,40,368,41,368,42,371,42,373,43,372,44,374,45,373,46,367,47",zoneName:"NOVT"},{timezone:"Asia/Omsk",country:"RU",pin:"352,49",offset:7,points:"354,44,356,46,355,47,356,47,354,48,355,49,355,50,352,51,352,50,349,50,349,48,348,48,348,47,350,46,348,45,348,44,349,44,351,44,354,44",zoneName:"OMST"},{timezone:"Asia/Omsk",country:"RU",pin:"352,49",offset:7,points:"373,53,372,53,374,54,375,55,374,55,375,56,371,57,371,56,368,56,366,54,363,55,362,54,361,55,358,51,362,49,365,51,366,50,368,49,371,51,370,51,371,52,373,53",zoneName:"OMST"},{timezone:"Asia/Phnom_Penh",country:"KH",pin:"396,109",offset:7,points:"397,109,397,110,394,110,394,109,393,110,392,106,394,105,397,106,397,105,399,105,399,108,397,109",zoneName:"ICT"},{timezone:"Asia/Pontianak",country:"ID",pin:"402,125",offset:7,points:"411,127,409,130,407,129,405,130,405,129,403,129,403,127,402,126,401,123,402,122,404,124,405,124,406,123,408,123,408,124,410,124,410,125,410,125,411,127",zoneName:"WIB"},{timezone:"Asia/Pyongyang",country:"KP",pin:"425,71",offset:9,points:"426,72,424,73,423,72,424,71,424,71,424,70,423,70,423,69,425,68,426,67,428,68,428,67,429,67,431,65,431,66,430,67,430,68,427,70,428,72,426,72",zoneName:"KST"},{timezone:"Asia/Qatar",country:"QA",pin:"322,90",offset:3,points:"322,90,321,91,321,90,321,89,322,90",zoneName:"AST"},{timezone:"Asia/Qyzylorda",country:"KZ",pin:"341,63",offset:6,points:"344,52,344,54,345,55,342,57,340,57,339,58,337,57,337,56,336,54,333,53,335,52,334,52,335,51,336,51,335,51,336,51,335,51,335,50,342,49,342,51,342,52,344,52",zoneName:"QYZT"},{timezone:"Asia/Qyzylorda",country:"KZ",pin:"341,63",offset:6,points:"342,65,340,64,336,65,334,63,334,61,335,60,334,60,333,61,334,61,333,61,333,61,331,61,335,59,341,61,343,61,344,63,345,64,344,65,342,66,34
2,65",zoneName:"QYZT"},{timezone:"Asia/Rangoon",country:"MM",pin:"384,102",offset:6.5,points:"386,104,388,106,388,109,387,111,387,107,386,106,386,102,385,102,384,101,384,102,383,102,383,103,382,102,382,103,382,102,381,103,382,102,381,103,381,101,381,98,380,99,380,98,381,98,380,98,381,98,380,97,379,96,379,97,378,96,379,94,380,92,381,92,382,88,384,87,385,87,385,87,386,85,387,87,387,88,387,89,385,91,385,92,387,91,387,93,388,93,388,94,389,94,389,95,391,95,389,97,386,97,386,99,385,99,387,102,386,104",zoneName:"MMT"},{timezone:"Asia/Riyadh",country:"SA",pin:"315,91",offset:3,points:"310,101,309,102,307,98,304,96,303,92,302,91,299,86,298,86,299,84,300,84,303,83,301,81,304,80,306,81,312,84,317,85,318,87,320,88,319,89,321,91,322,91,321,91,323,93,327,93,327,94,326,97,318,100,314,103,314,102,312,101,310,101,310,101",zoneName:"AST"},{timezone:"Asia/Sakhalin",country:"RU",pin:"448,60",offset:11,points:"450,56,451,57,450,57,449,57,448,59,449,61,449,61,448,60,447,61,447,57,448,54,447,52,447,51,448,50,448,50,449,50,449,52,449,53,450,56",zoneName:"SAKT"},{timezone:"Asia/Samarkand",country:"UZ",pin:"343,70",offset:5,points:"345,71,345,72,344,73,342,73,343,72,337,70,336,68,334,68,333,66,331,66,331,66,331,66,331,66,329,67,329,68,328,68,328,63,331,62,331,63,333,64,334,63,336,65,340,64,342,65,342,67,342,67,343,68,343,69,344,70,344,71,345,71",zoneName:"UZT"},{timezone:"Asia/Seoul",country:"KR",pin:"426,73",offset:9,points:"429,76,427,76,427,77,427,77,426,77,425,77,426,75,425,74,426,74,426,73,426,72,428,71,429,73,430,75,429,76",zoneName:"KST"},{timezone:"Asia/Shanghai",country:"CN",pin:"419,82",offset:8,points:"401,84,401,83,403,82,402,80,403,80,402,79,404,79,403,77,404,73,403,72,404,70,406,69,408,69,408,67,409,66,407,67,406,66,404,65,406,64,405,63,405,63,406,62,408,63,414,60,416,61,416,59,415,58,411,59,410,58,412,56,414,56,416,55,415,55,418,53,417,52,418,51,423,51,420,53,421,54,424,53,425,54,423,58,423,58,420,59,422,60,421,60,420,61,419,61,420,62,420,64,421,63,421,65,423,66,423,65,425,68,418,71,419,70,418,70,419,70,420,69,418,68,415,71,414,71,413,71,414,72,415,72,415,73,416,73,418,72,420,73,420,74,417,75,416,76,417,77,418,80,419,81,417,81,419,82,417,83,420,83,419,84,419,84,419,85,419,86,418,86,417,88,416,88,417,88,416,90,416,89,415,91,414,91,413,92,412,92,412,93,409,93,409,94,408,94,408,93,408,94,407,94,406,93,406,92,405,93,406,91,405,90,404,90,405,89,402,89,402,88,402,87,401,87,402,87,402,85,401,84",zoneName:"CST"},{timezone:"Asia/Singapore",country:"SG",pin:"394,123",offset:8,points:"394,123,394,123,394,123",zoneName:"SGT"},{timezone:"Asia/Taipei",country:"TW",pin:"419,90",offset:8,points:"417,94,417,92,418,91,419,90,419,90,418,95,417,94",zoneName:"CST"},{timezone:"Asia/Tashkent",country:"UZ",pin:"346,68",offset:5,points:"345,69,346,69,345,70,344,70,343,68,345,68,349,66,347,67,349,68,350,67,352,68,350,69,348,69,348,68,345,69",zoneName:"UZT"},{timezone:"Asia/Tbilisi",country:"GE",pin:"312,67",offset:4,points:"309,67,308,67,308,66,306,64,310,65,311,66,313,66,315,68,310,68,309,67",zoneName:"GET"},{timezone:"Asia/Tehran",country:"IR",pin:"321,75",offset:3.5,points:"335,81,336,82,335,84,336,85,337,86,337,87,338,87,338,88,336,89,335,90,330,89,329,87,326,88,325,88,321,86,320,83,319,83,318,83,318,83,317,83,316,82,316,81,316,80,314,79,313,77,314,75,313,75,311,72,312,72,311,70,312,70,314,71,317,70,317,70,317,71,318,72,318,73,322,74,325,74,325,73,328,72,332,73,334,74,335,74,335,77,334,78,335,78,334,79,335,81",zoneName:"IRST"},{timezone:"Asia/Thimphu",country:"BT",pin:"375,87",offset:6,points:"378,87,378,88,375,88,373,87,3
75,86,377,86,378,87",zoneName:"BTT"},{timezone:"Asia/Tokyo",country:"JP",pin:"444,75",offset:9,points:"448,63,451,64,452,63,452,65,453,65,450,65,449,67,447,66,445,66,445,66,446,67,445,67,444,66,445,65,445,65,446,65,447,62,448,63",zoneName:"JST"},{timezone:"Asia/Tokyo",country:"JP",pin:"444,75",offset:9,points:"432,78,433,78,433,79,433,79,432,81,431,82,432,81,431,82,431,81,431,79,431,79,431,80,430,80,430,79,431,79,430,79,432,78",zoneName:"JST"},{timezone:"Asia/Tokyo",country:"JP",pin:"444,75",offset:9,points:"437,76,439,75,440,73,441,73,440,73,441,74,442,73,444,71,444,69,445,68,446,68,446,67,446,67,447,70,447,72,446,72,445,75,446,75,445,76,444,76,445,76,444,75,443,77,443,76,442,77,440,77,441,77,440,76,440,77,439,79,438,78,438,77,434,77,434,78,432,78,432,77,434,76,437,76",zoneName:"JST"},{timezone:"Asia/Ulaanbaatar",country:"MN",pin:"398,58",offset:8,points:"396,67,390,66,386,66,386,64,387,62,385,60,387,59,386,58,388,58,388,57,385,56,386,55,386,54,387,53,392,54,392,55,394,55,398,55,401,56,403,57,406,56,407,59,405,59,405,60,406,62,405,63,406,64,403,66,399,66,396,67",zoneName:"ULAT"},{timezone:"Asia/Urumqi",country:"CN",pin:"372,64",offset:6,points:"386,86,384,86,384,85,383,84,381,84,378,86,375,86,374,87,373,86,369,86,364,83,364,83,365,82,365,83,366,82,365,81,365,81,366,80,364,77,365,76,364,76,365,75,364,74,365,70,364,70,364,68,365,67,365,65,366,65,365,64,368,65,363,63,365,63,364,62,365,59,369,60,369,58,372,57,372,58,375,59,376,60,376,62,383,63,384,66,385,66,386,68,385,68,384,70,386,70,385,71,389,72,388,72,388,73,387,75,388,77,387,77,388,77,387,78,385,78,386,79,385,80,386,80,387,82,388,84,387,85,387,86,386,86",zoneName:"XJT"},{timezone:"Asia/Vientiane",country:"LA",pin:"393,100",offset:7,points:"398,105,397,106,396,105,397,103,394,100,393,99,393,100,392,100,390,101,391,98,390,98,390,97,389,97,390,95,391,96,391,94,393,95,393,96,395,96,395,97,396,97,394,98,396,99,398,102,399,103,399,105,398,105",zoneName:"ICT"},{timezone:"Asia/Vladivostok",country:"RU",pin:"433,65",offset:11,points:"433,63,433,62,435,62,437,59,437,58,432,59,431,57,432,57,433,56,432,54,433,53,435,52,435,52,437,52,437,51,437,51,433,51,433,51,433,50,431,50,435,48,432,48,432,47,433,46,432,46,433,45,433,44,433,44,434,44,433,43,433,42,435,40,435,40,436,39,437,40,439,39,433,37,434,36,434,35,433,34,431,33,435,31,435,30,434,29,435,28,435,28,435,27,433,27,433,26,435,25,435,25,434,25,436,26,444,26,444,25,445,25,443,25,446,24,446,24,455,25,449,25,446,28,448,28,447,28,448,29,447,29,447,30,446,30,446,31,443,32,445,32,444,33,445,34,444,35,445,35,445,36,445,37,444,37,444,38,448,39,453,39,454,40,452,41,454,42,454,42,448,43,438,49,440,49,440,50,441,50,441,51,442,50,442,51,443,50,444,50,446,51,445,51,447,53,445,55,445,57,441,61,436,65,435,66,433,65,431,66,431,66,432,65,432,63,433,63",zoneName:"VLAT"},{timezone:"Asia/Vladivostok",country:"RU",pin:"433,65",offset:11,points:"443,19,445,20,447,19,446,19,452,20,449,21,444,21,443,21,440,20,441,20,443,19",zoneName:"VLAT"},{timezone:"Asia/Yakutsk",country:"RU",pin:"430,39",offset:10,points:"432,48,435,48,431,50,433,50,433,51,437,51,437,51,437,52,435,52,435,52,433,53,431,55,433,55,433,56,432,57,428,56,425,52,422,51,418,51,417,52,418,52,418,53,416,55,416,55,414,56,409,55,404,57,400,56,400,55,401,55,400,55,401,53,404,53,406,52,408,52,409,51,408,51,412,49,412,49,411,48,410,46,413,46,414,45,413,45,413,44,415,44,415,42,413,42,413,42,411,41,406,43,406,42,403,43,402,43,404,40,402,40,402,39,403,38,402,38,402,38,402,37,400,37,401,36,401,36,397,35,399,34,398,34,397,32,397,32,398,31,398,29,397,29,402,28,403,27,406,26,
406,26,406,25,404,24,404,24,404,24,403,23,405,22,407,22,408,23,411,23,421,24,422,23,421,23,423,22,430,24,429,24,430,24,429,24,430,25,428,25,430,25,429,26,432,27,434,26,433,27,435,27,435,28,435,28,434,29,435,30,435,31,431,33,433,34,434,35,434,36,433,37,439,39,437,40,436,39,435,40,435,40,433,42,433,43,434,44,433,44,433,44,433,45,432,46,433,46,432,47,432,48",zoneName:"YAKT"},{timezone:"Asia/Yerevan",country:"AM",pin:"312,69",offset:4,points:"313,68,314,69,313,69,315,70,315,71,314,71,314,70,311,69,310,68,313,68",zoneName:"AMT"},{timezone:"Atlantic/Azores",country:"PT",pin:"214,73",offset:-1,points:"202,79,220,65",zoneName:"AZOT"},{timezone:"Atlantic/Bermuda",country:"BM",pin:"160,80",offset:-4,points:"155,85,165,75",zoneName:"AST"},{timezone:"Atlantic/Canary",country:"ES",pin:"229,86",offset:0,points:"220,92,236,79",zoneName:"WET"},{timezone:"Atlantic/Cape_Verde",country:"CV",pin:"217,104",offset:-1,points:"210,109,224,96",zoneName:"CVT"},{timezone:"Atlantic/Faroe",country:"FO",pin:"241,39",offset:0,points:"234,45,246,33",zoneName:"WET"},{timezone:"Atlantic/Madeira",country:"PT",pin:"227,80",offset:0,points:"221,88,233,74",zoneName:"WET"},{timezone:"Atlantic/Reykjavik",country:"IS",pin:"220,36",offset:0,points:"218,42,231,28",zoneName:"GMT"},{timezone:"Atlantic/South_Georgia",country:"GS",pin:"199,200",offset:-2,points:"192,213,214,195",zoneName:"GST"},{timezone:"Atlantic/St_Helena",country:"SH",pin:"242,147",offset:0,points:"230,181,242,136",zoneName:"GMT"},{timezone:"Atlantic/Stanley",country:"FK",pin:"170,197",offset:-3,points:"160,203,175,191",zoneName:"FKST"},{timezone:"Australia/Adelaide",country:"AU",pin:"442,173",offset:10.5,points:"438,161,446,161,446,178,444,177,444,175,443,174,444,175,444,175,442,174,442,173,442,172,441,174,440,174,441,173,442,172,441,170,441,171,439,173,439,174,438,173,438,173,438,172,436,170,432,169,429,169,429,161,438,161",zoneName:"ACDT"},{timezone:"Australia/Brisbane",country:"AU",pin:"463,163",offset:10,points:"458,156,459,156,460,158,459,157,461,158,463,161,463,163,463,164,462,164,460,166,459,165,457,165,446,165,446,161,442,161,442,148,444,150,446,149,447,146,446,144,447,142,447,142,448,140,449,142,450,145,451,145,452,146,453,151,457,153,458,156,458,156,458,156",zoneName:"AEST"},{timezone:"Australia/Broken_Hill",country:"AU",pin:"446,169",offset:10.5,points:"446,170,446,169,447,169,447,170,446,170",zoneName:"ACDT"},{timezone:"Australia/Currie",country:"AU",pin:"450,180",offset:11,points:"450,180,450,180,450,180",zoneName:"AEDT"},{timezone:"Australia/Darwin",country:"AU",pin:"432,142",offset:9.5,points:"429,146,430,146,430,145,430,145,431,143,432,143,431,142,432,143,432,142,434,142,434,141,433,140,436,142,438,142,439,142,438,142,439,142,439,142,440,141,440,142,440,143,439,143,438,145,437,146,442,148,442,161,429,161,429,146",zoneName:"ACST"},{timezone:"Australia/Eucla",country:"AU",pin:"429,169",offset:8.8,points:"429,168,428,169,424,170,424,168,429,168",zoneName:"ACWST"},{timezone:"Australia/Hobart",country:"AU",pin:"455,185",offset:11,points:"456,182,456,185,454,184,454,186,452,185,452,184,452,184,451,182,452,182,454,183,454,182,456,182",zoneName:"AEDT"},{timezone:"Australia/Lindeman",country:"AU",pin:"457,153",offset:10,points:"457,153,457,153,457,153",zoneName:"AEST"},{timezone:"Australia/Lord_Howe",country:"AU",pin:"471,169",offset:11,points:"471,169,471,169,471,169",zoneName:"LHDT"},{timezone:"Australia/Melbourne",country:"AU",pin:"451,178",offset:11,points:"448,173,449,173,451,175,455,175,456,176,458,177,455,178,453,179,453,179,452,178,451,178,452,178,451
,178,449,179,446,178,446,178,446,172,447,172,448,173",zoneName:"AEDT"},{timezone:"Australia/Perth",country:"AU",pin:"411,169",offset:8,points:"424,146,424,145,424,145,425,145,425,144,426,145,426,144,428,145,428,147,428,146,428,147,428,146,429,146,429,168,424,168,424,170,423,171,422,172,417,172,414,174,411,173,410,173,410,172,411,171,411,169,410,165,407,161,408,162,408,160,408,161,409,162,407,159,408,156,409,155,409,156,409,155,412,154,413,154,418,152,420,150,420,149,421,148,422,149,422,148,422,147,423,148,423,148,423,147,423,147,423,147,423,146,424,147,423,146,424,146",zoneName:"AWST"},{timezone:"Australia/Sydney",country:"AU",pin:"460,172",offset:11,points:"459,174,458,175,458,177,456,176,455,175,451,175,449,173,446,172,446,170,447,170,447,169,446,169,446,165,457,165,459,165,460,166,462,164,463,164,462,169,460,172,460,171,460,172,459,174",zoneName:"AEDT"},{timezone:"Europe/Amsterdam",country:"NL",pin:"257,52",offset:1,points:"258,53,258,55,257,53,255,53,256,53,256,53,256,52,258,52,257,52,258,51,260,51,259,52,260,52,259,53,258,53",zoneName:"CET"},{timezone:"Europe/Andorra",country:"AD",pin:"252,66",offset:1,points:"252,66,252,66,252,66",zoneName:"CET"},{timezone:"Europe/Athens",country:"GR",pin:"283,72",offset:2,points:"282,72,283,73,282,73,282,74,281,74,281,74,281,74,280,74,279,72,280,72,282,72,279,72,279,71,279,71,278,70,279,68,287,67,286,68,283,68,284,69,283,69,283,70,281,69,282,71,282,70,282,71,281,71,283,72,283,73,282,72",zoneName:"EET"},{timezone:"Europe/Belgrade",country:"RS",pin:"278,63",offset:1,points:"280,63,282,63,281,64,282,65,281,66,279,67,278,66,278,65,277,65,277,64,277,63,276,62,277,62,276,61,278,61,280,63",zoneName:"CET"},{timezone:"Europe/Bucharest",country:"RO",pin:"286,63",offset:2,points:"289,62,291,62,290,64,288,64,284,64,282,64,281,63,282,63,280,63,280,62,278,61,279,61,281,59,282,58,285,59,287,58,289,60,289,62",zoneName:"EET"},{timezone:"Europe/Berlin",country:"DE",pin:"269,52",offset:1,points:"260,57,259,56,258,53,260,52,259,52,260,50,262,51,262,50,264,51,262,50,262,50,263,49,262,49,262,49,264,49,264,50,265,49,265,50,267,49,270,50,270,52,270,52,271,54,267,55,269,57,268,58,268,59,261,59,261,57,260,57",zoneName:"CET"},{timezone:"Europe/Budapest",country:"HU",pin:"277,59",offset:1,points:"279,61,275,61,272,60,273,60,273,59,274,58,276,59,279,58,282,58,279,61",zoneName:"CET"},{timezone:"Europe/Chisinau",country:"MD",pin:"290,60",offset:2,points:"292,60,290,60,290,61,289,62,289,60,287,58,288,58,291,58,292,60",zoneName:"EET"},{timezone:"Europe/Bratislava",country:"SK",pin:"274,58",offset:1,points:"274,58,274,57,276,56,281,57,281,58,278,58,276,59,274,58",zoneName:"CET"},{timezone:"Europe/Brussels",country:"BE",pin:"256,54",offset:1,points:"254,54,254,54,257,53,259,55,258,56,257,55,256,56,254,54",zoneName:"CET"},{timezone:"Europe/Copenhagen",country:"DK",pin:"267,48",offset:1,points:"263,48,264,49,262,49,262,48,261,48,261,46,265,45,264,46,265,47,263,48",zoneName:"CET"},{timezone:"Europe/Dublin",country:"IE",pin:"241,51",offset:0,points:"241,50,242,50,241,53,237,54,236,53,237,53,236,53,237,52,238,52,236,52,238,51,236,51,237,50,236,50,238,50,239,49,238,49,240,48,240,48,239,49,241,50",zoneName:"GMT"},{timezone:"Europe/Gibraltar",country:"GI",pin:"243,75",offset:1,points:"243,75,243,75,243,75",zoneName:"CET"},{timezone:"Europe/Guernsey",country:"GG",pin:"246,56",offset:0,points:"247,56,247,56,247,56",zoneName:"GMT"},{timezone:"Europe/Helsinki",country:"FI",pin:"285,41",offset:2,points:"287,41,282,42,282,41,280,41,280,40,279,38,280,37,281,37,282,36,285,35,285,34,283,33,283,32,283,
31,283,31,279,29,280,29,281,30,285,30,287,28,289,28,291,28,290,30,292,31,290,32,292,34,291,35,292,36,292,36,294,38,289,41,287,41",zoneName:"EET"},{timezone:"Europe/Isle_of_Man",country:"IM",pin:"244,50",offset:0,points:"244,50,243,50,244,50",zoneName:"GMT"},{timezone:"Europe/Istanbul",country:"TR",pin:"290,68",offset:2,points:"301,74,300,75,300,75,300,74,296,75,293,74,291,75,290,74,288,74,289,74,288,74,288,73,288,73,288,72,286,72,287,71,288,72,287,71,287,70,286,70,286,69,287,69,292,68,291,68,293,68,295,67,299,67,301,68,303,68,309,67,311,68,311,69,312,70,311,70,312,72,311,72,312,73,312,74,309,73,304,74,301,74",zoneName:"EET"},{timezone:"Europe/Jersey",country:"JE",pin:"247,57",offset:0,points:"247,57,247,57,247,57",zoneName:"GMT"},{timezone:"Europe/Kaliningrad",country:"RU",pin:"278,49",offset:3,points:"279,48,279,49,281,49,282,49,277,49,278,49,278,49,278,49,279,48",zoneName:"EET"},{timezone:"Europe/Kiev",country:"UA",pin:"292,55",offset:2,points:"304,59,303,60,301,60,302,59,301,59,298,58,298,59,298,59,299,61,298,61,295,61,294,60,295,60,294,60,291,61,291,62,290,62,289,62,290,60,292,61,291,58,288,58,285,59,282,57,281,56,283,55,283,53,284,53,292,54,293,53,297,52,298,53,297,53,298,54,299,54,299,55,302,55,303,56,306,56,305,57,306,57,305,57,305,59,304,59",zoneName:"EET"},{timezone:"Europe/Lisbon",country:"PT",pin:"237,71",offset:0,points:"240,73,239,74,238,74,238,72,237,71,238,71,237,71,238,68,238,67,239,66,239,67,241,67,241,67,240,68,240,70,240,70,240,72,240,73",zoneName:"WET"},{timezone:"Europe/Ljubljana",country:"SI",pin:"270,61",offset:1,points:"272,60,273,60,272,61,271,62,269,62,269,62,269,61,269,60,272,60",zoneName:"CET"},{timezone:"Europe/London",country:"GB",pin:"250,53",offset:0,points:"246,53,245,54,243,53,245,52,244,51,243,52,244,51,246,51,246,51,246,50,245,49,246,49,243,49,244,48,243,47,242,47,242,48,242,47,243,46,241,46,243,46,242,45,243,45,242,44,243,44,243,44,246,44,244,45,245,45,244,45,247,45,246,46,245,47,246,47,245,47,247,47,248,49,250,50,250,51,249,50,250,51,250,52,252,52,252,53,250,54,252,54,251,54,245,55,245,55,242,55,244,54,246,54,247,53,246,53",zoneName:"GMT"},{timezone:"Europe/Luxembourg",country:"LU",pin:"259,56",offset:1,points:"259,56,258,56,258,55,259,56,259,56",zoneName:"CET"},{timezone:"Europe/Madrid",country:"ES",pin:"245,69",offset:1,points:"249,72,247,74,244,74,242,75,241,74,242,74,241,74,240,73,240,72,240,70,240,70,240,68,241,67,241,67,239,67,239,66,238,67,238,66,237,65,239,64,248,65,249,66,255,66,254,67,251,68,251,68,250,70,250,71,249,72",zoneName:"CET"},{timezone:"Europe/Malta",country:"MT",pin:"270,75",offset:1,points:"270,75,270,75,270,75",zoneName:"CET"},{timezone:"Europe/Mariehamn",country:"AX",pin:"278,42",offset:2,points:"279,41,279,41,279,41",zoneName:"EET"},{timezone:"Europe/Minsk",country:"BY",pin:"288,50",offset:3,points:"292,53,285,53,283,53,283,53,282,52,283,52,283,50,286,50,286,49,287,48,287,48,287,48,289,47,292,47,293,48,293,49,296,51,294,51,294,53,293,53,292,54,292,53",zoneName:"MSK"},{timezone:"Europe/Monaco",country:"MC",pin:"260,64",offset:1,points:"260,64,260,64,260,64",zoneName:"CET"},{timezone:"Europe/Moscow",country:"RU",pin:"302,48",offset:4,points:"326,23,329,23,327,24,327,24,327,24,327,25,327,25,330,27,324,27,325,26,321,25,324,24,323,24,325,24,323,24,326,23",zoneName:"MSK"},{timezone:"Europe/Moscow",country:"RU",pin:"302,48",offset:4,points:"333,20,344,18,346,18,345,19,335,20,333,21,333,21,332,21,332,22,331,21,332,22,329,22,330,23,329,22,330,23,328,23,329,23,329,23,325,23,328,23,324,23,328,22,327,22,329,21,328,21,329,21,328,21,333,20",z
oneName:"MSK"},{timezone:"Europe/Moscow",country:"RU",pin:"302,48",offset:4,points:"315,63,315,63,316,64,316,65,317,67,316,68,313,66,311,66,310,65,306,65,301,62,303,61,302,60,305,60,303,60,303,59,305,59,306,58,305,57,306,57,305,57,306,56,303,56,302,55,299,55,299,54,298,54,297,53,298,53,297,52,294,53,293,51,296,51,293,49,293,48,289,47,289,45,288,45,289,45,288,44,289,43,289,42,292,42,290,41,290,41,289,41,294,38,292,36,292,36,291,35,292,34,290,32,292,31,290,30,290,29,289,29,294,28,296,28,295,28,295,28,295,29,297,29,296,29,300,29,307,31,307,32,304,33,294,32,297,33,296,33,298,33,298,34,299,35,298,35,302,36,303,36,301,35,306,35,305,34,309,33,311,33,312,32,311,32,312,31,310,30,314,30,315,31,312,32,315,32,316,32,316,31,318,31,317,31,323,30,325,29,326,29,324,29,325,30,324,30,332,29,333,30,332,30,333,30,335,29,334,28,335,28,340,29,340,29,341,30,341,31,342,31,341,31,342,31,333,35,332,39,329,40,326,40,322,40,323,41,319,42,319,43,317,42,318,41,317,40,316,40,314,41,315,42,315,43,316,43,314,44,316,45,315,45,315,46,318,45,322,47,321,47,323,47,324,46,324,47,324,47,324,47,325,48,324,48,325,49,324,50,321,49,320,49,319,50,317,51,317,52,310,52,309,53,310,54,307,54,309,56,308,57,309,57,308,58,310,59,312,59,312,58,315,59,315,59,316,60,315,61,316,61,315,62,316,62,315,63",zoneName:"MSK"},{timezone:"Europe/Oslo",country:"NO",pin:"265,42",offset:1,points:"261,44,259,44,260,44,258,44,258,43,259,43,258,43,259,42,257,42,260,41,258,42,258,41,257,41,258,41,257,41,257,41,258,40,257,40,260,40,261,40,257,40,257,40,258,40,257,40,258,40,257,39,259,39,257,39,259,39,258,39,259,38,260,39,259,38,261,38,260,38,260,37,262,38,261,37,262,38,261,37,266,36,265,36,266,36,264,37,263,37,267,35,266,35,268,34,267,34,268,34,267,34,268,33,268,34,268,33,270,33,268,33,269,33,268,33,269,32,268,32,269,32,269,32,271,32,270,32,272,31,271,31,272,31,271,30,272,30,273,31,272,31,273,30,272,30,275,30,273,30,275,30,274,30,275,30,274,29,275,29,277,29,277,28,277,28,277,29,278,28,278,29,279,29,278,28,279,28,281,28,280,28,281,28,279,27,280,27,283,28,282,27,284,27,284,27,284,26,286,27,285,27,285,28,287,26,287,27,288,27,288,26,290,26,289,27,289,27,289,27,290,27,293,27,290,28,291,28,291,28,293,28,290,29,290,28,288,28,286,28,286,29,285,30,281,30,280,29,278,29,278,29,278,30,276,30,275,31,273,31,272,33,270,33,270,34,269,35,270,35,269,36,267,37,267,39,268,40,267,40,268,41,266,42,266,43,265,43,265,42,264,43,263,43,261,44",zoneName:"CET"},{timezone:"Europe/Paris",country:"FR",pin:"253,57",offset:1,points:"256,65,254,65,254,66,254,66,248,65,248,65,249,63,248,62,249,62,248,62,248,61,247,60,247,59,246,59,244,59,243,58,244,58,243,58,246,57,246,58,248,57,247,56,248,57,251,56,250,56,252,55,252,54,254,54,256,56,257,55,258,56,261,57,261,59,260,59,258,61,259,61,260,62,259,62,260,64,261,64,260,64,259,65,256,65",zoneName:"CET"},{timezone:"Europe/Podgorica",country:"ME",pin:"277,66",offset:1,points:"276,65,278,65,277,66,277,67,276,66,276,65",zoneName:"CET"},{timezone:"Europe/Prague",country:"CZ",pin:"270,55",offset:1,points:"271,57,270,57,268,56,267,55,271,54,273,55,273,55,275,55,276,56,274,57,271,57",zoneName:"CET"},{timezone:"Europe/Riga",country:"LV",pin:"283,46",offset:2,points:"281,45,283,46,284,45,284,45,285,44,288,45,289,47,287,48,285,47,279,47,279,46,281,45",zoneName:"EET"},{timezone:"Europe/Rome",country:"IT",pin:"267,67",offset:1,points:"273,69,273,70,274,71,272,72,273,71,272,69,265,66,264,64,262,63,260,64,261,64,260,64,259,62,260,62,259,61,262,60,263,61,263,60,264,61,265,60,267,60,269,60,269,61,269,62,267,62,267,63,267,63,269,65,270,67,272,67,272,67,276,69,275,70,274,6
9,273,69",zoneName:"CET"},{timezone:"Europe/Samara",country:"RU",pin:"320,51",offset:4,points:"320,49,323,49,322,50,322,51,321,53,317,52,317,51,317,51,319,50,320,49",zoneName:"SAMT"},{timezone:"Europe/Samara",country:"RU",pin:"320,51",offset:4,points:"325,47,324,47,324,47,324,47,324,46,323,47,321,47,321,47,322,46,321,45,322,45,322,44,325,44,326,46,325,46,325,47",zoneName:"SAMT"},{timezone:"Europe/San_Marino",country:"SM",pin:"267,64",offset:1,points:"267,64,267,64,267,64",zoneName:"CET"},{timezone:"Europe/Sarajevo",country:"BA",pin:"276,64",offset:1,points:"272,63,277,63,277,63,277,64,277,64,277,64,276,65,276,66,274,65,272,63",zoneName:"CET"},{timezone:"Europe/Simferopol",country:"RU",pin:"297,63",offset:2,points:"298,61,299,62,301,62,297,63,296,62,296,62,295,62,297,61,297,61,298,61",zoneName:"MSK"},{timezone:"Europe/Skopje",country:"MK",pin:"280,67",offset:1,points:"281,66,282,67,282,68,279,68,278,67,279,67,281,66", -zoneName:"CET"},{timezone:"Europe/Sofia",country:"BG",pin:"282,66",offset:2,points:"289,66,288,66,289,67,287,67,286,68,282,68,281,66,282,65,281,64,281,64,282,64,285,64,288,64,290,64,289,66",zoneName:"EET"},{timezone:"Europe/Stockholm",country:"SE",pin:"275,43",offset:1,points:"267,47,268,46,267,45,267,44,266,44,265,43,266,43,266,42,268,41,267,40,268,40,267,39,267,37,268,36,270,36,269,35,270,34,270,33,272,33,273,32,272,31,274,30,275,31,275,30,278,30,278,29,283,31,283,31,283,32,283,33,284,34,280,34,281,34,280,34,280,35,279,35,280,36,279,36,275,38,274,37,275,38,274,38,275,39,274,39,274,41,277,42,275,42,276,43,275,43,275,43,274,43,272,44,274,44,273,44,273,44,273,45,273,45,272,47,270,47,270,48,269,48,268,48,268,48,267,47",zoneName:"CET"},{timezone:"Europe/Tallinn",country:"EE",pin:"284,42",offset:2,points:"283,43,286,42,289,43,288,44,289,45,287,45,285,44,284,45,284,44,283,44,283,43,283,43",zoneName:"EET"},{timezone:"Europe/Tirane",country:"AL",pin:"278,68",offset:1,points:"278,70,277,69,277,67,277,66,277,66,279,66,278,68,279,69,278,70",zoneName:"CET"},{timezone:"Europe/Uzhgorod",country:"UA",pin:"281,57",offset:2,points:"282,57,284,58,282,58,281,58,282,57",zoneName:"EET"},{timezone:"Europe/Vaduz",country:"LI",pin:"263,60",offset:1,points:"263,59,263,60,263,59",zoneName:"CET"},{timezone:"Europe/Vatican",country:"VA",pin:"267,67",offset:1,points:"267,67,267,67,267,67",zoneName:"CET"},{timezone:"Europe/Vienna",country:"AT",pin:"273,58",offset:1,points:"263,59,268,59,268,58,271,57,273,57,274,58,273,59,273,60,272,60,270,61,267,60,267,60,264,60,263,59",zoneName:"CET"},{timezone:"Europe/Vilnius",country:"LT",pin:"285,49",offset:2,points:"279,47,285,47,287,48,286,49,286,50,283,50,282,49,281,49,280,48,279,47",zoneName:"EET"},{timezone:"Europe/Volgograd",country:"RU",pin:"312,57",offset:4,points:"315,57,316,59,318,59,318,60,317,60,318,61,318,62,317,61,317,62,315,62,316,61,315,61,316,60,315,59,315,59,314,59,313,58,312,58,312,59,311,59,308,58,309,57,308,57,309,56,307,54,310,54,309,53,310,52,317,52,320,53,320,54,317,55,318,56,316,55,315,57",zoneName:"MSK"},{timezone:"Europe/Volgograd",country:"RU",pin:"312,57",offset:4,points:"317,40,318,41,317,42,319,43,319,42,321,42,324,41,324,43,325,43,325,44,322,44,322,45,321,45,322,46,321,47,318,45,315,46,315,45,316,45,314,44,316,43,315,43,315,42,314,41,316,40,317,40",zoneName:"MSK"},{timezone:"Europe/Warsaw",country:"PL",pin:"279,52",offset:1,points:"283,55,282,56,282,57,280,56,276,56,275,55,273,55,273,55,271,54,270,52,270,52,270,50,275,49,277,49,277,50,282,49,283,51,282,52,284,54,283,55",zoneName:"CET"},{timezone:"Europe/Zagreb",country:"HR",pin:"272,61"
,offset:1,points:"277,63,272,62,274,65,272,65,270,62,269,63,269,62,271,62,272,61,273,60,275,61,276,61,277,62,277,63",zoneName:"CET"},{timezone:"Europe/Zaporozhye",country:"UA",pin:"299,59",offset:2,points:"298,59,298,59,298,58,299,58,302,59,301,60,299,61,299,61,298,59",zoneName:"EET"},{timezone:"Europe/Zurich",country:"CH",pin:"262,59",offset:1,points:"259,61,258,61,260,59,262,59,263,59,263,60,265,60,264,61,263,60,263,61,262,60,260,61,259,61,259,61",zoneName:"CET"},{timezone:"Indian/Antananarivo",country:"MG",pin:"316,151",offset:3,points:"305,160,324,142",zoneName:"EAT"},{timezone:"Indian/Chagos",country:"IO",pin:"351,135",offset:6,points:"344,140,356,127",zoneName:"IOT"},{timezone:"Indian/Christmas",country:"CX",pin:"397,139",offset:7,points:"392,145,402,134",zoneName:"CXT"},{timezone:"Indian/Cocos",country:"CC",pin:"385,142",offset:6.5,points:"379,147,390,136",zoneName:"CCT"},{timezone:"Indian/Comoro",country:"KM",pin:"310,141",offset:3,points:"305,147,317,136",zoneName:"EAT"},{timezone:"Indian/Kerguelen",country:"TF",pin:"348,194",offset:5,points:"320,194,358,177",zoneName:"TFT"},{timezone:"Indian/Mahe",country:"SC",pin:"327,131",offset:4,points:"314,144,328,125",zoneName:"SCT"},{timezone:"Indian/Maldives",country:"MV",pin:"352,119",offset:5,points:"346,126,357,115",zoneName:"MVT"},{timezone:"Indian/Mauritius",country:"MU",pin:"330,153",offset:4,points:"328,154,338,139",zoneName:"MUT"},{timezone:"Indian/Mayotte",country:"YT",pin:"313,143",offset:3,points:"308,148,318,138",zoneName:"EAT"},{timezone:"Indian/Reunion",country:"RE",pin:"327,154",offset:4,points:"322,160,333,149",zoneName:"RET"},{timezone:"Pacific/Apia",country:"WS",pin:"11,144",offset:14,points:"5,150,17,139",zoneName:"WSDT"},{timezone:"Pacific/Auckland",country:"NZ",pin:"493,176",offset:13,points:"485,190,481,189,484,186,487,185,490,181,491,182,492,182,492,182,492,182,492,183,490,185,490,186,488,187,487,189,485,190",zoneName:"NZDT"},{timezone:"Pacific/Auckland",country:"NZ",pin:"493,176",offset:13,points:"495,181,494,183,493,182,493,181,491,180,492,179,493,178,492,176,492,175,492,176,490,173,492,174,493,176,494,177,494,176,495,178,498,177,497,179,496,179,495,181",zoneName:"NZDT"},{timezone:"Pacific/Chatham",country:"NZ",pin:"5,186",offset:13.8,points:"-1,192,11,181",zoneName:"CHADT"},{timezone:"Pacific/Chuuk",country:"FM",pin:"461,115",offset:10,points:"442,123,464,106",zoneName:"CHUT"},{timezone:"Pacific/Easter",country:"CL",pin:"98,163",offset:-5,points:"93,168,103,158",zoneName:"EAST"},{timezone:"Pacific/Enderbury",country:"KI",pin:"12,129",offset:13,points:"3,137,18,124",zoneName:"PHOT"},{timezone:"Pacific/Fakaofo",country:"TK",pin:"12,138",offset:13,points:"5,143,17,132",zoneName:"TKT"},{timezone:"Pacific/Efate",country:"VU",pin:"484,150",offset:11,points:"476,153,491,143",zoneName:"VUT"},{timezone:"Pacific/Fiji",country:"FJ",pin:"498,150",offset:13,points:"1,149,1,149,1,149",zoneName:"FJST"},{timezone:"Pacific/Funafuti",country:"TV",pin:"499,137",offset:12,points:"490,143,505,128",zoneName:"TVT"},{timezone:"Pacific/Galapagos",country:"EC",pin:"126,126",offset:-6,points:"117,132,131,118",zoneName:"GALT"},{timezone:"Pacific/Gambier",country:"PF",pin:"63,157",offset:-9,points:"55,162,68,150",zoneName:"GAMT"},{timezone:"Pacific/Kwajalein",country:"MH",pin:"482,112",offset:12,points:"477,118,488,107",zoneName:"MHT"},{timezone:"Pacific/Guadalcanal",country:"SB",pin:"473,138",offset:11,points:"466,142,485,132",zoneName:"SBT"},{timezone:"Pacific/Guam",country:"GU",pin:"451,106",offset:10,points:"446,112,456,101",zoneName:"ChS
T"},{timezone:"Pacific/Honolulu",country:"US",pin:"31,95",offset:-10,points:"8,99,35,89",zoneName:"HST"},{timezone:"Pacific/Johnston",country:"UM",pin:"15,102",offset:-10,points:"10,107,20,97",zoneName:"HST"},{timezone:"Pacific/Kiritimati",country:"KI",pin:"31,122",offset:14,points:"27,141,41,118",zoneName:"LINT"},{timezone:"Pacific/Kosrae",country:"FM",pin:"476,118",offset:11,points:"471,123,481,113",zoneName:"KOST"},{timezone:"Pacific/Majuro",country:"MH",pin:"488,115",offset:12,points:"474,119,489,105",zoneName:"MHT"},{timezone:"Pacific/Midway",country:"UM",pin:"4,86",offset:-11,points:"-3,91,9,80",zoneName:"SST"},{timezone:"Pacific/Marquesas",country:"PF",pin:"56,138",offset:-9.5,points:"50,145,63,131",zoneName:"MART"},{timezone:"Pacific/Nauru",country:"NR",pin:"482,126",offset:12,points:"477,131,487,121",zoneName:"NRT"},{timezone:"Pacific/Niue",country:"NU",pin:"14,151",offset:-11,points:"9,157,19,146",zoneName:"NUT"},{timezone:"Pacific/Norfolk",country:"NF",pin:"483,165",offset:11.5,points:"478,170,488,160",zoneName:"NFT"},{timezone:"Pacific/Noumea",country:"NC",pin:"481,156",offset:11,points:"470,162,489,147",zoneName:"NCT"},{timezone:"Pacific/Pago_Pago",country:"AS",pin:"13,145",offset:-11,points:"7,150,20,135",zoneName:"SST"},{timezone:"Pacific/Palau",country:"PW",pin:"437,115",offset:9,points:"427,126,442,109",zoneName:"PWT"},{timezone:"Pacific/Pitcairn",country:"PN",pin:"69,160",offset:-8,points:"63,165,82,153",zoneName:"PST"},{timezone:"Pacific/Pohnpei",country:"FM",pin:"470,115",offset:11,points:"463,122,478,110",zoneName:"PONT"},{timezone:"Pacific/Port_Moresby",country:"PG",pin:"454,138",offset:10,points:"447,141,471,126",zoneName:"PGT"},{timezone:"Pacific/Rarotonga",country:"CK",pin:"28,154",offset:-10,points:"20,155,32,137",zoneName:"CKT"},{timezone:"Pacific/Saipan",country:"MP",pin:"452,104",offset:10,points:"446,110,458,91",zoneName:"ChST"},{timezone:"Pacific/Tahiti",country:"PF",pin:"42,149",offset:-10,points:"35,163,61,145",zoneName:"TAHT"},{timezone:"Pacific/Tarawa",country:"KI",pin:"490,123",offset:12,points:"485,134,496,115",zoneName:"GILT"},{timezone:"Pacific/Tongatapu",country:"TO",pin:"7,154",offset:13,points:"0,161,14,142",zoneName:"TOT"},{timezone:"Pacific/Wake",country:"UM",pin:"481,98",offset:12,points:"476,103,486,93",zoneName:"WAKT"},{timezone:"Pacific/Wallis",country:"WF",pin:"5,143",offset:12,points:"-3,150,10,138",zoneName:"WFT"}]}); \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/js/bootstrap-datetimepicker.min.js b/ambari-logsearch/ambari-logsearch-web/src/vendor/js/bootstrap-datetimepicker.min.js deleted file mode 100644 index 724db768990..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/vendor/js/bootstrap-datetimepicker.min.js +++ /dev/null @@ -1,2 +0,0 @@ -!function(a){"use strict";if("function"==typeof define&&define.amd)define(["jquery","moment"],a);else if("object"==typeof exports)module.exports=a(require("jquery"),require("moment"));else{if("undefined"==typeof jQuery)throw"bootstrap-datetimepicker requires jQuery to be loaded first";if("undefined"==typeof moment)throw"bootstrap-datetimepicker requires Moment.js to be loaded first";a(jQuery,moment)}}(function(a,b){"use strict";if(!b)throw new Error("bootstrap-datetimepicker requires Moment.js to be loaded first");var c=function(c,d){var 
e,f,g,h,i,j,k,l={},m=!0,n=!1,o=!1,p=0,q=[{clsName:"days",navFnc:"M",navStep:1},{clsName:"months",navFnc:"y",navStep:1},{clsName:"years",navFnc:"y",navStep:10},{clsName:"decades",navFnc:"y",navStep:100}],r=["days","months","years","decades"],s=["top","bottom","auto"],t=["left","right","auto"],u=["default","top","bottom"],v={up:38,38:"up",down:40,40:"down",left:37,37:"left",right:39,39:"right",tab:9,9:"tab",escape:27,27:"escape",enter:13,13:"enter",pageUp:33,33:"pageUp",pageDown:34,34:"pageDown",shift:16,16:"shift",control:17,17:"control",space:32,32:"space",t:84,84:"t",delete:46,46:"delete"},w={},x=function(){return void 0!==b.tz&&void 0!==d.timeZone&&null!==d.timeZone&&""!==d.timeZone},y=function(a){var c;return c=void 0===a||null===a?b():b.isDate(a)||b.isMoment(a)?b(a):x()?b.tz(a,j,d.useStrict,d.timeZone):b(a,j,d.useStrict),x()&&c.tz(d.timeZone),c},z=function(a){if("string"!=typeof a||a.length>1)throw new TypeError("isEnabled expects a single character string parameter");switch(a){case"y":return i.indexOf("Y")!==-1;case"M":return i.indexOf("M")!==-1;case"d":return i.toLowerCase().indexOf("d")!==-1;case"h":case"H":return i.toLowerCase().indexOf("h")!==-1;case"m":return i.indexOf("m")!==-1;case"s":return i.indexOf("s")!==-1;default:return!1}},A=function(){return z("h")||z("m")||z("s")},B=function(){return z("y")||z("M")||z("d")},C=function(){var b=a("").append(a("").append(a("").addClass("prev").attr("data-action","previous").append(a("").addClass(d.icons.previous))).append(a("").addClass("picker-switch").attr("data-action","pickerSwitch").attr("colspan",d.calendarWeeks?"6":"5")).append(a("").addClass("next").attr("data-action","next").append(a("").addClass(d.icons.next)))),c=a("").append(a("").append(a("").attr("colspan",d.calendarWeeks?"8":"7")));return[a("
    ").addClass("datepicker-days").append(a("").addClass("table-condensed").append(b).append(a(""))),a("
    ").addClass("datepicker-months").append(a("
    ").addClass("table-condensed").append(b.clone()).append(c.clone())),a("
    ").addClass("datepicker-years").append(a("
    ").addClass("table-condensed").append(b.clone()).append(c.clone())),a("
    ").addClass("datepicker-decades").append(a("
    ").addClass("table-condensed").append(b.clone()).append(c.clone()))]},D=function(){var b=a(""),c=a(""),e=a("");return z("h")&&(b.append(a("
    ").append(a("").attr({href:"#",tabindex:"-1",title:d.tooltips.incrementHour}).addClass("btn").attr("data-action","incrementHours").append(a("").addClass(d.icons.up)))),c.append(a("").append(a("").addClass("timepicker-hour").attr({"data-time-component":"hours",title:d.tooltips.pickHour}).attr("data-action","showHours"))),e.append(a("").append(a("").attr({href:"#",tabindex:"-1",title:d.tooltips.decrementHour}).addClass("btn").attr("data-action","decrementHours").append(a("").addClass(d.icons.down))))),z("m")&&(z("h")&&(b.append(a("").addClass("separator")),c.append(a("").addClass("separator").html(":")),e.append(a("").addClass("separator"))),b.append(a("").append(a("").attr({href:"#",tabindex:"-1",title:d.tooltips.incrementMinute}).addClass("btn").attr("data-action","incrementMinutes").append(a("").addClass(d.icons.up)))),c.append(a("").append(a("").addClass("timepicker-minute").attr({"data-time-component":"minutes",title:d.tooltips.pickMinute}).attr("data-action","showMinutes"))),e.append(a("").append(a("").attr({href:"#",tabindex:"-1",title:d.tooltips.decrementMinute}).addClass("btn").attr("data-action","decrementMinutes").append(a("").addClass(d.icons.down))))),z("s")&&(z("m")&&(b.append(a("").addClass("separator")),c.append(a("").addClass("separator").html(":")),e.append(a("").addClass("separator"))),b.append(a("").append(a("").attr({href:"#",tabindex:"-1",title:d.tooltips.incrementSecond}).addClass("btn").attr("data-action","incrementSeconds").append(a("").addClass(d.icons.up)))),c.append(a("").append(a("").addClass("timepicker-second").attr({"data-time-component":"seconds",title:d.tooltips.pickSecond}).attr("data-action","showSeconds"))),e.append(a("").append(a("").attr({href:"#",tabindex:"-1",title:d.tooltips.decrementSecond}).addClass("btn").attr("data-action","decrementSeconds").append(a("").addClass(d.icons.down))))),h||(b.append(a("").addClass("separator")),c.append(a("").append(a("").addClass("separator"))),a("
    ").addClass("timepicker-picker").append(a("").addClass("table-condensed").append([b,c,e]))},E=function(){var b=a("
    ").addClass("timepicker-hours").append(a("
    ").addClass("table-condensed")),c=a("
    ").addClass("timepicker-minutes").append(a("
    ").addClass("table-condensed")),d=a("
    ").addClass("timepicker-seconds").append(a("
    ").addClass("table-condensed")),e=[D()];return z("h")&&e.push(b),z("m")&&e.push(c),z("s")&&e.push(d),e},F=function(){var b=[];return d.showTodayButton&&b.push(a("
    ").append(a("").attr({"data-action":"today",title:d.tooltips.today}).append(a("").addClass(d.icons.today)))),!d.sideBySide&&B()&&A()&&b.push(a("").append(a("").attr({"data-action":"togglePicker",title:d.tooltips.selectTime}).append(a("").addClass(d.icons.time)))),d.showClear&&b.push(a("").append(a("").attr({"data-action":"clear",title:d.tooltips.clear}).append(a("").addClass(d.icons.clear)))),d.showClose&&b.push(a("").append(a("").attr({"data-action":"close",title:d.tooltips.close}).append(a("").addClass(d.icons.close)))),a("").addClass("table-condensed").append(a("").append(a("").append(b)))},G=function(){var b=a("
    ").addClass("bootstrap-datetimepicker-widget dropdown-menu"),c=a("
    ").addClass("datepicker").append(C()),e=a("
    ").addClass("timepicker").append(E()),f=a("
      ").addClass("list-unstyled"),g=a("
    • ").addClass("picker-switch"+(d.collapse?" accordion-toggle":"")).append(F());return d.inline&&b.removeClass("dropdown-menu"),h&&b.addClass("usetwentyfour"),z("s")&&!h&&b.addClass("wider"),d.sideBySide&&B()&&A()?(b.addClass("timepicker-sbs"),"top"===d.toolbarPlacement&&b.append(g),b.append(a("
      ").addClass("row").append(c.addClass("col-md-6")).append(e.addClass("col-md-6"))),"bottom"===d.toolbarPlacement&&b.append(g),b):("top"===d.toolbarPlacement&&f.append(g),B()&&f.append(a("
    • ").addClass(d.collapse&&A()?"collapse in":"").append(c)),"default"===d.toolbarPlacement&&f.append(g),A()&&f.append(a("
    • ").addClass(d.collapse&&B()?"collapse":"").append(e)),"bottom"===d.toolbarPlacement&&f.append(g),b.append(f))},H=function(){var b,e={};return b=c.is("input")||d.inline?c.data():c.find("input").data(),b.dateOptions&&b.dateOptions instanceof Object&&(e=a.extend(!0,e,b.dateOptions)),a.each(d,function(a){var c="date"+a.charAt(0).toUpperCase()+a.slice(1);void 0!==b[c]&&(e[a]=b[c])}),e},I=function(){var b,e=(n||c).position(),f=(n||c).offset(),g=d.widgetPositioning.vertical,h=d.widgetPositioning.horizontal;if(d.widgetParent)b=d.widgetParent.append(o);else if(c.is("input"))b=c.after(o).parent();else{if(d.inline)return void(b=c.append(o));b=c,c.children().first().after(o)}if("auto"===g&&(g=f.top+1.5*o.height()>=a(window).height()+a(window).scrollTop()&&o.height()+c.outerHeight()a(window).width()?"right":"left"),"top"===g?o.addClass("top").removeClass("bottom"):o.addClass("bottom").removeClass("top"),"right"===h?o.addClass("pull-right"):o.removeClass("pull-right"),"static"===b.css("position")&&(b=b.parents().filter(function(){return"static"!==a(this).css("position")}).first()),0===b.length)throw new Error("datetimepicker component should be placed within a non-static positioned container");o.css({top:"top"===g?"auto":e.top+c.outerHeight(),bottom:"top"===g?b.outerHeight()-(b===c?0:e.top):"auto",left:"left"===h?b===c?0:e.left:"auto",right:"left"===h?"auto":b.outerWidth()-c.outerWidth()-(b===c?0:e.left)})},J=function(a){"dp.change"===a.type&&(a.date&&a.date.isSame(a.oldDate)||!a.date&&!a.oldDate)||c.trigger(a)},K=function(a){"y"===a&&(a="YYYY"),J({type:"dp.update",change:a,viewDate:f.clone()})},L=function(a){o&&(a&&(k=Math.max(p,Math.min(3,k+a))),o.find(".datepicker > div").hide().filter(".datepicker-"+q[k].clsName).show())},M=function(){var b=a("
    "),c=f.clone().startOf("w").startOf("d");for(d.calendarWeeks===!0&&b.append(a(""),d.calendarWeeks&&c.append('"),j.push(c)),k=["day"],b.isBefore(f,"M")&&k.push("old"),b.isAfter(f,"M")&&k.push("new"),b.isSame(e,"d")&&!m&&k.push("active"),R(b,"d")||k.push("disabled"),b.isSame(y(),"d")&&k.push("today"),0!==b.day()&&6!==b.day()||k.push("weekend"),J({type:"dp.classify",date:b,classNames:k}),c.append('"),b.add(1,"d");h.find("tbody").empty().append(j),T(),U(),V()}},X=function(){var b=o.find(".timepicker-hours table"),c=f.clone().startOf("d"),d=[],e=a("");for(f.hour()>11&&!h&&c.hour(12);c.isSame(f,"d")&&(h||f.hour()<12&&c.hour()<12||f.hour()>11);)c.hour()%4===0&&(e=a(""),d.push(e)),e.append('"),c.add(1,"h");b.empty().append(d)},Y=function(){for(var b=o.find(".timepicker-minutes table"),c=f.clone().startOf("h"),e=[],g=a(""),h=1===d.stepping?5:d.stepping;f.isSame(c,"h");)c.minute()%(4*h)===0&&(g=a(""),e.push(g)),g.append('"),c.add(h,"m");b.empty().append(e)},Z=function(){for(var b=o.find(".timepicker-seconds table"),c=f.clone().startOf("m"),d=[],e=a("");f.isSame(c,"m");)c.second()%20===0&&(e=a(""),d.push(e)),e.append('"),c.add(5,"s");b.empty().append(d)},$=function(){var a,b,c=o.find(".timepicker span[data-time-component]");h||(a=o.find(".timepicker [data-action=togglePeriod]"),b=e.clone().add(e.hours()>=12?-12:12,"h"),a.text(e.format("A")),R(b,"h")?a.removeClass("disabled"):a.addClass("disabled")),c.filter("[data-time-component=hours]").text(e.format(h?"HH":"hh")),c.filter("[data-time-component=minutes]").text(e.format("mm")),c.filter("[data-time-component=seconds]").text(e.format("ss")),X(),Y(),Z()},_=function(){o&&(W(),$())},aa=function(a){var b=m?null:e;if(!a)return m=!0,g.val(""),c.data("date",""),J({type:"dp.change",date:!1,oldDate:b}),void _();if(a=a.clone().locale(d.locale),x()&&a.tz(d.timeZone),1!==d.stepping)for(a.minutes(Math.round(a.minutes()/d.stepping)*d.stepping).seconds(0);d.minDate&&a.isBefore(d.minDate);)a.add(d.stepping,"minutes");R(a)?(e=a,f=e.clone(),g.val(e.format(i)),c.data("date",e.format(i)),m=!1,_(),J({type:"dp.change",date:e.clone(),oldDate:b})):(d.keepInvalid?J({type:"dp.change",date:a,oldDate:b}):g.val(m?"":e.format(i)),J({type:"dp.error",date:a,oldDate:b}))},ba=function(){var b=!1;return o?(o.find(".collapse").each(function(){var c=a(this).data("collapse");return!c||!c.transitioning||(b=!0,!1)}),b?l:(n&&n.hasClass("btn")&&n.toggleClass("active"),o.hide(),a(window).off("resize",I),o.off("click","[data-action]"),o.off("mousedown",!1),o.remove(),o=!1,J({type:"dp.hide",date:e.clone()}),g.blur(),f=e.clone(),l)):l},ca=function(){aa(null)},da=function(a){return void 0===d.parseInputDate?(!b.isMoment(a)||a instanceof Date)&&(a=y(a)):a=d.parseInputDate(a),a},ea={next:function(){var a=q[k].navFnc;f.add(q[k].navStep,a),W(),K(a)},previous:function(){var a=q[k].navFnc;f.subtract(q[k].navStep,a),W(),K(a)},pickerSwitch:function(){L(1)},selectMonth:function(b){var c=a(b.target).closest("tbody").find("span").index(a(b.target));f.month(c),k===p?(aa(e.clone().year(f.year()).month(f.month())),d.inline||ba()):(L(-1),W()),K("M")},selectYear:function(b){var c=parseInt(a(b.target).text(),10)||0;f.year(c),k===p?(aa(e.clone().year(f.year())),d.inline||ba()):(L(-1),W()),K("YYYY")},selectDecade:function(b){var c=parseInt(a(b.target).data("selection"),10)||0;f.year(c),k===p?(aa(e.clone().year(f.year())),d.inline||ba()):(L(-1),W()),K("YYYY")},selectDay:function(b){var 
c=f.clone();a(b.target).is(".old")&&c.subtract(1,"M"),a(b.target).is(".new")&&c.add(1,"M"),aa(c.date(parseInt(a(b.target).text(),10))),A()||d.keepOpen||d.inline||ba()},incrementHours:function(){var a=e.clone().add(1,"h");R(a,"h")&&aa(a)},incrementMinutes:function(){var a=e.clone().add(d.stepping,"m");R(a,"m")&&aa(a)},incrementSeconds:function(){var a=e.clone().add(1,"s");R(a,"s")&&aa(a)},decrementHours:function(){var a=e.clone().subtract(1,"h");R(a,"h")&&aa(a)},decrementMinutes:function(){var a=e.clone().subtract(d.stepping,"m");R(a,"m")&&aa(a)},decrementSeconds:function(){var a=e.clone().subtract(1,"s");R(a,"s")&&aa(a)},togglePeriod:function(){aa(e.clone().add(e.hours()>=12?-12:12,"h"))},togglePicker:function(b){var c,e=a(b.target),f=e.closest("ul"),g=f.find(".in"),h=f.find(".collapse:not(.in)");if(g&&g.length){if(c=g.data("collapse"),c&&c.transitioning)return;g.collapse?(g.collapse("hide"),h.collapse("show")):(g.removeClass("in"),h.addClass("in")),e.is("span")?e.toggleClass(d.icons.time+" "+d.icons.date):e.find("span").toggleClass(d.icons.time+" "+d.icons.date)}},showPicker:function(){o.find(".timepicker > div:not(.timepicker-picker)").hide(),o.find(".timepicker .timepicker-picker").show()},showHours:function(){o.find(".timepicker .timepicker-picker").hide(),o.find(".timepicker .timepicker-hours").show()},showMinutes:function(){o.find(".timepicker .timepicker-picker").hide(),o.find(".timepicker .timepicker-minutes").show()},showSeconds:function(){o.find(".timepicker .timepicker-picker").hide(),o.find(".timepicker .timepicker-seconds").show()},selectHour:function(b){var c=parseInt(a(b.target).text(),10);h||(e.hours()>=12?12!==c&&(c+=12):12===c&&(c=0)),aa(e.clone().hours(c)),ea.showPicker.call(l)},selectMinute:function(b){aa(e.clone().minutes(parseInt(a(b.target).text(),10))),ea.showPicker.call(l)},selectSecond:function(b){aa(e.clone().seconds(parseInt(a(b.target).text(),10))),ea.showPicker.call(l)},clear:ca,today:function(){var a=y();R(a,"d")&&aa(a)},close:ba},fa=function(b){return!a(b.currentTarget).is(".disabled")&&(ea[a(b.currentTarget).data("action")].apply(l,arguments),!1)},ga=function(){var b,c={year:function(a){return a.month(0).date(1).hours(0).seconds(0).minutes(0)},month:function(a){return a.date(1).hours(0).seconds(0).minutes(0)},day:function(a){return a.hours(0).seconds(0).minutes(0)},hour:function(a){return a.seconds(0).minutes(0)},minute:function(a){return a.seconds(0)}};return g.prop("disabled")||!d.ignoreReadonly&&g.prop("readonly")||o?l:(void 0!==g.val()&&0!==g.val().trim().length?aa(da(g.val().trim())):m&&d.useCurrent&&(d.inline||g.is("input")&&0===g.val().trim().length)&&(b=y(),"string"==typeof d.useCurrent&&(b=c[d.useCurrent](b)),aa(b)),o=G(),M(),S(),o.find(".timepicker-hours").hide(),o.find(".timepicker-minutes").hide(),o.find(".timepicker-seconds").hide(),_(),L(),a(window).on("resize",I),o.on("click","[data-action]",fa),o.on("mousedown",!1),n&&n.hasClass("btn")&&n.toggleClass("active"),I(),o.show(),d.focusOnShow&&!g.is(":focus")&&g.focus(),J({type:"dp.show"}),l)},ha=function(){return o?ba():ga()},ia=function(a){var b,c,e,f,g=null,h=[],i={},j=a.which,k="p";w[j]=k;for(b in w)w.hasOwnProperty(b)&&w[b]===k&&(h.push(b),parseInt(b,10)!==j&&(i[b]=!0));for(b in d.keyBinds)if(d.keyBinds.hasOwnProperty(b)&&"function"==typeof d.keyBinds[b]&&(e=b.split(" "),e.length===h.length&&v[j]===e[e.length-1])){for(f=!0,c=e.length-2;c>=0;c--)if(!(v[e[c]]in 
i)){f=!1;break}if(f){g=d.keyBinds[b];break}}g&&(g.call(l,o),a.stopPropagation(),a.preventDefault())},ja=function(a){w[a.which]="r",a.stopPropagation(),a.preventDefault()},ka=function(b){var c=a(b.target).val().trim(),d=c?da(c):null;return aa(d),b.stopImmediatePropagation(),!1},la=function(){g.on({change:ka,blur:d.debug?"":ba,keydown:ia,keyup:ja,focus:d.allowInputToggle?ga:""}),c.is("input")?g.on({focus:ga}):n&&(n.on("click",ha),n.on("mousedown",!1))},ma=function(){g.off({change:ka,blur:blur,keydown:ia,keyup:ja,focus:d.allowInputToggle?ba:""}),c.is("input")?g.off({focus:ga}):n&&(n.off("click",ha),n.off("mousedown",!1))},na=function(b){var c={};return a.each(b,function(){var a=da(this);a.isValid()&&(c[a.format("YYYY-MM-DD")]=!0)}),!!Object.keys(c).length&&c},oa=function(b){var c={};return a.each(b,function(){c[this]=!0}),!!Object.keys(c).length&&c},pa=function(){var a=d.format||"L LT";i=a.replace(/(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g,function(a){var b=e.localeData().longDateFormat(a)||a;return b.replace(/(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g,function(a){return e.localeData().longDateFormat(a)||a})}),j=d.extraFormats?d.extraFormats.slice():[],j.indexOf(a)<0&&j.indexOf(i)<0&&j.push(i),h=i.toLowerCase().indexOf("a")<1&&i.replace(/\[.*?\]/g,"").indexOf("h")<1,z("y")&&(p=2),z("M")&&(p=1),z("d")&&(p=0),k=Math.max(p,k),m||aa(e)};if(l.destroy=function(){ba(),ma(),c.removeData("DateTimePicker"),c.removeData("date")},l.toggle=ha,l.show=ga,l.hide=ba,l.disable=function(){return ba(),n&&n.hasClass("btn")&&n.addClass("disabled"),g.prop("disabled",!0),l},l.enable=function(){return n&&n.hasClass("btn")&&n.removeClass("disabled"),g.prop("disabled",!1),l},l.ignoreReadonly=function(a){if(0===arguments.length)return d.ignoreReadonly;if("boolean"!=typeof a)throw new TypeError("ignoreReadonly () expects a boolean parameter");return d.ignoreReadonly=a,l},l.options=function(b){if(0===arguments.length)return a.extend(!0,{},d);if(!(b instanceof Object))throw new TypeError("options() options parameter should be an object");return a.extend(!0,d,b),a.each(d,function(a,b){if(void 0===l[a])throw new TypeError("option "+a+" is not recognized!");l[a](b)}),l},l.date=function(a){if(0===arguments.length)return m?null:e.clone();if(!(null===a||"string"==typeof a||b.isMoment(a)||a instanceof Date))throw new TypeError("date() parameter must be one of [null, string, moment or Date]");return aa(null===a?null:da(a)),l},l.format=function(a){if(0===arguments.length)return d.format;if("string"!=typeof a&&("boolean"!=typeof a||a!==!1))throw new TypeError("format() expects a string or boolean:false parameter "+a);return d.format=a,i&&pa(),l},l.timeZone=function(a){if(0===arguments.length)return d.timeZone;if("string"!=typeof a)throw new TypeError("newZone() expects a string parameter");return d.timeZone=a,l},l.dayViewHeaderFormat=function(a){if(0===arguments.length)return d.dayViewHeaderFormat;if("string"!=typeof a)throw new TypeError("dayViewHeaderFormat() expects a string parameter");return d.dayViewHeaderFormat=a,l},l.extraFormats=function(a){if(0===arguments.length)return d.extraFormats;if(a!==!1&&!(a instanceof Array))throw new TypeError("extraFormats() expects an array or false parameter");return d.extraFormats=a,j&&pa(),l},l.disabledDates=function(b){if(0===arguments.length)return d.disabledDates?a.extend({},d.disabledDates):d.disabledDates;if(!b)return d.disabledDates=!1,_(),l;if(!(b instanceof Array))throw new TypeError("disabledDates() expects an array parameter");return 
d.disabledDates=na(b),d.enabledDates=!1,_(),l},l.enabledDates=function(b){if(0===arguments.length)return d.enabledDates?a.extend({},d.enabledDates):d.enabledDates;if(!b)return d.enabledDates=!1,_(),l;if(!(b instanceof Array))throw new TypeError("enabledDates() expects an array parameter");return d.enabledDates=na(b),d.disabledDates=!1,_(),l},l.daysOfWeekDisabled=function(a){if(0===arguments.length)return d.daysOfWeekDisabled.splice(0);if("boolean"==typeof a&&!a)return d.daysOfWeekDisabled=!1,_(),l;if(!(a instanceof Array))throw new TypeError("daysOfWeekDisabled() expects an array parameter");if(d.daysOfWeekDisabled=a.reduce(function(a,b){return b=parseInt(b,10),b>6||b<0||isNaN(b)?a:(a.indexOf(b)===-1&&a.push(b),a)},[]).sort(),d.useCurrent&&!d.keepInvalid){for(var b=0;!R(e,"d");){if(e.add(1,"d"),31===b)throw"Tried 31 times to find a valid date";b++}aa(e)}return _(),l},l.maxDate=function(a){if(0===arguments.length)return d.maxDate?d.maxDate.clone():d.maxDate;if("boolean"==typeof a&&a===!1)return d.maxDate=!1,_(),l;"string"==typeof a&&("now"!==a&&"moment"!==a||(a=y()));var b=da(a);if(!b.isValid())throw new TypeError("maxDate() Could not parse date parameter: "+a);if(d.minDate&&b.isBefore(d.minDate))throw new TypeError("maxDate() date parameter is before options.minDate: "+b.format(i));return d.maxDate=b,d.useCurrent&&!d.keepInvalid&&e.isAfter(a)&&aa(d.maxDate),f.isAfter(b)&&(f=b.clone().subtract(d.stepping,"m")),_(),l},l.minDate=function(a){if(0===arguments.length)return d.minDate?d.minDate.clone():d.minDate;if("boolean"==typeof a&&a===!1)return d.minDate=!1,_(),l;"string"==typeof a&&("now"!==a&&"moment"!==a||(a=y()));var b=da(a);if(!b.isValid())throw new TypeError("minDate() Could not parse date parameter: "+a);if(d.maxDate&&b.isAfter(d.maxDate))throw new TypeError("minDate() date parameter is after options.maxDate: "+b.format(i));return d.minDate=b,d.useCurrent&&!d.keepInvalid&&e.isBefore(a)&&aa(d.minDate),f.isBefore(b)&&(f=b.clone().add(d.stepping,"m")),_(),l},l.defaultDate=function(a){if(0===arguments.length)return d.defaultDate?d.defaultDate.clone():d.defaultDate;if(!a)return d.defaultDate=!1,l;"string"==typeof a&&(a="now"===a||"moment"===a?y():y(a));var b=da(a);if(!b.isValid())throw new TypeError("defaultDate() Could not parse date parameter: "+a);if(!R(b))throw new TypeError("defaultDate() date passed is invalid according to component setup validations");return d.defaultDate=b,(d.defaultDate&&d.inline||""===g.val().trim())&&aa(d.defaultDate),l},l.locale=function(a){if(0===arguments.length)return d.locale;if(!b.localeData(a))throw new TypeError("locale() locale "+a+" is not loaded from moment locales!");return d.locale=a,e.locale(d.locale),f.locale(d.locale),i&&pa(),o&&(ba(),ga()),l},l.stepping=function(a){return 0===arguments.length?d.stepping:(a=parseInt(a,10),(isNaN(a)||a<1)&&(a=1),d.stepping=a,l)},l.useCurrent=function(a){var b=["year","month","day","hour","minute"];if(0===arguments.length)return d.useCurrent;if("boolean"!=typeof a&&"string"!=typeof a)throw new TypeError("useCurrent() expects a boolean or string parameter");if("string"==typeof a&&b.indexOf(a.toLowerCase())===-1)throw new TypeError("useCurrent() expects a string parameter of "+b.join(", "));return d.useCurrent=a,l},l.collapse=function(a){if(0===arguments.length)return d.collapse;if("boolean"!=typeof a)throw new TypeError("collapse() expects a boolean parameter");return d.collapse===a?l:(d.collapse=a,o&&(ba(),ga()),l)},l.icons=function(b){if(0===arguments.length)return a.extend({},d.icons);if(!(b instanceof 
Object))throw new TypeError("icons() expects parameter to be an Object");return a.extend(d.icons,b),o&&(ba(),ga()),l},l.tooltips=function(b){if(0===arguments.length)return a.extend({},d.tooltips);if(!(b instanceof Object))throw new TypeError("tooltips() expects parameter to be an Object");return a.extend(d.tooltips,b),o&&(ba(),ga()),l},l.useStrict=function(a){if(0===arguments.length)return d.useStrict;if("boolean"!=typeof a)throw new TypeError("useStrict() expects a boolean parameter");return d.useStrict=a,l},l.sideBySide=function(a){if(0===arguments.length)return d.sideBySide;if("boolean"!=typeof a)throw new TypeError("sideBySide() expects a boolean parameter");return d.sideBySide=a,o&&(ba(),ga()),l},l.viewMode=function(a){if(0===arguments.length)return d.viewMode;if("string"!=typeof a)throw new TypeError("viewMode() expects a string parameter");if(r.indexOf(a)===-1)throw new TypeError("viewMode() parameter must be one of ("+r.join(", ")+") value");return d.viewMode=a,k=Math.max(r.indexOf(a),p),L(),l},l.toolbarPlacement=function(a){if(0===arguments.length)return d.toolbarPlacement;if("string"!=typeof a)throw new TypeError("toolbarPlacement() expects a string parameter");if(u.indexOf(a)===-1)throw new TypeError("toolbarPlacement() parameter must be one of ("+u.join(", ")+") value");return d.toolbarPlacement=a,o&&(ba(),ga()),l},l.widgetPositioning=function(b){if(0===arguments.length)return a.extend({},d.widgetPositioning);if("[object Object]"!=={}.toString.call(b))throw new TypeError("widgetPositioning() expects an object variable");if(b.horizontal){if("string"!=typeof b.horizontal)throw new TypeError("widgetPositioning() horizontal variable must be a string");if(b.horizontal=b.horizontal.toLowerCase(),t.indexOf(b.horizontal)===-1)throw new TypeError("widgetPositioning() expects horizontal parameter to be one of ("+t.join(", ")+")");d.widgetPositioning.horizontal=b.horizontal}if(b.vertical){if("string"!=typeof b.vertical)throw new TypeError("widgetPositioning() vertical variable must be a string");if(b.vertical=b.vertical.toLowerCase(),s.indexOf(b.vertical)===-1)throw new TypeError("widgetPositioning() expects vertical parameter to be one of ("+s.join(", ")+")");d.widgetPositioning.vertical=b.vertical}return _(),l},l.calendarWeeks=function(a){if(0===arguments.length)return d.calendarWeeks;if("boolean"!=typeof a)throw new TypeError("calendarWeeks() expects parameter to be a boolean value");return d.calendarWeeks=a,_(),l},l.showTodayButton=function(a){if(0===arguments.length)return d.showTodayButton;if("boolean"!=typeof a)throw new TypeError("showTodayButton() expects a boolean parameter");return d.showTodayButton=a,o&&(ba(),ga()),l},l.showClear=function(a){if(0===arguments.length)return d.showClear;if("boolean"!=typeof a)throw new TypeError("showClear() expects a boolean parameter");return d.showClear=a,o&&(ba(),ga()),l},l.widgetParent=function(b){if(0===arguments.length)return d.widgetParent;if("string"==typeof b&&(b=a(b)),null!==b&&"string"!=typeof b&&!(b instanceof a))throw new TypeError("widgetParent() expects a string or a jQuery object parameter");return d.widgetParent=b,o&&(ba(),ga()),l},l.keepOpen=function(a){if(0===arguments.length)return d.keepOpen;if("boolean"!=typeof a)throw new TypeError("keepOpen() expects a boolean parameter");return d.keepOpen=a,l},l.focusOnShow=function(a){if(0===arguments.length)return d.focusOnShow;if("boolean"!=typeof a)throw new TypeError("focusOnShow() expects a boolean parameter");return 
d.focusOnShow=a,l},l.inline=function(a){if(0===arguments.length)return d.inline;if("boolean"!=typeof a)throw new TypeError("inline() expects a boolean parameter");return d.inline=a,l},l.clear=function(){return ca(),l},l.keyBinds=function(a){return 0===arguments.length?d.keyBinds:(d.keyBinds=a,l)},l.getMoment=function(a){return y(a)},l.debug=function(a){if("boolean"!=typeof a)throw new TypeError("debug() expects a boolean parameter");return d.debug=a,l},l.allowInputToggle=function(a){if(0===arguments.length)return d.allowInputToggle;if("boolean"!=typeof a)throw new TypeError("allowInputToggle() expects a boolean parameter");return d.allowInputToggle=a,l},l.showClose=function(a){if(0===arguments.length)return d.showClose;if("boolean"!=typeof a)throw new TypeError("showClose() expects a boolean parameter");return d.showClose=a,l},l.keepInvalid=function(a){if(0===arguments.length)return d.keepInvalid;if("boolean"!=typeof a)throw new TypeError("keepInvalid() expects a boolean parameter"); -return d.keepInvalid=a,l},l.datepickerInput=function(a){if(0===arguments.length)return d.datepickerInput;if("string"!=typeof a)throw new TypeError("datepickerInput() expects a string parameter");return d.datepickerInput=a,l},l.parseInputDate=function(a){if(0===arguments.length)return d.parseInputDate;if("function"!=typeof a)throw new TypeError("parseInputDate() sholud be as function");return d.parseInputDate=a,l},l.disabledTimeIntervals=function(b){if(0===arguments.length)return d.disabledTimeIntervals?a.extend({},d.disabledTimeIntervals):d.disabledTimeIntervals;if(!b)return d.disabledTimeIntervals=!1,_(),l;if(!(b instanceof Array))throw new TypeError("disabledTimeIntervals() expects an array parameter");return d.disabledTimeIntervals=b,_(),l},l.disabledHours=function(b){if(0===arguments.length)return d.disabledHours?a.extend({},d.disabledHours):d.disabledHours;if(!b)return d.disabledHours=!1,_(),l;if(!(b instanceof Array))throw new TypeError("disabledHours() expects an array parameter");if(d.disabledHours=oa(b),d.enabledHours=!1,d.useCurrent&&!d.keepInvalid){for(var c=0;!R(e,"h");){if(e.add(1,"h"),24===c)throw"Tried 24 times to find a valid date";c++}aa(e)}return _(),l},l.enabledHours=function(b){if(0===arguments.length)return d.enabledHours?a.extend({},d.enabledHours):d.enabledHours;if(!b)return d.enabledHours=!1,_(),l;if(!(b instanceof Array))throw new TypeError("enabledHours() expects an array parameter");if(d.enabledHours=oa(b),d.disabledHours=!1,d.useCurrent&&!d.keepInvalid){for(var c=0;!R(e,"h");){if(e.add(1,"h"),24===c)throw"Tried 24 times to find a valid date";c++}aa(e)}return _(),l},l.viewDate=function(a){if(0===arguments.length)return f.clone();if(!a)return f=e.clone(),l;if(!("string"==typeof a||b.isMoment(a)||a instanceof Date))throw new TypeError("viewDate() parameter must be one of [string, moment or Date]");return f=da(a),K(),l},c.is("input"))g=c;else if(g=c.find(d.datepickerInput),0===g.length)g=c.find("input");else if(!g.is("input"))throw new Error('CSS class "'+d.datepickerInput+'" cannot be applied to non input element');if(c.hasClass("input-group")&&(n=0===c.find(".datepickerbutton").length?c.find(".input-group-addon"):c.find(".datepickerbutton")),!d.inline&&!g.is("input"))throw new Error("Could not initialize DateTimePicker without an input element");return e=y(),f=e.clone(),a.extend(!0,d,H()),l.options(d),pa(),la(),g.prop("disabled")&&l.disable(),g.is("input")&&0!==g.val().trim().length?aa(da(g.val().trim())):d.defaultDate&&void 
0===g.attr("placeholder")&&aa(d.defaultDate),d.inline&&ga(),l};return a.fn.datetimepicker=function(b){b=b||{};var d,e=Array.prototype.slice.call(arguments,1),f=!0,g=["destroy","hide","show","toggle"];if("object"==typeof b)return this.each(function(){var d,e=a(this);e.data("DateTimePicker")||(d=a.extend(!0,{},a.fn.datetimepicker.defaults,b),e.data("DateTimePicker",c(e,d)))});if("string"==typeof b)return this.each(function(){var c=a(this),g=c.data("DateTimePicker");if(!g)throw new Error('bootstrap-datetimepicker("'+b+'") method was called on an element that is not using DateTimePicker');d=g[b].apply(g,e),f=d===g}),f||a.inArray(b,g)>-1?this:d;throw new TypeError("Invalid arguments for DateTimePicker: "+b)},a.fn.datetimepicker.defaults={timeZone:"",format:!1,dayViewHeaderFormat:"MMMM YYYY",extraFormats:!1,stepping:1,minDate:!1,maxDate:!1,useCurrent:!0,collapse:!0,locale:b.locale(),defaultDate:!1,disabledDates:!1,enabledDates:!1,icons:{time:"glyphicon glyphicon-time",date:"glyphicon glyphicon-calendar",up:"glyphicon glyphicon-chevron-up",down:"glyphicon glyphicon-chevron-down",previous:"glyphicon glyphicon-chevron-left",next:"glyphicon glyphicon-chevron-right",today:"glyphicon glyphicon-screenshot",clear:"glyphicon glyphicon-trash",close:"glyphicon glyphicon-remove"},tooltips:{today:"Go to today",clear:"Clear selection",close:"Close the picker",selectMonth:"Select Month",prevMonth:"Previous Month",nextMonth:"Next Month",selectYear:"Select Year",prevYear:"Previous Year",nextYear:"Next Year",selectDecade:"Select Decade",prevDecade:"Previous Decade",nextDecade:"Next Decade",prevCentury:"Previous Century",nextCentury:"Next Century",pickHour:"Pick Hour",incrementHour:"Increment Hour",decrementHour:"Decrement Hour",pickMinute:"Pick Minute",incrementMinute:"Increment Minute",decrementMinute:"Decrement Minute",pickSecond:"Pick Second",incrementSecond:"Increment Second",decrementSecond:"Decrement Second",togglePeriod:"Toggle Period",selectTime:"Select Time"},useStrict:!1,sideBySide:!1,daysOfWeekDisabled:!1,calendarWeeks:!1,viewMode:"days",toolbarPlacement:"default",showTodayButton:!1,showClear:!1,showClose:!1,widgetPositioning:{horizontal:"auto",vertical:"auto"},widgetParent:null,ignoreReadonly:!1,keepOpen:!1,focusOnShow:!0,inline:!1,keepInvalid:!1,datepickerInput:".datepickerinput",keyBinds:{up:function(a){if(a){var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")?this.date(b.clone().subtract(7,"d")):this.date(b.clone().add(this.stepping(),"m"))}},down:function(a){if(!a)return void this.show();var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")?this.date(b.clone().add(7,"d")):this.date(b.clone().subtract(this.stepping(),"m"))},"control up":function(a){if(a){var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")?this.date(b.clone().subtract(1,"y")):this.date(b.clone().add(1,"h"))}},"control down":function(a){if(a){var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")?this.date(b.clone().add(1,"y")):this.date(b.clone().subtract(1,"h"))}},left:function(a){if(a){var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")&&this.date(b.clone().subtract(1,"d"))}},right:function(a){if(a){var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")&&this.date(b.clone().add(1,"d"))}},pageUp:function(a){if(a){var b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")&&this.date(b.clone().subtract(1,"M"))}},pageDown:function(a){if(a){var 
b=this.date()||this.getMoment();a.find(".datepicker").is(":visible")&&this.date(b.clone().add(1,"M"))}},enter:function(){this.hide()},escape:function(){this.hide()},"control space":function(a){a&&a.find(".timepicker").is(":visible")&&a.find('.btn[data-action="togglePeriod"]').click()},t:function(){this.date(this.getMoment())},delete:function(){this.clear()}},debug:!1,allowInputToggle:!1,disabledTimeIntervals:!1,disabledHours:!1,enabledHours:!1,viewDate:!1},a.fn.datetimepicker}); \ No newline at end of file diff --git a/ambari-logsearch/ambari-logsearch-web/src/vendor/js/bootstrap-logsearch.min.js b/ambari-logsearch/ambari-logsearch-web/src/vendor/js/bootstrap-logsearch.min.js deleted file mode 100644 index 8d0fd8951ef..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/src/vendor/js/bootstrap-logsearch.min.js +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -"use strict";$(document).ready(function(){$(this).find('[data-toggle="collapseAccordion"]').off("click").on("click",function(n){var l=$(this);return l.siblings(".panel-body").slideToggle(500),l.children().children(".panel-toggle").toggleClass("fa-angle-down fa-angle-up"),n.stopPropagation(),!1})}); -"use strict";!function(e){e.fn.navigationBar=function(n){var t=e.extend({},e.fn.navigationBar.defaults,n);return this.each(function(){function n(){var n=window.location.pathname+window.location.hash;"/"!==n[n.length-1]&&(n+="/"),i.find("li a").each(function(t,a){var s=e(a),i=s.attr("data-href")||s.attr("href"),o=null==i||"/"===i[i.length-1]?i:i+"/";-1!==n.indexOf(o)&&-1===["","#"].indexOf(i)?s.parent().addClass("active"):s.parent().removeClass("active")})}function a(n){var a=e(n).parent(),s=t.activeClass,o=r+"."+s,l=f+"."+s;i.find(o).removeClass(s),i.find(l).removeClass(s),a.addClass(s)}var s=this,i=e(this).find(".navigation-bar-container"),o=e(this).find("[data-toggle="+t.navBarToggleDataAttr+"]"),l=e(this).find("[data-toggle="+t.subMenuNavToggleDataAttr+"]"),r=".side-nav-menu>li",f=".side-nav-menu>li>ul>li",u=e(this).find(".more-actions"),d=u.children(".dropdown-menu");l.each(function(n,t){return e(t).parent().addClass("has-sub-menu")}),t.fitHeight&&(e(this).addClass("navigation-bar-fit-height"),e(this).find(".side-nav-menu").on("DOMMouseScroll mousewheel",function(n){var t=e(this),a=this.scrollTop,s=this.scrollHeight,i=t.innerHeight(),o=n.originalEvent.wheelDelta,l=o>0,r=function(){return n.stopPropagation(),n.preventDefault(),n.returnValue=!1,!1};return!l&&-o>s-i-a?(t.scrollTop(s),r()):l&&o>a?(t.scrollTop(0),r()):void 0}));var 
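For reference, the vendor bundle removed above is the stock bootstrap-datetimepicker jQuery plugin, whose loader shim requires jQuery and Moment.js to be present first. A minimal usage sketch, assuming the plugin and its dependencies are already loaded on the page; the element id and option values are illustrative only, although every option and method name used here appears in the defaults object and API of the deleted file:

// Illustrative initialization of the deleted vendor datetimepicker.
// '#query-time-range' is a hypothetical element id, not taken from the project.
declare const $: any;  // jQuery is provided globally by the vendor scripts bundle

$('#query-time-range').datetimepicker({
  format: 'YYYY-MM-DD HH:mm',   // Moment.js format tokens (default is the locale's "L LT")
  useCurrent: true,             // seed the picker with "now" when the input is empty
  showTodayButton: true,
  showClear: true,
  sideBySide: true,             // show the date and time panes together
  icons: { time: 'fa fa-clock-o', date: 'fa fa-calendar' }  // override the glyphicon defaults
});

// Programmatic access goes through the element's data object:
const picker = $('#query-time-range').data('DateTimePicker');
picker.maxDate(picker.getMoment());  // e.g. disallow future timestamps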
c=i.width();t.moveLeftContent&&e(t.content).css("margin-left",c),t.moveLeftFooter&&e(t.footer).css("margin-left",c),t.handlePopState&&(n(),e(window).bind("popstate",n)),e(r+">a").on("click",function(){a(this)}),e(f+">a").on("click",function(){a(this),e(this).parent().parent().parent().addClass(t.activeClass)}),l.off("click").on("click",function(n){if(i.hasClass("collapsed"))return!1;var a=e(this);return a.siblings(".sub-menu").slideToggle(600,function(){var e=a.parent();return e.find("ul").is(":visible")?e.removeClass("collapsed"):e.addClass("collapsed")}),a.children(".toggle-icon").toggleClass(t.menuLeftClass+" "+t.menuDownClass),n.stopPropagation(),!1}),t.fitHeight&&u.on("click",function(){var n=e(this),t=e(".side-nav-header");d.css({top:n.offset().top-t.offset().top+20+"px",left:n.offset().left+"px"})}),i.children(".side-nav-menu").scroll(function(){u.removeClass("open")}),o.click(function(){return i.toggleClass("collapsed").promise().done(function(){var n=i.find("ul.sub-menu"),a=i.find(".side-nav-menu>li");i.hasClass("collapsed")?(n.hide(),u.hide(),a.hover(function(){e(this).find("ul.sub-menu").show();var n=e(this),a=e(".side-nav-header");t.fitHeight&&e(this).find("ul.sub-menu").css({position:"fixed",top:n.offset().top-a.offset().top+"px",left:"50px"})},function(){e(this).find("ul.sub-menu").hide()})):(n.show().each(function(n,t){return e(t).parent().removeClass("collapsed")}),a.unbind("mouseenter mouseleave"),i.find(".toggle-icon").removeClass(t.menuLeftClass).addClass(t.menuDownClass),u.show(),t.fitHeight&&e(s).find("ul.sub-menu").css({position:"relative",top:0,left:0})),i.on("transitionend",function(){var n=i.width();t.moveLeftContent&&e(t.content).css("margin-left",n),t.moveLeftFooter&&e(t.footer).css("margin-left",n)}),o.find("span").toggleClass(t.collapseNavBarClass+" "+t.expandNavBarClass)}),!1})})},e.fn.navigationBar.defaults={handlePopState:!0,fitHeight:!1,content:"#main",footer:"footer",moveLeftContent:!0,moveLeftFooter:!0,menuLeftClass:"glyphicon-menu-right",menuDownClass:"glyphicon-menu-down",collapseNavBarClass:"fa-angle-double-left",expandNavBarClass:"fa-angle-double-right",activeClass:"active",navBarToggleDataAttr:"collapse-side-nav",subMenuNavToggleDataAttr:"collapse-sub-menu"}}(jQuery); diff --git a/ambari-logsearch/ambari-logsearch-web/tsconfig.json b/ambari-logsearch/ambari-logsearch-web/tsconfig.json deleted file mode 100644 index 3d23f3f31bd..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/tsconfig.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "compileOnSave": false, - "compilerOptions": { - "outDir": "./dist/out-tsc", - "baseUrl": "src", - "paths": { - "@app/*": [ - "app/*" - ], - "@envs/*": [ - "environments/*" - ], - "@vendor/*": [ - "vendor/*" - ], - "@modules/*": [ - "app/modules/*" - ], - "@mockdata/*": [ - "mockdata/*" - ] - }, - "sourceMap": true, - "declaration": false, - "moduleResolution": "node", - "emitDecoratorMetadata": true, - "experimentalDecorators": true, - "target": "es5", - "typeRoots": [ - "node_modules/@types" - ], - "lib": [ - "es2016", - "dom" - ] - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/tslint.json b/ambari-logsearch/ambari-logsearch-web/tslint.json deleted file mode 100644 index 9113f1368b2..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/tslint.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "rulesDirectory": [ - "node_modules/codelyzer" - ], - "rules": { - "callable-types": true, - "class-name": true, - "comment-format": [ - true, - "check-space" - ], - "curly": true, - "eofline": true, - "forin": true, - 
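The compilerOptions.paths block in the tsconfig.json deleted above maps the @app, @envs, @vendor, @modules and @mockdata prefixes onto directories under "baseUrl": "src", so application code can avoid deep relative imports. A small sketch of what the aliases enable; the imported file and symbol names are hypothetical placeholders, and only the alias-to-directory mapping comes from the config:

// Hypothetical module names; each alias resolves relative to "baseUrl": "src".
import { environment } from '@envs/environment';          // -> src/environments/environment.ts
import { LogsService } from '@app/services/logs.service'; // -> src/app/services/logs.service.ts
import { mockLogEntries } from '@mockdata/logs';           // -> src/mockdata/logs.ts

// Equivalent deep-relative form the aliases replace, e.g. from a component two levels down:
// import { LogsService } from '../../services/logs.service';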
"import-blacklist": [true, "rxjs"], - "import-spacing": true, - "indent": [ - true, - "spaces" - ], - "interface-over-type-literal": true, - "label-position": true, - "max-line-length": [ - true, - 140 - ], - "member-access": false, - "member-ordering": [ - true, - "static-before-instance", - "variables-before-functions" - ], - "no-arg": true, - "no-bitwise": true, - "no-console": [ - true, - "debug", - "info", - "time", - "timeEnd", - "trace" - ], - "no-construct": true, - "no-debugger": true, - "no-duplicate-variable": true, - "no-empty": false, - "no-empty-interface": true, - "no-eval": true, - "no-inferrable-types": [true, "ignore-params"], - "no-shadowed-variable": true, - "no-string-literal": false, - "no-string-throw": true, - "no-switch-case-fall-through": true, - "no-trailing-whitespace": true, - "no-unused-expression": true, - "no-use-before-declare": true, - "no-var-keyword": true, - "object-literal-sort-keys": false, - "one-line": [ - true, - "check-open-brace", - "check-catch", - "check-else", - "check-whitespace" - ], - "prefer-const": true, - "quotemark": [ - true, - "single" - ], - "radix": true, - "semicolon": [ - "always" - ], - "triple-equals": [ - true, - "allow-null-check" - ], - "typedef-whitespace": [ - true, - { - "call-signature": "nospace", - "index-signature": "nospace", - "parameter": "nospace", - "property-declaration": "nospace", - "variable-declaration": "nospace" - } - ], - "typeof-compare": true, - "unified-signatures": true, - "variable-name": false, - "whitespace": [ - true, - "check-branch", - "check-decl", - "check-operator", - "check-separator", - "check-type" - ], - - "directive-selector": [true, "attribute", "app", "camelCase"], - "component-selector": [true, "element", "app", "kebab-case"], - "use-input-property-decorator": true, - "use-output-property-decorator": true, - "use-host-property-decorator": true, - "no-input-rename": true, - "no-output-rename": true, - "use-life-cycle-interface": true, - "use-pipe-transform-interface": true, - "component-class-suffix": true, - "directive-class-suffix": true, - "no-access-missing-member": true, - "templates-use-public": true, - "invoke-injectable": true - } -} diff --git a/ambari-logsearch/ambari-logsearch-web/webpack.config.js b/ambari-logsearch/ambari-logsearch-web/webpack.config.js deleted file mode 100644 index 106932b4e76..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/webpack.config.js +++ /dev/null @@ -1,497 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -const fs = require('fs'); -const path = require('path'); -const ConcatPlugin = require('webpack-concat-plugin'); -const CopyWebpackPlugin = require('copy-webpack-plugin'); -const ProgressPlugin = require('webpack/lib/ProgressPlugin'); -const CircularDependencyPlugin = require('circular-dependency-plugin'); -const HtmlWebpackPlugin = require('html-webpack-plugin'); -const autoprefixer = require('autoprefixer'); -const postcssUrl = require('postcss-url'); -const cssnano = require('cssnano'); - -const {NoEmitOnErrorsPlugin, SourceMapDevToolPlugin, NamedModulesPlugin} = require('webpack'); -const {InsertConcatAssetsWebpackPlugin, NamedLazyChunksWebpackPlugin, BaseHrefWebpackPlugin} = require('@angular/cli/plugins/webpack'); -const {CommonsChunkPlugin} = require('webpack').optimize; -const {AotPlugin} = require('@ngtools/webpack'); - -const nodeModules = path.join(process.cwd(), 'node_modules'); -const realNodeModules = fs.realpathSync(nodeModules); -const genDirNodeModules = path.join(process.cwd(), 'src', '$$_gendir', 'node_modules'); -const entryPoints = ["inline","polyfills","sw-register","styles","vendor","main"]; -const minimizeCss = false; -const baseHref = ""; -const deployUrl = ""; -const postcssPlugins = function () { - // safe settings based on: https://github.com/ben-eb/cssnano/issues/358#issuecomment-283696193 - const importantCommentRe = /@preserve|@license|[@#]\s*source(?:Mapping)?URL|^!/i; - const minimizeOptions = { - autoprefixer: false, - safe: true, - mergeLonghand: false, - discardComments: { remove: (comment) => !importantCommentRe.test(comment) } - }; - return [ - postcssUrl({ - url: (URL) => { - // Only convert root relative URLs, which CSS-Loader won't process into require(). - if (!URL.startsWith('/') || URL.startsWith('//')) { - return URL; - } - if (deployUrl.match(/:\/\//)) { - // If deployUrl contains a scheme, ignore baseHref use deployUrl as is. - return `${deployUrl.replace(/\/$/, '')}${URL}`; - } - else if (baseHref.match(/:\/\//)) { - // If baseHref contains a scheme, include it as is. - return baseHref.replace(/\/$/, '') + - `/${deployUrl}/${URL}`.replace(/\/\/+/g, '/'); - } - else { - // Join together base-href, deploy-url and the original URL. - // Also dedupe multiple slashes into single ones. - return `/${baseHref}/${deployUrl}/${URL}`.replace(/\/\/+/g, '/'); - } - } - }), - autoprefixer(), - ].concat(minimizeCss ? 
[cssnano(minimizeOptions)] : []); - }; - -const resourcesDirName = 'resources'; - -const styles = [ - path.join(process.cwd(), "node_modules/bootstrap/dist/css/bootstrap.min.css"), - path.join(process.cwd(), "node_modules/font-awesome/css/font-awesome.min.css"), - path.join(process.cwd(), "src/vendor/css/bootstrap-logsearch.min.css"), - path.join(process.cwd(), "src/vendor/css/bootstrap-datetimepicker.min.css"), - path.join(process.cwd(), "src/styles.less") -]; - -module.exports = { - "resolve": { - "extensions": [ - ".ts", - ".js", - ".less" - ], - "modules": [ - "node_modules" - ], - "symlinks": true - }, - "resolveLoader": { - "modules": [ - "node_modules" - ] - }, - "entry": { - "main": [ - "./src/main.ts" - ], - "polyfills": [ - "./src/polyfills.ts" - ], - "styles": styles - }, - "output": { - "path": path.join(process.cwd(), "dist"), - "publicPath": "", - "filename": `${resourcesDirName}/[name].bundle.js`, - "chunkFilename": "[id].chunk.js" - }, - "module": { - "rules": [ - { - "enforce": "pre", - "test": /\.js$/, - "loader": "source-map-loader", - "exclude": [ - /(\\|\/)node_modules(\\|\/)/ - ] - }, - { - "test": /\.html$/, - "loader": "raw-loader" - }, - { - "test": /\.(eot|svg|cur)$/, - "loader": `file-loader?name=${resourcesDirName}/[name].[ext]` - }, - { - "test": /\.(jpg|png|webp|gif|otf|ttf|woff|woff2|ani)$/, - "loader": `url-loader?name=${resourcesDirName}/[name].[ext]&limit=10000` - }, - { - "exclude": styles, - "test": /\.css$/, - "use": [ - "exports-loader?module.exports.toString()", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - } - ] - }, - { - "exclude": styles, - "test": /\.scss$|\.sass$/, - "use": [ - "exports-loader?module.exports.toString()", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - }, - { - "loader": "sass-loader", - "options": { - "sourceMap": false, - "precision": 8, - "includePaths": [] - } - } - ] - }, - { - "exclude": styles, - "test": /\.less$/, - "use": [ - "exports-loader?module.exports.toString()", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - }, - { - "loader": "less-loader", - "options": { - "sourceMap": false, - "paths": [ - "./node_modules", - "./src/app/modules", - "./src/app/components" - ] - } - } - ] - }, - { - "exclude": styles, - "test": /\.styl$/, - "use": [ - "exports-loader?module.exports.toString()", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - }, - { - "loader": "stylus-loader", - "options": { - "sourceMap": false, - "paths": [] - } - } - ] - }, - { - "include": styles, - "test": /\.css$/, - "use": [ - "style-loader", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - } - ] - }, - { - "include": styles, - "test": /\.scss$|\.sass$/, - "use": [ - "style-loader", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": 
"postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - }, - { - "loader": "sass-loader", - "options": { - "sourceMap": false, - "precision": 8, - "includePaths": [] - } - } - ] - }, - { - "include": styles, - "test": /\.less$/, - "use": [ - "style-loader", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - }, - { - "loader": "less-loader", - "options": { - "sourceMap": false, - "paths": ["./node_modules"] - } - } - ] - }, - { - "include": styles, - "test": /\.styl$/, - "use": [ - "style-loader", - { - "loader": "css-loader", - "options": { - "sourceMap": false, - "importLoaders": 1 - } - }, - { - "loader": "postcss-loader", - "options": { - "ident": "postcss", - "plugins": postcssPlugins - } - }, - { - "loader": "stylus-loader", - "options": { - "sourceMap": false, - "paths": [] - } - } - ] - }, - { - "test": /\.ts$/, - "loader": "@ngtools/webpack" - } - ] - }, - "plugins": [ - new NoEmitOnErrorsPlugin(), - new ConcatPlugin({ - "uglify": false, - "sourceMap": true, - "name": "scripts", - "fileName": `${resourcesDirName}/[name].bundle.js`, - "filesToConcat": [ - "node_modules/jquery/dist/jquery.min.js", - "node_modules/bootstrap/dist/js/bootstrap.min.js", - "src/vendor/js/bootstrap-logsearch.min.js" - ] - }), - new InsertConcatAssetsWebpackPlugin([ - "scripts" - ]), - new CopyWebpackPlugin([ - { - "context": "src/", - "to": resourcesDirName, - "from": { - "glob": "assets/**/*", - "dot": true - } - }, - { - "context": "src/", - "to": "favicon.ico", - "from": { - "glob": "favicon.ico", - "dot": true - } - } - ], { - "ignore": [ - ".gitkeep" - ], - "debug": "warning" - }), - new ProgressPlugin(), - new CircularDependencyPlugin({ - "exclude": /(\\|\/)node_modules(\\|\/)/, - "failOnError": false - }), - new NamedLazyChunksWebpackPlugin(), - new HtmlWebpackPlugin({ - "template": "./src/index.html", - "filename": "index.html", - "hash": false, - "inject": true, - "compile": true, - "favicon": false, - "minify": false, - "cache": true, - "showErrors": true, - "chunks": "all", - "excludeChunks": [], - "title": "Webpack App", - "xhtml": true, - "chunksSortMode": function sort(left, right) { - let leftIndex = entryPoints.indexOf(left.names[0]); - let rightindex = entryPoints.indexOf(right.names[0]); - if (leftIndex > rightindex) { - return 1; - } - else if (leftIndex < rightindex) { - return -1; - } - else { - return 0; - } - } - }), - new BaseHrefWebpackPlugin({}), - new CommonsChunkPlugin({ - "name": [ - "inline" - ], - "minChunks": null - }), - new CommonsChunkPlugin({ - "name": [ - "vendor" - ], - "minChunks": (module) => { - return module.resource - && (module.resource.startsWith(nodeModules) - || module.resource.startsWith(genDirNodeModules) - || module.resource.startsWith(realNodeModules)); - }, - "chunks": [ - "main" - ] - }), - new SourceMapDevToolPlugin({ - "filename": "[file].map[query]", - "moduleFilenameTemplate": "[resource-path]", - "fallbackModuleFilenameTemplate": "[resource-path]?[hash]", - "sourceRoot": "webpack:///" - }), - new CommonsChunkPlugin({ - "name": [ - "main" - ], - "minChunks": 2, - "async": "common" - }), - new NamedModulesPlugin({}), - new AotPlugin({ - "mainPath": "main.ts", - "replaceExport": false, - "hostReplacementPaths": { - "environments/environment.ts": `environments/environment${process.env.NODE_ENV === "production" ? 
".prod" : ""}.ts` - }, - "exclude": [], - "tsConfigPath": "src/tsconfig.app.json", - "skipCodeGeneration": true - }) - ], - "node": { - "fs": "empty", - "global": true, - "crypto": "empty", - "tls": "empty", - "net": "empty", - "process": true, - "module": false, - "clearImmediate": false, - "setImmediate": false - }, - "devServer": { - "historyApiFallback": true - } -}; diff --git a/ambari-logsearch/ambari-logsearch-web/yarn.lock b/ambari-logsearch/ambari-logsearch-web/yarn.lock deleted file mode 100644 index ae4bb5a377c..00000000000 --- a/ambari-logsearch/ambari-logsearch-web/yarn.lock +++ /dev/null @@ -1,6819 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -"@angular-devkit/build-optimizer@~0.0.18": - version "0.0.18" - resolved "https://registry.yarnpkg.com/@angular-devkit/build-optimizer/-/build-optimizer-0.0.18.tgz#bdf507a37403b07fc72437d792d4e2541b0f13ec" - dependencies: - loader-utils "^1.1.0" - source-map "^0.5.6" - typescript "^2.3.3" - -"@angular-devkit/core@0.0.12": - version "0.0.12" - resolved "https://registry.yarnpkg.com/@angular-devkit/core/-/core-0.0.12.tgz#bf044c8ed38b8e2ec2648bbc6fd0d92be99112da" - -"@angular-devkit/schematics@~0.0.21": - version "0.0.21" - resolved "https://registry.yarnpkg.com/@angular-devkit/schematics/-/schematics-0.0.21.tgz#ea4b3b6ce8fc86f50de9df3402a039469369a7eb" - dependencies: - "@angular-devkit/core" "0.0.12" - "@ngtools/json-schema" "^1.1.0" - minimist "^1.2.0" - rxjs "^5.4.2" - -"@angular/animations@^4.0.0": - version "4.4.6" - resolved "https://registry.yarnpkg.com/@angular/animations/-/animations-4.4.6.tgz#fa661899a8a4e38cb7c583c7a5c97ce65d592a35" - dependencies: - tslib "^1.7.1" - -"@angular/cli@^1.4.3": - version "1.4.3" - resolved "https://registry.yarnpkg.com/@angular/cli/-/cli-1.4.3.tgz#8389d4eeadfe34abb1d16e53836416a8f8430fb3" - dependencies: - "@angular-devkit/build-optimizer" "~0.0.18" - "@angular-devkit/schematics" "~0.0.21" - "@ngtools/json-schema" "1.1.0" - "@ngtools/webpack" "1.7.1" - "@schematics/angular" "~0.0.30" - autoprefixer "^6.5.3" - chalk "^2.0.1" - circular-dependency-plugin "^3.0.0" - common-tags "^1.3.1" - copy-webpack-plugin "^4.0.1" - core-object "^3.1.0" - css-loader "^0.28.1" - cssnano "^3.10.0" - denodeify "^1.2.1" - ember-cli-string-utils "^1.0.0" - exports-loader "^0.6.3" - extract-text-webpack-plugin "3.0.0" - file-loader "^0.10.0" - fs-extra "^4.0.0" - get-caller-file "^1.0.0" - glob "^7.0.3" - heimdalljs "^0.2.4" - heimdalljs-logger "^0.1.9" - html-webpack-plugin "^2.29.0" - istanbul-instrumenter-loader "^2.0.0" - karma-source-map-support "^1.2.0" - less "^2.7.2" - less-loader "^4.0.5" - license-webpack-plugin "^1.0.0" - lodash "^4.11.1" - memory-fs "^0.4.1" - node-modules-path "^1.0.0" - nopt "^4.0.1" - opn "~5.1.0" - portfinder "~1.0.12" - postcss-loader "^1.3.3" - postcss-url "^5.1.2" - raw-loader "^0.5.1" - resolve "^1.1.7" - rxjs "^5.4.2" - sass-loader "^6.0.3" - semver "^5.1.0" - silent-error "^1.0.0" - source-map-loader "^0.2.0" - source-map-support "^0.4.1" - style-loader "^0.13.1" - stylus "^0.54.5" - stylus-loader "^3.0.1" - typescript ">=2.0.0 <2.6.0" - url-loader "^0.5.7" - webpack "~3.5.5" - webpack-concat-plugin "1.4.0" - webpack-dev-middleware "~1.12.0" - webpack-dev-server "~2.7.1" - webpack-merge "^4.1.0" - zone.js "^0.8.14" - optionalDependencies: - node-sass "^4.3.0" - -"@angular/common@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/common/-/common-4.4.3.tgz#f92ac68b02bec5f0e6d3603a843294dc96c96074" - 
dependencies: - tslib "^1.7.1" - -"@angular/compiler-cli@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/compiler-cli/-/compiler-cli-4.4.3.tgz#183af81f141186b8d660b06429592d40b7540a4a" - dependencies: - "@angular/tsc-wrapped" "4.4.3" - minimist "^1.2.0" - reflect-metadata "^0.1.2" - -"@angular/compiler@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/compiler/-/compiler-4.4.3.tgz#8f01163dad7db3408497d99d387554b6b185ad66" - dependencies: - tslib "^1.7.1" - -"@angular/core@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/core/-/core-4.4.3.tgz#e71d2b07beaacbab48ab7f51d4e2286ea5d70e15" - dependencies: - tslib "^1.7.1" - -"@angular/forms@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/forms/-/forms-4.4.3.tgz#25b41bbab58bf1da872411c8517c10d7c5373d8e" - dependencies: - tslib "^1.7.1" - -"@angular/http@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/http/-/http-4.4.3.tgz#b557ed24144aacc44b136cd477e84d2f57808903" - dependencies: - tslib "^1.7.1" - -"@angular/platform-browser-dynamic@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/platform-browser-dynamic/-/platform-browser-dynamic-4.4.3.tgz#e41ddd8252432775310eab5940cdd8df0618f084" - dependencies: - tslib "^1.7.1" - -"@angular/platform-browser@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/platform-browser/-/platform-browser-4.4.3.tgz#23f9a45bd3dc7f44d97877fbf8e6032decfc9dcb" - dependencies: - tslib "^1.7.1" - -"@angular/router@^4.0.0": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/router/-/router-4.4.3.tgz#26cc94775a3860946aeaf1c2e8f60f4d44e90991" - dependencies: - tslib "^1.7.1" - -"@angular/tsc-wrapped@4.4.3": - version "4.4.3" - resolved "https://registry.yarnpkg.com/@angular/tsc-wrapped/-/tsc-wrapped-4.4.3.tgz#2d3f38210a1d4db03fc86dcf1e095812b85cd119" - dependencies: - tsickle "^0.21.0" - -"@ngrx/core@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@ngrx/core/-/core-1.2.0.tgz#882b46abafa2e0e6d887cb71a1b2c2fa3e6d0dc6" - -"@ngrx/store-devtools@3.2.4": - version "3.2.4" - resolved "https://registry.yarnpkg.com/@ngrx/store-devtools/-/store-devtools-3.2.4.tgz#2ce4d13bf34848a9e51ec87e3b125ed67b51e550" - -"@ngrx/store@^2.2.3": - version "2.2.3" - resolved "https://registry.yarnpkg.com/@ngrx/store/-/store-2.2.3.tgz#e7bd1149f1c44208f1cc4744353f0f98a0f1f57b" - -"@ngtools/json-schema@1.1.0", "@ngtools/json-schema@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@ngtools/json-schema/-/json-schema-1.1.0.tgz#c3a0c544d62392acc2813a42c8a0dc6f58f86922" - -"@ngtools/webpack@1.7.1", "@ngtools/webpack@^1.7.1": - version "1.7.1" - resolved "https://registry.yarnpkg.com/@ngtools/webpack/-/webpack-1.7.1.tgz#383ddd689845cf42fc755975f6440f75535f5016" - dependencies: - enhanced-resolve "^3.1.0" - loader-utils "^1.0.2" - magic-string "^0.22.3" - source-map "^0.5.6" - -"@ngx-translate/core@^6.0.1": - version "6.0.1" - resolved "https://registry.yarnpkg.com/@ngx-translate/core/-/core-6.0.1.tgz#7c7a80077feb994fc815b67a72065af04d394efe" - -"@ngx-translate/http-loader@^0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@ngx-translate/http-loader/-/http-loader-0.0.3.tgz#8346c8d2d6f630254601029668f17abe2afe8a9b" - -"@schematics/angular@~0.0.30": - version "0.0.33" - resolved "https://registry.yarnpkg.com/@schematics/angular/-/angular-0.0.33.tgz#bc0b28356af46fe9ec64495588ee61503fd34ce5" - 
-"@types/d3-array@*": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-1.2.0.tgz#9b1fc3202fc1a9f7da0f2873bd38b443137a9d34" - -"@types/d3-axis@*": - version "1.0.9" - resolved "https://registry.yarnpkg.com/@types/d3-axis/-/d3-axis-1.0.9.tgz#62ce7bc8d04354298cda57f3f1d1f856ad69b89a" - dependencies: - "@types/d3-selection" "*" - -"@types/d3-brush@*": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@types/d3-brush/-/d3-brush-1.0.7.tgz#05c30440f4d537fd23f976b0e6c4ba223001ef45" - dependencies: - "@types/d3-selection" "*" - -"@types/d3-chord@*": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@types/d3-chord/-/d3-chord-1.0.6.tgz#0589eb97a3191f4edaf17b7bde498462890ce1ec" - -"@types/d3-collection@*": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-collection/-/d3-collection-1.0.5.tgz#bb1f3aa97cdc8d881645541b9d6cf87edfee9bc3" - -"@types/d3-color@*": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-1.0.5.tgz#cad755f0fc6de7b70fa6e5e08afa81ef4c2248de" - -"@types/d3-dispatch@*": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-dispatch/-/d3-dispatch-1.0.5.tgz#f1f9187b538ecb05157569d8dc2f70dfb04f1b52" - -"@types/d3-drag@*": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-drag/-/d3-drag-1.1.0.tgz#9105e35ca58aa0c4783f3ce83082bcb24ccb6960" - dependencies: - "@types/d3-selection" "*" - -"@types/d3-dsv@*": - version "1.0.30" - resolved "https://registry.yarnpkg.com/@types/d3-dsv/-/d3-dsv-1.0.30.tgz#78e0dddde4283566f463e51551a97a63c170d5a8" - -"@types/d3-ease@*": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-1.0.7.tgz#93a301868be9e15061f3d44343b1ab3f8acb6f09" - -"@types/d3-force@*": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@types/d3-force/-/d3-force-1.0.7.tgz#8e3c533697143ebb70275d56840206e8ba789185" - -"@types/d3-format@*": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@types/d3-format/-/d3-format-1.2.0.tgz#3e128efc9e6dd09df8fd21f1d981ab68e140a8a2" - -"@types/d3-geo@*": - version "1.6.3" - resolved "https://registry.yarnpkg.com/@types/d3-geo/-/d3-geo-1.6.3.tgz#21b501d1fe224d88877f39f84cb8c9dd8aa1bf28" - dependencies: - "@types/geojson" "*" - -"@types/d3-hierarchy@*": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-hierarchy/-/d3-hierarchy-1.1.0.tgz#50f1ee052840638035cbdd4acab1fc3470905907" - -"@types/d3-interpolate@*": - version "1.1.6" - resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-1.1.6.tgz#64041b15c9c032c348da1b22baabc59fa4d16136" - dependencies: - "@types/d3-color" "*" - -"@types/d3-path@*": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-1.0.6.tgz#c1a7d2dc07b295fdd1c84dabe4404df991b48693" - -"@types/d3-polygon@*": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-polygon/-/d3-polygon-1.0.5.tgz#35ad54ed84c39d7e9f1252b6535be600be6cace2" - -"@types/d3-quadtree@*": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-quadtree/-/d3-quadtree-1.0.5.tgz#1ce1e659eae4530df0cb127f297f1741a367a82e" - -"@types/d3-queue@*": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-queue/-/d3-queue-3.0.5.tgz#3e4cbe2aff61db6a0b2b8c4800299e4ec6acc850" - -"@types/d3-random@*": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-random/-/d3-random-1.1.0.tgz#2dd08f1159c70719270e4a7c834af85c8b88d2c3" - -"@types/d3-request@*": - version "1.0.2" - 
resolved "https://registry.yarnpkg.com/@types/d3-request/-/d3-request-1.0.2.tgz#db9db8154f47816584706c6e6f702be66f22f4be" - dependencies: - "@types/d3-dsv" "*" - -"@types/d3-scale-chromatic@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-scale-chromatic/-/d3-scale-chromatic-1.1.0.tgz#6a5033ed1b89b7bad38f5f085a4f16695f07fdf0" - -"@types/d3-scale@*": - version "1.0.10" - resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-1.0.10.tgz#8c5c1dca54a159eed042b46719dbb3bdb7e8c842" - dependencies: - "@types/d3-time" "*" - -"@types/d3-selection@*": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-selection/-/d3-selection-1.1.0.tgz#59b88f10d2cff7d9ffd7fe986b3aaef3de048224" - -"@types/d3-shape@*": - version "1.2.1" - resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-1.2.1.tgz#cac2d9f0122f173220c32c8c152dc42ee9349df2" - dependencies: - "@types/d3-path" "*" - -"@types/d3-time-format@*": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-2.0.5.tgz#1d4c5ba77ed5352b10c7fce062c883382f1e16e0" - -"@types/d3-time@*": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-1.0.7.tgz#4266d7c9be15fa81256a88d1d052d61cd8dc572c" - -"@types/d3-timer@*": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-1.0.6.tgz#786d4e20731adf03af2c5df6c86fe29667fe429b" - -"@types/d3-transition@*": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-transition/-/d3-transition-1.1.0.tgz#74475d4a8f8a0944a517d5ef861970cc30287e40" - dependencies: - "@types/d3-selection" "*" - -"@types/d3-voronoi@*": - version "1.1.6" - resolved "https://registry.yarnpkg.com/@types/d3-voronoi/-/d3-voronoi-1.1.6.tgz#b52252c1d61972e7c751135890aea112d5dadc6d" - -"@types/d3-zoom@*": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@types/d3-zoom/-/d3-zoom-1.5.0.tgz#21f690b25a8419fd1bcc95ac629cefdfb462c70f" - dependencies: - "@types/d3-interpolate" "*" - "@types/d3-selection" "*" - -"@types/d3@^4.10.0": - version "4.10.1" - resolved "https://registry.yarnpkg.com/@types/d3/-/d3-4.10.1.tgz#a888ac8780ac241d770b2025b3d7e379c4d417f0" - dependencies: - "@types/d3-array" "*" - "@types/d3-axis" "*" - "@types/d3-brush" "*" - "@types/d3-chord" "*" - "@types/d3-collection" "*" - "@types/d3-color" "*" - "@types/d3-dispatch" "*" - "@types/d3-drag" "*" - "@types/d3-dsv" "*" - "@types/d3-ease" "*" - "@types/d3-force" "*" - "@types/d3-format" "*" - "@types/d3-geo" "*" - "@types/d3-hierarchy" "*" - "@types/d3-interpolate" "*" - "@types/d3-path" "*" - "@types/d3-polygon" "*" - "@types/d3-quadtree" "*" - "@types/d3-queue" "*" - "@types/d3-random" "*" - "@types/d3-request" "*" - "@types/d3-scale" "*" - "@types/d3-selection" "*" - "@types/d3-shape" "*" - "@types/d3-time" "*" - "@types/d3-time-format" "*" - "@types/d3-timer" "*" - "@types/d3-transition" "*" - "@types/d3-voronoi" "*" - "@types/d3-zoom" "*" - -"@types/geojson@*": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-1.0.2.tgz#b02d10ab028e2928ac592a051aaa4981a1941d03" - -"@types/jasmine@2.5.38": - version "2.5.38" - resolved "https://registry.yarnpkg.com/@types/jasmine/-/jasmine-2.5.38.tgz#a4379124c4921d4e21de54ec74669c9e9b356717" - -"@types/jquery@^1.10.33": - version "1.10.33" - resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-1.10.33.tgz#6c4c279f8d06839b184950432e880e0a469a64a1" - -"@types/moment-timezone@^0.2.34": - version "0.2.35" - resolved 
"https://registry.yarnpkg.com/@types/moment-timezone/-/moment-timezone-0.2.35.tgz#3fbbcb035e66aa5589a9198a6eec8d4dd9a701a5" - dependencies: - moment ">=2.14.0" - -"@types/moment@^2.13.0": - version "2.13.0" - resolved "https://registry.yarnpkg.com/@types/moment/-/moment-2.13.0.tgz#604ebd189bc3bc34a1548689404e61a2a4aac896" - dependencies: - moment "*" - -"@types/node@^6.0.46": - version "6.0.73" - resolved "https://registry.yarnpkg.com/@types/node/-/node-6.0.73.tgz#85dc4bb6f125377c75ddd2519a1eeb63f0a4ed70" - -"@types/node@~6.0.60": - version "6.0.88" - resolved "https://registry.yarnpkg.com/@types/node/-/node-6.0.88.tgz#f618f11a944f6a18d92b5c472028728a3e3d4b66" - -"@types/q@^0.0.32": - version "0.0.32" - resolved "https://registry.yarnpkg.com/@types/q/-/q-0.0.32.tgz#bd284e57c84f1325da702babfc82a5328190c0c5" - -"@types/selenium-webdriver@^2.53.35", "@types/selenium-webdriver@~2.53.39": - version "2.53.42" - resolved "https://registry.yarnpkg.com/@types/selenium-webdriver/-/selenium-webdriver-2.53.42.tgz#74cb77fb6052edaff2a8984ddafd88d419f25cac" - -abbrev@1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" - -accepts@1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.3.tgz#c3ca7434938648c3e0d9c1e328dd68b622c284ca" - dependencies: - mime-types "~2.1.11" - negotiator "0.6.1" - -accepts@~1.3.3: - version "1.3.4" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.4.tgz#86246758c7dd6d21a6474ff084a4740ec05eb21f" - dependencies: - mime-types "~2.1.16" - negotiator "0.6.1" - -acorn-dynamic-import@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz#c752bd210bef679501b6c6cb7fc84f8f47158cc4" - dependencies: - acorn "^4.0.3" - -acorn@^4.0.3: - version "4.0.13" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" - -acorn@^5.0.0: - version "5.1.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.1.2.tgz#911cb53e036807cf0fa778dc5d370fbd864246d7" - -adm-zip@0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.4.tgz#a61ed5ae6905c3aea58b3a657d25033091052736" - -adm-zip@^0.4.7: - version "0.4.7" - resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.7.tgz#8606c2cbf1c426ce8c8ec00174447fd49b6eafc1" - -after@0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" - -agent-base@2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-2.1.0.tgz#193455e4347bca6b05847cb81e939bb325446da8" - dependencies: - extend "~3.0.0" - semver "~5.0.1" - -ajv-keywords@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.0.tgz#a296e17f7bfae7c1ce4f7e0de53d29cb32162df0" - -ajv@^4.9.1: - version "4.11.8" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" - dependencies: - co "^4.6.0" - json-stable-stringify "^1.0.1" - -ajv@^5.0.0, ajv@^5.1.5: - version "5.2.2" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.2.2.tgz#47c68d69e86f5d953103b0074a9430dc63da5e39" - dependencies: - co "^4.6.0" - fast-deep-equal "^1.0.0" - json-schema-traverse "^0.3.0" - json-stable-stringify "^1.0.1" - -align-text@^0.1.1, align-text@^0.1.3: - version "0.1.4" - resolved 
"https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" - dependencies: - kind-of "^3.0.2" - longest "^1.0.1" - repeat-string "^1.5.2" - -alphanum-sort@^1.0.1, alphanum-sort@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3" - -amdefine@>=0.0.4: - version "1.0.1" - resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" - -angular-in-memory-web-api@^0.3.1: - version "0.3.2" - resolved "https://registry.yarnpkg.com/angular-in-memory-web-api/-/angular-in-memory-web-api-0.3.2.tgz#8836d9e2534d37b728f3cb5a1caf6fe1e7fbbecd" - -angular-moment-timezone@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/angular-moment-timezone/-/angular-moment-timezone-0.2.1.tgz#b2c1d9dd0e90558483b4da8db277bee4dbdbf6d1" - dependencies: - "@angular/common" "^4.0.0" - "@angular/compiler" "^4.0.0" - "@angular/core" "^4.0.0" - "@angular/forms" "^4.0.0" - "@angular/http" "^4.0.0" - "@angular/platform-browser" "^4.0.0" - "@angular/platform-browser-dynamic" "^4.0.0" - "@angular/router" "^4.0.0" - "@types/moment-timezone" "^0.2.34" - angular2-moment "^1.3.3" - core-js "^2.4.1" - moment "^2.18.1" - moment-timezone "^0.5.13" - rxjs "^5.1.0" - zone.js "^0.8.4" - -angular-pipes@^6.5.3: - version "6.5.3" - resolved "https://registry.yarnpkg.com/angular-pipes/-/angular-pipes-6.5.3.tgz#6bed37c51ebc2adaf3412663bfe25179d0489b02" - -angular2-moment@^1.3.3: - version "1.4.0" - resolved "https://registry.yarnpkg.com/angular2-moment/-/angular2-moment-1.4.0.tgz#3d59c1ebc28934fcfe9b888ab461e261724987e8" - dependencies: - moment "^2.16.0" - -angular2-moment@^1.4.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/angular2-moment/-/angular2-moment-1.7.0.tgz#6e620430cc21efc65ed3a68b5c09f3f3ef7a8256" - dependencies: - moment "^2.16.0" - -angular2-notifications@0.4.46: - version "0.4.46" - resolved "https://registry.yarnpkg.com/angular2-notifications/-/angular2-notifications-0.4.46.tgz#d868cfad43c4da3c1490a57a96d637a5eb96ce69" - -ansi-align@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-2.0.0.tgz#c36aeccba563b89ceb556f3690f0b1d9e3547f7f" - dependencies: - string-width "^2.0.0" - -ansi-html@0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" - -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - -ansi-styles@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.0.tgz#c159b8d5be0f9e5a6f346dab94f16ce022161b88" - dependencies: - color-convert "^1.9.0" - -any-promise@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" - -anymatch@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.0.tgz#a3e52fa39168c825ff57b0248126ce5a8ff95507" - dependencies: - arrify "^1.0.0" - micromatch "^2.1.5" - -app-root-path@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.0.1.tgz#cd62dcf8e4fd5a417efc664d2e5b10653c651b46" - 
-append-transform@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" - dependencies: - default-require-extensions "^1.0.0" - -aproba@^1.0.3: - version "1.1.1" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.1.tgz#95d3600f07710aa0e9298c726ad5ecf2eacbabab" - -are-we-there-yet@~1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d" - dependencies: - delegates "^1.0.0" - readable-stream "^2.0.6" - -argparse@^1.0.7: - version "1.0.9" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" - dependencies: - sprintf-js "~1.0.2" - -arr-diff@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" - dependencies: - arr-flatten "^1.0.1" - -arr-flatten@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.0.3.tgz#a274ed85ac08849b6bd7847c4580745dc51adfb1" - -array-find-index@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" - -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - -array-flatten@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.1.tgz#426bb9da84090c1838d812c8150af20a8331e296" - -array-includes@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.0.3.tgz#184b48f62d92d7452bb31b323165c7f8bd02266d" - dependencies: - define-properties "^1.1.2" - es-abstract "^1.7.0" - -array-slice@^0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5" - -array-union@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" - dependencies: - array-uniq "^1.0.1" - -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" - -array-unique@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" - -arraybuffer.slice@0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz#f33b2159f0532a3f3107a272c0ccfbd1ad2979ca" - -arrify@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" - -asap@~2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" - -asn1.js@^4.0.0: - version "4.9.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.9.1.tgz#48ba240b45a9280e94748990ba597d216617fd40" - dependencies: - bn.js "^4.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -asn1@~0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - -assert-plus@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" - -assert@^1.1.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/assert/-/assert-1.4.1.tgz#99912d591836b5a6f5b345c0f07eefc08fc65d91" - dependencies: - util "0.10.3" - -async-each@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" - -async-foreach@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/async-foreach/-/async-foreach-0.1.3.tgz#36121f845c0578172de419a97dbeb1d16ec34542" - -async@^0.9.0: - version "0.9.2" - resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" - -async@^1.4.0, async@^1.5.2: - version "1.5.2" - resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" - -async@^2.1.2, async@^2.1.5, async@^2.4.1: - version "2.5.0" - resolved "https://registry.yarnpkg.com/async/-/async-2.5.0.tgz#843190fd6b7357a0b9e1c956edddd5ec8462b54d" - dependencies: - lodash "^4.14.0" - -async@^2.1.4: - version "2.4.1" - resolved "https://registry.yarnpkg.com/async/-/async-2.4.1.tgz#62a56b279c98a11d0987096a01cc3eeb8eb7bbd7" - dependencies: - lodash "^4.14.0" - -async@~0.2.6: - version "0.2.10" - resolved "https://registry.yarnpkg.com/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1" - -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - -autoprefixer@^6.3.1, autoprefixer@^6.5.3: - version "6.7.7" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-6.7.7.tgz#1dbd1c835658e35ce3f9984099db00585c782014" - dependencies: - browserslist "^1.7.6" - caniuse-db "^1.0.30000634" - normalize-range "^0.1.2" - num2fraction "^1.2.2" - postcss "^5.2.16" - postcss-value-parser "^3.2.3" - -aws-sign2@~0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" - -aws4@^1.2.1: - version "1.6.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" - -babel-code-frame@^6.11.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" - dependencies: - chalk "^1.1.3" - esutils "^2.0.2" - js-tokens "^3.0.2" - -babel-code-frame@^6.20.0, babel-code-frame@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4" - dependencies: - chalk "^1.1.0" - esutils "^2.0.2" - js-tokens "^3.0.0" - -babel-generator@^6.18.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.24.1.tgz#e715f486c58ded25649d888944d52aa07c5d9497" - dependencies: - babel-messages "^6.23.0" - babel-runtime "^6.22.0" - babel-types "^6.24.1" - detect-indent "^4.0.0" - jsesc "^1.3.0" - lodash "^4.2.0" - source-map "^0.5.0" - trim-right "^1.0.1" - -babel-messages@^6.23.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" - dependencies: - babel-runtime "^6.22.0" - -babel-runtime@^6.18.0: - version "6.26.0" - 
resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" - dependencies: - core-js "^2.4.0" - regenerator-runtime "^0.11.0" - -babel-runtime@^6.22.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.23.0.tgz#0a9489f144de70efb3ce4300accdb329e2fc543b" - dependencies: - core-js "^2.4.0" - regenerator-runtime "^0.10.0" - -babel-template@^6.16.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.24.1.tgz#04ae514f1f93b3a2537f2a0f60a5a45fb8308333" - dependencies: - babel-runtime "^6.22.0" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - babylon "^6.11.0" - lodash "^4.2.0" - -babel-traverse@^6.18.0, babel-traverse@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.24.1.tgz#ab36673fd356f9a0948659e7b338d5feadb31695" - dependencies: - babel-code-frame "^6.22.0" - babel-messages "^6.23.0" - babel-runtime "^6.22.0" - babel-types "^6.24.1" - babylon "^6.15.0" - debug "^2.2.0" - globals "^9.0.0" - invariant "^2.2.0" - lodash "^4.2.0" - -babel-types@^6.18.0, babel-types@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.24.1.tgz#a136879dc15b3606bda0d90c1fc74304c2ff0975" - dependencies: - babel-runtime "^6.22.0" - esutils "^2.0.2" - lodash "^4.2.0" - to-fast-properties "^1.0.1" - -babylon@^6.11.0, babylon@^6.13.0, babylon@^6.15.0: - version "6.17.1" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.1.tgz#17f14fddf361b695981fe679385e4f1c01ebd86f" - -babylon@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" - -backo2@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" - -balanced-match@^0.4.1, balanced-match@^0.4.2: - version "0.4.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838" - -base64-arraybuffer@0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" - -base64-js@^1.0.2: - version "1.2.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.1.tgz#a91947da1f4a516ea38e5b4ec0ec3773675e0886" - -base64id@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" - -batch@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - -bcrypt-pbkdf@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" - dependencies: - tweetnacl "^0.14.3" - -better-assert@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" - dependencies: - callsite "1.0.0" - -big.js@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.1.3.tgz#4cada2193652eb3ca9ec8e55c9015669c9806978" - -binary-extensions@^1.0.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.8.0.tgz#48ec8d16df4377eae5fa5884682480af4d95c774" - -blob@0.0.4: - version "0.0.4" - resolved 
"https://registry.yarnpkg.com/blob/-/blob-0.0.4.tgz#bcf13052ca54463f30f9fc7e95b9a47630a94921" - -block-stream@*: - version "0.0.9" - resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" - dependencies: - inherits "~2.0.0" - -blocking-proxy@0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/blocking-proxy/-/blocking-proxy-0.0.5.tgz#462905e0dcfbea970f41aa37223dda9c07b1912b" - dependencies: - minimist "^1.2.0" - -bluebird@^2.10.2: - version "2.11.0" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-2.11.0.tgz#534b9033c022c9579c56ba3b3e5a5caafbb650e1" - -bluebird@^3.3.0, bluebird@^3.4.7: - version "3.5.0" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.0.tgz#791420d7f551eea2897453a8a77653f96606d67c" - -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: - version "4.11.8" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" - -body-parser@^1.12.4: - version "1.17.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.17.2.tgz#f8892abc8f9e627d42aedafbca66bf5ab99104ee" - dependencies: - bytes "2.4.0" - content-type "~1.0.2" - debug "2.6.7" - depd "~1.1.0" - http-errors "~1.6.1" - iconv-lite "0.4.15" - on-finished "~2.3.0" - qs "6.4.0" - raw-body "~2.2.0" - type-is "~1.6.15" - -bonjour@^3.5.0: - version "3.5.0" - resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" - dependencies: - array-flatten "^2.1.0" - deep-equal "^1.0.1" - dns-equal "^1.0.0" - dns-txt "^2.0.2" - multicast-dns "^6.0.1" - multicast-dns-service-types "^1.1.0" - -boolbase@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - -boom@2.x.x: - version "2.10.1" - resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f" - dependencies: - hoek "2.x.x" - -bootstrap@^3.3.7: - version "3.3.7" - resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-3.3.7.tgz#5a389394549f23330875a3b150656574f8a9eb71" - -boxen@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/boxen/-/boxen-1.1.0.tgz#b1b69dd522305e807a99deee777dbd6e5167b102" - dependencies: - ansi-align "^2.0.0" - camelcase "^4.0.0" - chalk "^1.1.1" - cli-boxes "^1.0.0" - string-width "^2.0.0" - term-size "^0.1.0" - widest-line "^1.0.0" - -brace-expansion@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.7.tgz#3effc3c50e000531fb720eaff80f0ae8ef23cf59" - dependencies: - balanced-match "^0.4.1" - concat-map "0.0.1" - -braces@^0.1.2: - version "0.1.5" - resolved "https://registry.yarnpkg.com/braces/-/braces-0.1.5.tgz#c085711085291d8b75fdd74eab0f8597280711e6" - dependencies: - expand-range "^0.1.0" - -braces@^1.8.2: - version "1.8.5" - resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" - dependencies: - expand-range "^1.8.1" - preserve "^0.2.0" - repeat-element "^1.1.2" - -brorand@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - -browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.0.8" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.0.8.tgz#c8fa3b1b7585bb7ba77c5560b60996ddec6d5309" - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - 
evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -browserify-cipher@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.0.tgz#9988244874bf5ed4e28da95666dcd66ac8fc363a" - dependencies: - browserify-aes "^1.0.4" - browserify-des "^1.0.0" - evp_bytestokey "^1.0.0" - -browserify-des@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.0.tgz#daa277717470922ed2fe18594118a175439721dd" - dependencies: - cipher-base "^1.0.1" - des.js "^1.0.0" - inherits "^2.0.1" - -browserify-rsa@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" - dependencies: - bn.js "^4.1.0" - randombytes "^2.0.1" - -browserify-sign@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" - dependencies: - bn.js "^4.1.1" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.2" - elliptic "^6.0.0" - inherits "^2.0.1" - parse-asn1 "^5.0.0" - -browserify-zlib@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.1.4.tgz#bb35f8a519f600e0fa6b8485241c979d0141fb2d" - dependencies: - pako "~0.2.0" - -browserslist@^1.3.6, browserslist@^1.5.2, browserslist@^1.7.6: - version "1.7.7" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-1.7.7.tgz#0bd76704258be829b2398bb50e4b62d1a166b0b9" - dependencies: - caniuse-db "^1.0.30000639" - electron-to-chromium "^1.2.7" - -buffer-indexof@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" - -buffer-shims@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/buffer-shims/-/buffer-shims-1.0.0.tgz#9978ce317388c649ad8793028c3477ef044a8b51" - -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - -buffer@^4.3.0: - version "4.9.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298" - dependencies: - base64-js "^1.0.2" - ieee754 "^1.1.4" - isarray "^1.0.0" - -builtin-modules@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" - -builtin-status-codes@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" - -bytes@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.4.0.tgz#7d97196f9d5baf7f6935e25985549edd2a6c2339" - -bytes@2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.5.0.tgz#4c9423ea2d252c270c41b2bdefeff9bb6b62c06a" - -callsite@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" - -camel-case@3.0.x: - version "3.0.0" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" - dependencies: - no-case "^2.2.0" - upper-case "^1.1.1" - -camelcase-keys@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" - dependencies: - camelcase "^2.0.0" 
- map-obj "^1.0.0" - -camelcase@^1.0.2: - version "1.2.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" - -camelcase@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" - -camelcase@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" - -camelcase@^4.0.0, camelcase@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" - -caniuse-api@^1.5.2: - version "1.6.1" - resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-1.6.1.tgz#b534e7c734c4f81ec5fbe8aca2ad24354b962c6c" - dependencies: - browserslist "^1.3.6" - caniuse-db "^1.0.30000529" - lodash.memoize "^4.1.2" - lodash.uniq "^4.5.0" - -caniuse-db@^1.0.30000529, caniuse-db@^1.0.30000634, caniuse-db@^1.0.30000639: - version "1.0.30000726" - resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000726.tgz#9bb742f8d026a62df873bc03c06843d2255b60d7" - -capture-stack-trace@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.0.tgz#4a6fa07399c26bba47f0b2496b4d0fb408c5550d" - -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - -center-align@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" - dependencies: - align-text "^0.1.3" - lazy-cache "^1.0.3" - -chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.1.0.tgz#ac5becf14fa21b99c6c92ca7a7d7cfd5b17e743e" - dependencies: - ansi-styles "^3.1.0" - escape-string-regexp "^1.0.5" - supports-color "^4.0.0" - -charenc@~0.0.1: - version "0.0.2" - resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667" - -chokidar@^1.4.1, chokidar@^1.6.0, chokidar@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" - dependencies: - anymatch "^1.3.0" - async-each "^1.0.0" - glob-parent "^2.0.0" - inherits "^2.0.1" - is-binary-path "^1.0.0" - is-glob "^2.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.0.0" - optionalDependencies: - fsevents "^1.0.0" - -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -circular-dependency-plugin@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/circular-dependency-plugin/-/circular-dependency-plugin-3.0.0.tgz#9b68692e35b0e3510998d0164b6ae5011bea5760" - -clap@^1.0.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/clap/-/clap-1.2.0.tgz#59c90fe3e137104746ff19469a27a634ff68c857" - dependencies: - chalk "^1.1.3" - -clean-css@4.1.x: - version 
"4.1.8" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.1.8.tgz#061455b2494a750ac98f46d8d5ebb17c679ea9d1" - dependencies: - source-map "0.5.x" - -cli-boxes@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143" - -cliui@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" - dependencies: - center-align "^0.1.1" - right-align "^0.1.1" - wordwrap "0.0.2" - -cliui@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - wrap-ansi "^2.0.0" - -clone-deep@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-0.3.0.tgz#348c61ae9cdbe0edfe053d91ff4cc521d790ede8" - dependencies: - for-own "^1.0.0" - is-plain-object "^2.0.1" - kind-of "^3.2.2" - shallow-clone "^0.1.2" - -clone@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.2.tgz#260b7a99ebb1edfe247538175f783243cb19d149" - -clone@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.1.tgz#d217d1e961118e3ac9a4b8bba3285553bf647cdb" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - -coa@~1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/coa/-/coa-1.0.4.tgz#a9ef153660d6a86a8bdec0289a5c684d217432fd" - dependencies: - q "^1.1.2" - -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - -codelyzer@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/codelyzer/-/codelyzer-2.0.1.tgz#d0f7121f67a8424c92d21d3b31f3640b83def9ed" - dependencies: - app-root-path "^2.0.1" - css-selector-tokenizer "^0.7.0" - cssauron "^1.4.0" - semver-dsl "^1.0.1" - source-map "^0.5.6" - sprintf-js "^1.0.3" - -color-convert@^1.3.0, color-convert@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a" - dependencies: - color-name "^1.1.1" - -color-name@^1.0.0, color-name@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - -color-string@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/color-string/-/color-string-0.3.0.tgz#27d46fb67025c5c2fa25993bfbf579e47841b991" - dependencies: - color-name "^1.0.0" - -color@^0.11.0: - version "0.11.4" - resolved "https://registry.yarnpkg.com/color/-/color-0.11.4.tgz#6d7b5c74fb65e841cd48792ad1ed5e07b904d764" - dependencies: - clone "^1.0.2" - color-convert "^1.3.0" - color-string "^0.3.0" - -colormin@^1.0.5: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colormin/-/colormin-1.1.2.tgz#ea2f7420a72b96881a38aae59ec124a6f7298133" - dependencies: - color "^0.11.0" - css-color-names "0.0.4" - has "^1.0.1" - -colors@1.1.2, colors@^1.1.0, colors@^1.1.2, colors@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" - -combine-lists@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/combine-lists/-/combine-lists-1.0.1.tgz#458c07e09e0d900fc28b70a3fec2dacd1d2cb7f6" - dependencies: - lodash "^4.5.0" - 
-combined-stream@^1.0.5, combined-stream@~1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" - dependencies: - delayed-stream "~1.0.0" - -commander@2: - version "2.9.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4" - dependencies: - graceful-readlink ">= 1.0.0" - -commander@2.11.x, commander@~2.11.0: - version "2.11.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563" - -common-tags@^1.3.1: - version "1.4.0" - resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.4.0.tgz#1187be4f3d4cf0c0427d43f74eef1f73501614c0" - dependencies: - babel-runtime "^6.18.0" - -component-bind@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" - -component-emitter@1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.1.2.tgz#296594f2753daa63996d2af08d15a95116c9aec3" - -component-emitter@1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" - -component-inherit@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" - -compressible@~2.0.10: - version "2.0.11" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.11.tgz#16718a75de283ed8e604041625a2064586797d8a" - dependencies: - mime-db ">= 1.29.0 < 2" - -compression@^1.5.2: - version "1.7.0" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.0.tgz#030c9f198f1643a057d776a738e922da4373012d" - dependencies: - accepts "~1.3.3" - bytes "2.5.0" - compressible "~2.0.10" - debug "2.6.8" - on-headers "~1.0.1" - safe-buffer "5.1.1" - vary "~1.1.1" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - -concat-stream@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.0.tgz#0aac662fd52be78964d5532f694784e70110acf7" - dependencies: - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - -configstore@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/configstore/-/configstore-3.1.0.tgz#45df907073e26dfa1cf4b2d52f5b60545eaa11d1" - dependencies: - dot-prop "^4.1.0" - graceful-fs "^4.1.2" - make-dir "^1.0.0" - unique-string "^1.0.0" - write-file-atomic "^2.0.0" - xdg-basedir "^3.0.0" - -connect-history-api-fallback@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.3.0.tgz#e51d17f8f0ef0db90a64fdb47de3051556e9f169" - -connect@^3.3.5: - version "3.6.2" - resolved "https://registry.yarnpkg.com/connect/-/connect-3.6.2.tgz#694e8d20681bfe490282c8ab886be98f09f42fe7" - dependencies: - debug "2.6.7" - finalhandler "1.0.3" - parseurl "~1.3.1" - utils-merge "1.0.0" - -console-browserify@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.1.0.tgz#f0241c45730a9fc6323b206dbf38edc741d0bb10" - dependencies: - date-now "^0.1.4" - -console-control-strings@^1.0.0, console-control-strings@~1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" - -constants-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" - -content-disposition@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4" - -content-type@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed" - -convert-source-map@^1.3.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.0.tgz#9acd70851c6d5dfdd93d9282e5edf94a03ff46b5" - -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - -cookie@0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" - -copy-webpack-plugin@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-4.0.1.tgz#9728e383b94316050d0c7463958f2b85c0aa8200" - dependencies: - bluebird "^2.10.2" - fs-extra "^0.26.4" - glob "^6.0.4" - is-glob "^3.1.0" - loader-utils "^0.2.15" - lodash "^4.3.0" - minimatch "^3.0.0" - node-dir "^0.1.10" - -core-js@^2.2.0, core-js@^2.4.0: - version "2.4.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.4.1.tgz#4de911e667b0eae9124e34254b53aea6fc618d3e" - -core-js@^2.4.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.1.tgz#ae6874dc66937789b80754ff5428df66819ca50b" - -core-object@^3.1.0: - version "3.1.5" - resolved "https://registry.yarnpkg.com/core-object/-/core-object-3.1.5.tgz#fa627b87502adc98045e44678e9a8ec3b9c0d2a9" - dependencies: - chalk "^2.0.0" - -core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - -cosmiconfig@^2.1.0, cosmiconfig@^2.1.1: - version "2.2.2" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-2.2.2.tgz#6173cebd56fac042c1f4390edf7af6c07c7cb892" - dependencies: - is-directory "^0.3.1" - js-yaml "^3.4.3" - minimist "^1.2.0" - object-assign "^4.1.0" - os-homedir "^1.0.1" - parse-json "^2.2.0" - require-from-string "^1.1.0" - -create-ecdh@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.0.tgz#888c723596cdf7612f6498233eebd7a35301737d" - dependencies: - bn.js "^4.1.0" - elliptic "^6.0.0" - -create-error-class@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6" - dependencies: - capture-stack-trace "^1.0.0" - -create-hash@^1.1.0, create-hash@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.3.tgz#606042ac8b9262750f483caddab0f5819172d8fd" - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - ripemd160 "^2.0.0" - sha.js "^2.4.0" - -create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: - version "1.1.6" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.6.tgz#acb9e221a4e17bdb076e90657c42b93e3726cf06" - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" 
- ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -cross-spawn-async@^2.1.1: - version "2.2.5" - resolved "https://registry.yarnpkg.com/cross-spawn-async/-/cross-spawn-async-2.2.5.tgz#845ff0c0834a3ded9d160daca6d390906bb288cc" - dependencies: - lru-cache "^4.0.0" - which "^1.2.8" - -cross-spawn@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-3.0.1.tgz#1256037ecb9f0c5f79e3d6ef135e30770184b982" - dependencies: - lru-cache "^4.0.1" - which "^1.2.9" - -cross-spawn@^5.0.1: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - -crypt@~0.0.1: - version "0.0.2" - resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" - -cryptiles@2.x.x: - version "2.0.5" - resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" - dependencies: - boom "2.x.x" - -crypto-browserify@^3.11.0: - version "3.11.1" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.11.1.tgz#948945efc6757a400d6e5e5af47194d10064279f" - dependencies: - browserify-cipher "^1.0.0" - browserify-sign "^4.0.0" - create-ecdh "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.0" - diffie-hellman "^5.0.0" - inherits "^2.0.1" - pbkdf2 "^3.0.3" - public-encrypt "^4.0.0" - randombytes "^2.0.0" - -crypto-random-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" - -css-color-names@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0" - -css-loader@^0.28.1: - version "0.28.7" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-0.28.7.tgz#5f2ee989dd32edd907717f953317656160999c1b" - dependencies: - babel-code-frame "^6.11.0" - css-selector-tokenizer "^0.7.0" - cssnano ">=2.6.1 <4" - icss-utils "^2.1.0" - loader-utils "^1.0.2" - lodash.camelcase "^4.3.0" - object-assign "^4.0.1" - postcss "^5.0.6" - postcss-modules-extract-imports "^1.0.0" - postcss-modules-local-by-default "^1.0.1" - postcss-modules-scope "^1.0.0" - postcss-modules-values "^1.1.0" - postcss-value-parser "^3.3.0" - source-list-map "^2.0.0" - -css-parse@1.7.x: - version "1.7.0" - resolved "https://registry.yarnpkg.com/css-parse/-/css-parse-1.7.0.tgz#321f6cf73782a6ff751111390fc05e2c657d8c9b" - -css-select@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" - dependencies: - boolbase "~1.0.0" - css-what "2.1" - domutils "1.5.1" - nth-check "~1.0.1" - -css-selector-tokenizer@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.0.tgz#e6988474ae8c953477bf5e7efecfceccd9cf4c86" - dependencies: - cssesc "^0.1.0" - fastparse "^1.1.1" - regexpu-core "^1.0.0" - -css-what@2.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.0.tgz#9467d032c38cfaefb9f2d79501253062f87fa1bd" - -cssauron@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/cssauron/-/cssauron-1.4.0.tgz#a6602dff7e04a8306dc0db9a551e92e8b5662ad8" - dependencies: - through X.X.X - -cssesc@^0.1.0: - version "0.1.0" - resolved 
"https://registry.yarnpkg.com/cssesc/-/cssesc-0.1.0.tgz#c814903e45623371a0477b40109aaafbeeaddbb4" - -"cssnano@>=2.6.1 <4", cssnano@^3.10.0: - version "3.10.0" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-3.10.0.tgz#4f38f6cea2b9b17fa01490f23f1dc68ea65c1c38" - dependencies: - autoprefixer "^6.3.1" - decamelize "^1.1.2" - defined "^1.0.0" - has "^1.0.1" - object-assign "^4.0.1" - postcss "^5.0.14" - postcss-calc "^5.2.0" - postcss-colormin "^2.1.8" - postcss-convert-values "^2.3.4" - postcss-discard-comments "^2.0.4" - postcss-discard-duplicates "^2.0.1" - postcss-discard-empty "^2.0.1" - postcss-discard-overridden "^0.1.1" - postcss-discard-unused "^2.2.1" - postcss-filter-plugins "^2.0.0" - postcss-merge-idents "^2.1.5" - postcss-merge-longhand "^2.0.1" - postcss-merge-rules "^2.0.3" - postcss-minify-font-values "^1.0.2" - postcss-minify-gradients "^1.0.1" - postcss-minify-params "^1.0.4" - postcss-minify-selectors "^2.0.4" - postcss-normalize-charset "^1.1.0" - postcss-normalize-url "^3.0.7" - postcss-ordered-values "^2.1.0" - postcss-reduce-idents "^2.2.2" - postcss-reduce-initial "^1.0.0" - postcss-reduce-transforms "^1.0.3" - postcss-svgo "^2.1.1" - postcss-unique-selectors "^2.0.2" - postcss-value-parser "^3.2.3" - postcss-zindex "^2.0.1" - -csso@~2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/csso/-/csso-2.3.2.tgz#ddd52c587033f49e94b71fc55569f252e8ff5f85" - dependencies: - clap "^1.0.9" - source-map "^0.5.3" - -currently-unhandled@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" - dependencies: - array-find-index "^1.0.1" - -custom-event@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" - -d3-array@1, d3-array@1.2.0, d3-array@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-1.2.0.tgz#147d269720e174c4057a7f42be8b0f3f2ba53108" - -d3-axis@1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-1.0.8.tgz#31a705a0b535e65759de14173a31933137f18efa" - -d3-brush@1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-1.0.4.tgz#00c2f238019f24f6c0a194a26d41a1530ffe7bc4" - dependencies: - d3-dispatch "1" - d3-drag "1" - d3-interpolate "1" - d3-selection "1" - d3-transition "1" - -d3-chord@1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-1.0.4.tgz#7dec4f0ba886f713fe111c45f763414f6f74ca2c" - dependencies: - d3-array "1" - d3-path "1" - -d3-collection@1, d3-collection@1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/d3-collection/-/d3-collection-1.0.4.tgz#342dfd12837c90974f33f1cc0a785aea570dcdc2" - -d3-color@1, d3-color@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-1.0.3.tgz#bc7643fca8e53a8347e2fbdaffa236796b58509b" - -d3-dispatch@1, d3-dispatch@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-1.0.3.tgz#46e1491eaa9b58c358fce5be4e8bed626e7871f8" - -d3-drag@1, d3-drag@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-1.1.1.tgz#b5155304433b18ba38726b2184d0098e820dc64b" - dependencies: - d3-dispatch "1" - d3-selection "1" - -d3-dsv@1: - version "1.0.5" - resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-1.0.5.tgz#419f7db47f628789fc3fdb636e678449d0821136" - dependencies: - commander "2" - iconv-lite 
"0.4" - rw "1" - -d3-dsv@1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-1.0.7.tgz#137076663f398428fc3d031ae65370522492b78f" - dependencies: - commander "2" - iconv-lite "0.4" - rw "1" - -d3-ease@1, d3-ease@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-1.0.3.tgz#68bfbc349338a380c44d8acc4fbc3304aa2d8c0e" - -d3-force@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-1.0.6.tgz#ea7e1b7730e2664cd314f594d6718c57cc132b79" - dependencies: - d3-collection "1" - d3-dispatch "1" - d3-quadtree "1" - d3-timer "1" - -d3-format@1, d3-format@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-1.2.0.tgz#6b480baa886885d4651dc248a8f4ac9da16db07a" - -d3-geo@1.6.4: - version "1.6.4" - resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-1.6.4.tgz#f20e1e461cb1845f5a8be55ab6f876542a7e3199" - dependencies: - d3-array "1" - -d3-hierarchy@1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-1.1.5.tgz#a1c845c42f84a206bcf1c01c01098ea4ddaa7a26" - -d3-interpolate@1, d3-interpolate@1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-1.1.5.tgz#69e099ff39214716e563c9aec3ea9d1ea4b8a79f" - dependencies: - d3-color "1" - -d3-path@1, d3-path@1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.5.tgz#241eb1849bd9e9e8021c0d0a799f8a0e8e441764" - -d3-polygon@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-1.0.3.tgz#16888e9026460933f2b179652ad378224d382c62" - -d3-quadtree@1, d3-quadtree@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-1.0.3.tgz#ac7987e3e23fe805a990f28e1b50d38fcb822438" - -d3-queue@3.0.7: - version "3.0.7" - resolved "https://registry.yarnpkg.com/d3-queue/-/d3-queue-3.0.7.tgz#c93a2e54b417c0959129d7d73f6cf7d4292e7618" - -d3-random@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-1.1.0.tgz#6642e506c6fa3a648595d2b2469788a8d12529d3" - -d3-request@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-request/-/d3-request-1.0.6.tgz#a1044a9ef4ec28c824171c9379fae6d79474b19f" - dependencies: - d3-collection "1" - d3-dispatch "1" - d3-dsv "1" - xmlhttprequest "1" - -d3-scale-chromatic@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.1.1.tgz#811406e8e09dab78a49dac4a32047d5d3edd0c44" - dependencies: - d3-interpolate "1" - -d3-scale@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-1.0.6.tgz#bce19da80d3a0cf422c9543ae3322086220b34ed" - dependencies: - d3-array "^1.2.0" - d3-collection "1" - d3-color "1" - d3-format "1" - d3-interpolate "1" - d3-time "1" - d3-time-format "2" - -d3-selection@1, d3-selection@1.1.0, d3-selection@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-1.1.0.tgz#1998684896488f839ca0372123da34f1d318809c" - -d3-shape@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.2.0.tgz#45d01538f064bafd05ea3d6d2cb748fd8c41f777" - dependencies: - d3-path "1" - -d3-time-format@2, d3-time-format@2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-2.0.5.tgz#9d7780204f7c9119c9170b1a56db4de9a8af972e" - dependencies: - d3-time "1" - -d3-time@1, d3-time@1.0.7: - version "1.0.7" - resolved 
"https://registry.yarnpkg.com/d3-time/-/d3-time-1.0.7.tgz#94caf6edbb7879bb809d0d1f7572bc48482f7270" - -d3-timer@1: - version "1.0.6" - resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-1.0.6.tgz#4044bf15d7025c06ce7d1149f73cd07b54dbd784" - -d3-timer@1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-1.0.7.tgz#df9650ca587f6c96607ff4e60cc38229e8dd8531" - -d3-transition@1, d3-transition@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-1.1.0.tgz#cfc85c74e5239324290546623572990560c3966f" - dependencies: - d3-color "1" - d3-dispatch "1" - d3-ease "1" - d3-interpolate "1" - d3-selection "^1.1.0" - d3-timer "1" - -d3-voronoi@1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/d3-voronoi/-/d3-voronoi-1.1.2.tgz#1687667e8f13a2d158c80c1480c5a29cb0d8973c" - -d3-zoom@1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-1.5.0.tgz#8417de9a077f98f9ce83b1998efb8ee12b4db26e" - dependencies: - d3-dispatch "1" - d3-drag "1" - d3-interpolate "1" - d3-selection "1" - d3-transition "1" - -d3@^4.10.0: - version "4.10.2" - resolved "https://registry.yarnpkg.com/d3/-/d3-4.10.2.tgz#d401b2bc0372a77e6822f278c0e4b4090206babd" - dependencies: - d3-array "1.2.0" - d3-axis "1.0.8" - d3-brush "1.0.4" - d3-chord "1.0.4" - d3-collection "1.0.4" - d3-color "1.0.3" - d3-dispatch "1.0.3" - d3-drag "1.1.1" - d3-dsv "1.0.7" - d3-ease "1.0.3" - d3-force "1.0.6" - d3-format "1.2.0" - d3-geo "1.6.4" - d3-hierarchy "1.1.5" - d3-interpolate "1.1.5" - d3-path "1.0.5" - d3-polygon "1.0.3" - d3-quadtree "1.0.3" - d3-queue "3.0.7" - d3-random "1.1.0" - d3-request "1.0.6" - d3-scale "1.0.6" - d3-selection "1.1.0" - d3-shape "1.2.0" - d3-time "1.0.7" - d3-time-format "2.0.5" - d3-timer "1.0.7" - d3-transition "1.1.0" - d3-voronoi "1.1.2" - d3-zoom "1.5.0" - -d@1: - version "1.0.0" - resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f" - dependencies: - es5-ext "^0.10.9" - -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - dependencies: - assert-plus "^1.0.0" - -date-now@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b" - -debug@*: - version "3.0.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.0.1.tgz#0564c612b521dc92d9f2988f0549e34f9c98db64" - dependencies: - ms "2.0.0" - -debug@2, debug@2.6.8, debug@^2.2.0, debug@^2.6.3, debug@^2.6.6, debug@^2.6.8: - version "2.6.8" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc" - dependencies: - ms "2.0.0" - -debug@2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da" - dependencies: - ms "0.7.1" - -debug@2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c" - dependencies: - ms "0.7.2" - -debug@2.6.7: - version "2.6.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.7.tgz#92bad1f6d05bbb6bba22cca88bcd0ec894c2861e" - dependencies: - ms "2.0.0" - -decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - -deep-equal@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5" - -deep-extend@~0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" - -default-require-extensions@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-1.0.0.tgz#f37ea15d3e13ffd9b437d33e1a75b5fb97874cb8" - dependencies: - strip-bom "^2.0.0" - -define-properties@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94" - dependencies: - foreach "^2.0.5" - object-keys "^1.0.8" - -defined@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - -del@^2.2.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" - dependencies: - globby "^5.0.0" - is-path-cwd "^1.0.0" - is-path-in-cwd "^1.0.0" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - rimraf "^2.2.8" - -del@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/del/-/del-3.0.0.tgz#53ecf699ffcbcb39637691ab13baf160819766e5" - dependencies: - globby "^6.1.0" - is-path-cwd "^1.0.0" - is-path-in-cwd "^1.0.0" - p-map "^1.1.1" - pify "^3.0.0" - rimraf "^2.2.8" - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - -delegates@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" - -denodeify@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/denodeify/-/denodeify-1.2.1.tgz#3a36287f5034e699e7577901052c2e6c94251631" - -depd@1.1.0, depd@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.0.tgz#e1bd82c6aab6ced965b97b88b17ed3e528ca18c3" - -depd@1.1.1, depd@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359" - -des.js@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.0.tgz#c074d2e2aa6a8a9a07dbd61f9a15c2cd83ec8ecc" - dependencies: - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -destroy@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" - -detect-indent@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" - dependencies: - repeating "^2.0.0" - -detect-node@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.3.tgz#a2033c09cc8e158d37748fbde7507832bd6ce127" - -di@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" - -diff@^3.0.1, diff@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9" - -diffie-hellman@^5.0.0: - version "5.0.2" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.2.tgz#b5835739270cfe26acf632099fded2a07f209e5e" - dependencies: - bn.js "^4.1.0" - miller-rabin "^4.0.0" - randombytes "^2.0.0" - -directory-encoder@^0.7.2: - version "0.7.2" - 
resolved "https://registry.yarnpkg.com/directory-encoder/-/directory-encoder-0.7.2.tgz#59b4e2aa4f25422f6c63b527b462f5e2d0dd2c58" - dependencies: - fs-extra "^0.23.1" - handlebars "^1.3.0" - img-stats "^0.5.2" - -dns-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" - -dns-packet@^1.0.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.2.2.tgz#a8a26bec7646438963fc86e06f8f8b16d6c8bf7a" - dependencies: - ip "^1.1.0" - safe-buffer "^5.0.1" - -dns-txt@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" - dependencies: - buffer-indexof "^1.0.0" - -dom-converter@~0.1: - version "0.1.4" - resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.1.4.tgz#a45ef5727b890c9bffe6d7c876e7b19cb0e17f3b" - dependencies: - utila "~0.3" - -dom-serialize@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" - dependencies: - custom-event "~1.0.0" - ent "~2.2.0" - extend "^3.0.0" - void-elements "^2.0.0" - -dom-serializer@0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.1.0.tgz#073c697546ce0780ce23be4a28e293e40bc30c82" - dependencies: - domelementtype "~1.1.1" - entities "~1.1.1" - -domain-browser@^1.1.1: - version "1.1.7" - resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.1.7.tgz#867aa4b093faa05f1de08c06f4d7b21fdf8698bc" - -domelementtype@1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.0.tgz#b17aed82e8ab59e52dd9c19b1756e0fc187204c2" - -domelementtype@~1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.1.3.tgz#bd28773e2642881aec51544924299c5cd822185b" - -domhandler@2.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.1.0.tgz#d2646f5e57f6c3bab11cf6cb05d3c0acf7412594" - dependencies: - domelementtype "1" - -domutils@1.1: - version "1.1.6" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.1.6.tgz#bddc3de099b9a2efacc51c623f28f416ecc57485" - dependencies: - domelementtype "1" - -domutils@1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" - dependencies: - dom-serializer "0" - domelementtype "1" - -dot-prop@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.1.1.tgz#a8493f0b7b5eeec82525b5c7587fa7de7ca859c1" - dependencies: - is-obj "^1.0.0" - -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - -ecc-jsbn@~0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" - dependencies: - jsbn "~0.1.0" - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - -ejs@^2.5.7: - version "2.5.7" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.5.7.tgz#cc872c168880ae3c7189762fd5ffc00896c9518a" - -electron-to-chromium@^1.2.7: - version "1.3.21" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.21.tgz#a967ebdcfe8ed0083fc244d1894022a8e8113ea2" - -elliptic@^6.0.0: - 
version "6.4.0" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.4.0.tgz#cac9af8762c85836187003c8dfe193e5e2eae5df" - dependencies: - bn.js "^4.4.0" - brorand "^1.0.1" - hash.js "^1.0.0" - hmac-drbg "^1.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.0" - -ember-cli-string-utils@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/ember-cli-string-utils/-/ember-cli-string-utils-1.1.0.tgz#39b677fc2805f55173735376fcef278eaa4452a1" - -emojis-list@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" - -encodeurl@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.1.tgz#79e3d58655346909fe6f0f45a5de68103b294d20" - -engine.io-client@1.8.2: - version "1.8.2" - resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-1.8.2.tgz#c38767547f2a7d184f5752f6f0ad501006703766" - dependencies: - component-emitter "1.2.1" - component-inherit "0.0.3" - debug "2.3.3" - engine.io-parser "1.3.2" - has-cors "1.1.0" - indexof "0.0.1" - parsejson "0.0.3" - parseqs "0.0.5" - parseuri "0.0.5" - ws "1.1.1" - xmlhttprequest-ssl "1.5.3" - yeast "0.1.2" - -engine.io-parser@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-1.3.2.tgz#937b079f0007d0893ec56d46cb220b8cb435220a" - dependencies: - after "0.8.2" - arraybuffer.slice "0.0.6" - base64-arraybuffer "0.1.5" - blob "0.0.4" - has-binary "0.1.7" - wtf-8 "1.0.0" - -engine.io@1.8.2: - version "1.8.2" - resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-1.8.2.tgz#6b59be730b348c0125b0a4589de1c355abcf7a7e" - dependencies: - accepts "1.3.3" - base64id "1.0.0" - cookie "0.3.1" - debug "2.3.3" - engine.io-parser "1.3.2" - ws "1.1.1" - -enhanced-resolve@^3.1.0, enhanced-resolve@^3.4.0: - version "3.4.1" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e" - dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.4.0" - object-assign "^4.0.1" - tapable "^0.2.7" - -ent@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - -entities@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.1.tgz#6e5c2d0a5621b5dadaecef80b90edfb5cd7772f0" - -errno@^0.1.1, errno@^0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.4.tgz#b896e23a9e5e8ba33871fc996abd3635fc9a1c7d" - dependencies: - prr "~0.0.0" - -error-ex@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" - dependencies: - is-arrayish "^0.2.1" - -es-abstract@^1.7.0: - version "1.8.2" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.8.2.tgz#25103263dc4decbda60e0c737ca32313518027ee" - dependencies: - es-to-primitive "^1.1.1" - function-bind "^1.1.1" - has "^1.0.1" - is-callable "^1.1.3" - is-regex "^1.0.4" - -es-to-primitive@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d" - dependencies: - is-callable "^1.1.1" - is-date-object "^1.0.1" - is-symbol "^1.0.1" - -es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14: - version "0.10.30" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.30.tgz#7141a16836697dbabfaaaeee41495ce29f52c939" - 
dependencies: - es6-iterator "2" - es6-symbol "~3.1" - -es6-iterator@2, es6-iterator@^2.0.1, es6-iterator@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.1.tgz#8e319c9f0453bf575d374940a655920e59ca5512" - dependencies: - d "1" - es5-ext "^0.10.14" - es6-symbol "^3.1" - -es6-map@^0.1.3: - version "0.1.5" - resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0" - dependencies: - d "1" - es5-ext "~0.10.14" - es6-iterator "~2.0.1" - es6-set "~0.1.5" - es6-symbol "~3.1.1" - event-emitter "~0.3.5" - -es6-promise@~4.0.3: - version "4.0.5" - resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.0.5.tgz#7882f30adde5b240ccfa7f7d78c548330951ae42" - -es6-set@~0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1" - dependencies: - d "1" - es5-ext "~0.10.14" - es6-iterator "~2.0.1" - es6-symbol "3.1.1" - event-emitter "~0.3.5" - -es6-symbol@3.1.1, es6-symbol@^3.1, es6-symbol@^3.1.1, es6-symbol@~3.1, es6-symbol@~3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77" - dependencies: - d "1" - es5-ext "~0.10.14" - -es6-weak-map@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.2.tgz#5e3ab32251ffd1538a1f8e5ffa1357772f92d96f" - dependencies: - d "1" - es5-ext "^0.10.14" - es6-iterator "^2.0.1" - es6-symbol "^3.1.1" - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - -escope@^3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3" - dependencies: - es6-map "^0.1.3" - es6-weak-map "^2.0.1" - esrecurse "^4.1.0" - estraverse "^4.1.1" - -esprima@^2.6.0: - version "2.7.3" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" - -esprima@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" - -esprima@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" - -esrecurse@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.0.tgz#fa9568d98d3823f9a41d91e902dcab9ea6e5b163" - dependencies: - estraverse "^4.1.0" - object-assign "^4.0.1" - -estraverse@^4.1.0, estraverse@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" - -esutils@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" - -etag@~1.8.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.0.tgz#6f631aef336d6c46362b51764044ce216be3c051" - -event-emitter@~0.3.5: - version "0.3.5" - resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" - dependencies: - d "1" - es5-ext "~0.10.14" - -eventemitter3@1.x.x: - version "1.2.0" - 
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-1.2.0.tgz#1c86991d816ad1e504750e73874224ecf3bec508" - -events@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" - -eventsource@0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-0.1.6.tgz#0acede849ed7dd1ccc32c811bb11b944d4f29232" - dependencies: - original ">=0.0.5" - -evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - -execa@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.4.0.tgz#4eb6467a36a095fabb2970ff9d5e3fb7bce6ebc3" - dependencies: - cross-spawn-async "^2.1.1" - is-stream "^1.1.0" - npm-run-path "^1.0.0" - object-assign "^4.0.1" - path-key "^1.0.0" - strip-eof "^1.0.0" - -execa@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - -expand-braces@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/expand-braces/-/expand-braces-0.1.2.tgz#488b1d1d2451cb3d3a6b192cfc030f44c5855fea" - dependencies: - array-slice "^0.2.3" - array-unique "^0.2.1" - braces "^0.1.2" - -expand-brackets@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" - dependencies: - is-posix-bracket "^0.1.0" - -expand-range@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-0.1.1.tgz#4cb8eda0993ca56fa4f41fc42f3cbb4ccadff044" - dependencies: - is-number "^0.1.1" - repeat-string "^0.2.2" - -expand-range@^1.8.1: - version "1.8.2" - resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" - dependencies: - fill-range "^2.1.0" - -exports-loader@^0.6.3: - version "0.6.4" - resolved "https://registry.yarnpkg.com/exports-loader/-/exports-loader-0.6.4.tgz#d70fc6121975b35fc12830cf52754be2740fc886" - dependencies: - loader-utils "^1.0.2" - source-map "0.5.x" - -express@^4.13.3: - version "4.15.4" - resolved "https://registry.yarnpkg.com/express/-/express-4.15.4.tgz#032e2253489cf8fce02666beca3d11ed7a2daed1" - dependencies: - accepts "~1.3.3" - array-flatten "1.1.1" - content-disposition "0.5.2" - content-type "~1.0.2" - cookie "0.3.1" - cookie-signature "1.0.6" - debug "2.6.8" - depd "~1.1.1" - encodeurl "~1.0.1" - escape-html "~1.0.3" - etag "~1.8.0" - finalhandler "~1.0.4" - fresh "0.5.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "~2.3.0" - parseurl "~1.3.1" - path-to-regexp "0.1.7" - proxy-addr "~1.1.5" - qs "6.5.0" - range-parser "~1.2.0" - send "0.15.4" - serve-static "1.12.4" - setprototypeof "1.0.3" - statuses "~1.3.1" - type-is "~1.6.15" - utils-merge "1.0.0" - vary "~1.1.1" - -extend@3, extend@^3.0.0, extend@~3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" - -extglob@^0.3.1: - version "0.3.2" - resolved 
"https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" - dependencies: - is-extglob "^1.0.0" - -extract-text-webpack-plugin@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/extract-text-webpack-plugin/-/extract-text-webpack-plugin-3.0.0.tgz#90caa7907bc449f335005e3ac7532b41b00de612" - dependencies: - async "^2.4.1" - loader-utils "^1.1.0" - schema-utils "^0.3.0" - webpack-sources "^1.0.1" - -extract-zip@~1.6.5: - version "1.6.5" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.6.5.tgz#99a06735b6ea20ea9b705d779acffcc87cff0440" - dependencies: - concat-stream "1.6.0" - debug "2.2.0" - mkdirp "0.5.0" - yauzl "2.4.1" - -extsprintf@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.0.2.tgz#e1080e0658e300b06294990cc70e1502235fd550" - -fast-deep-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff" - -fastparse@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.1.tgz#d1e2643b38a94d7583b479060e6c4affc94071f8" - -faye-websocket@^0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" - dependencies: - websocket-driver ">=0.5.1" - -faye-websocket@~0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.1.tgz#f0efe18c4f56e4f40afc7e06c719fd5ee6188f38" - dependencies: - websocket-driver ">=0.5.1" - -fd-slicer@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.0.1.tgz#8b5bcbd9ec327c5041bf9ab023fd6750f1177e65" - dependencies: - pend "~1.2.0" - -file-loader@^0.10.0: - version "0.10.1" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-0.10.1.tgz#815034119891fc6441fb5a64c11bc93c22ddd842" - dependencies: - loader-utils "^1.0.2" - -filename-regex@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" - -fileset@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/fileset/-/fileset-2.0.3.tgz#8e7548a96d3cc2327ee5e674168723a333bba2a0" - dependencies: - glob "^7.0.3" - minimatch "^3.0.3" - -fill-range@^2.1.0: - version "2.2.3" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" - dependencies: - is-number "^2.1.0" - isobject "^2.0.0" - randomatic "^1.1.3" - repeat-element "^1.1.2" - repeat-string "^1.5.2" - -finalhandler@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.3.tgz#ef47e77950e999780e86022a560e3217e0d0cc89" - dependencies: - debug "2.6.7" - encodeurl "~1.0.1" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.1" - statuses "~1.3.1" - unpipe "~1.0.0" - -finalhandler@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.4.tgz#18574f2e7c4b98b8ae3b230c21f201f31bdb3fb7" - dependencies: - debug "2.6.8" - encodeurl "~1.0.1" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.1" - statuses "~1.3.1" - unpipe "~1.0.0" - -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - -find-up@^2.0.0: - version "2.1.0" - 
resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - dependencies: - locate-path "^2.0.0" - -findup-sync@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-0.3.0.tgz#37930aa5d816b777c03445e1966cc6790a4c0b16" - dependencies: - glob "~5.0.0" - -flatten@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.2.tgz#dae46a9d78fbe25292258cc1e780a41d95c03782" - -font-awesome@^4.7.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/font-awesome/-/font-awesome-4.7.0.tgz#8fa8cf0411a1a31afd07b06d2902bb9fc815a133" - -for-in@^0.1.3: - version "0.1.8" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-0.1.8.tgz#d8773908e31256109952b1fdb9b3fa867d2775e1" - -for-in@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - -for-own@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" - dependencies: - for-in "^1.0.1" - -for-own@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" - dependencies: - for-in "^1.0.1" - -foreach@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" - -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - -form-data@~2.1.1: - version "2.1.4" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1" - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.5" - mime-types "^2.1.12" - -forwarded@~0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.0.tgz#19ef9874c4ae1c297bcf078fde63a09b66a84363" - -fresh@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.0.tgz#f474ca5e6a9246d6fd8e0953cfa9b9c805afa78e" - -fs-extra@^0.23.1: - version "0.23.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.23.1.tgz#6611dba6adf2ab8dc9c69fab37cddf8818157e3d" - dependencies: - graceful-fs "^4.1.2" - jsonfile "^2.1.0" - path-is-absolute "^1.0.0" - rimraf "^2.2.8" - -fs-extra@^0.26.4: - version "0.26.7" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.26.7.tgz#9ae1fdd94897798edab76d0918cf42d0c3184fa9" - dependencies: - graceful-fs "^4.1.2" - jsonfile "^2.1.0" - klaw "^1.0.0" - path-is-absolute "^1.0.0" - rimraf "^2.2.8" - -fs-extra@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.1.tgz#7fc0c6c8957f983f57f306a24e5b9ddd8d0dd880" - dependencies: - graceful-fs "^4.1.2" - jsonfile "^3.0.0" - universalify "^0.1.0" - -fs-extra@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-1.0.0.tgz#cd3ce5f7e7cb6145883fcae3191e9877f8587950" - dependencies: - graceful-fs "^4.1.2" - jsonfile "^2.1.0" - klaw "^1.0.0" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - -fsevents@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.1.tgz#f19fd28f43eeaf761680e519a203c4d0b3d31aff" - dependencies: - nan "^2.3.0" - node-pre-gyp "^0.6.29" - -fstream-ignore@^1.0.5: - version 
"1.0.5" - resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" - dependencies: - fstream "^1.0.0" - inherits "2" - minimatch "^3.0.0" - -fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: - version "1.0.11" - resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171" - dependencies: - graceful-fs "^4.1.2" - inherits "~2.0.0" - mkdirp ">=0.5 0" - rimraf "2" - -function-bind@^1.0.2, function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - -gauge@~2.7.3: - version "2.7.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" - dependencies: - aproba "^1.0.3" - console-control-strings "^1.0.0" - has-unicode "^2.0.0" - object-assign "^4.1.0" - signal-exit "^3.0.0" - string-width "^1.0.1" - strip-ansi "^3.0.1" - wide-align "^1.1.0" - -gaze@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/gaze/-/gaze-1.1.2.tgz#847224677adb8870d679257ed3388fdb61e40105" - dependencies: - globule "^1.0.0" - -get-caller-file@^1.0.0, get-caller-file@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" - -get-stdin@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" - -get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - -getpass@^0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" - dependencies: - assert-plus "^1.0.0" - -glob-base@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" - dependencies: - glob-parent "^2.0.0" - is-glob "^2.0.0" - -glob-parent@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" - dependencies: - is-glob "^2.0.0" - -glob@7.0.x: - version "7.0.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.0.6.tgz#211bafaf49e525b8cd93260d14ab136152b3f57a" - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.2" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^6.0.4: - version "6.0.4" - resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" - dependencies: - inflight "^1.0.4" - inherits "2" - minimatch "2 || 3" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.0.6, glob@^7.1.1, glob@~7.1.1: - version "7.1.2" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@~5.0.0: - version "5.0.15" - resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" - dependencies: - inflight "^1.0.4" - inherits "2" - minimatch "2 || 3" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^9.0.0: - version "9.17.0" - resolved 
"https://registry.yarnpkg.com/globals/-/globals-9.17.0.tgz#0c0ca696d9b9bb694d2e5470bd37777caad50286" - -globby@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" - dependencies: - array-union "^1.0.1" - arrify "^1.0.0" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -globby@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" - dependencies: - array-union "^1.0.1" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -globule@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/globule/-/globule-1.2.0.tgz#1dc49c6822dd9e8a2fa00ba2a295006e8664bd09" - dependencies: - glob "~7.1.1" - lodash "~4.17.4" - minimatch "~3.0.2" - -got@^6.7.1: - version "6.7.1" - resolved "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0" - dependencies: - create-error-class "^3.0.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-redirect "^1.0.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - lowercase-keys "^1.0.0" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - unzip-response "^2.0.1" - url-parse-lax "^1.0.0" - -graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9: - version "4.1.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" - -"graceful-readlink@>= 1.0.0": - version "1.0.1" - resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" - -handle-thing@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-1.2.5.tgz#fd7aad726bf1a5fd16dfc29b2f7a6601d27139c4" - -handlebars@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-1.3.0.tgz#9e9b130a93e389491322d975cf3ec1818c37ce34" - dependencies: - optimist "~0.3" - optionalDependencies: - uglify-js "~2.3" - -handlebars@^4.0.3: - version "4.0.10" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.10.tgz#3d30c718b09a3d96f23ea4cc1f403c4d3ba9ff4f" - dependencies: - async "^1.4.0" - optimist "^0.6.1" - source-map "^0.4.4" - optionalDependencies: - uglify-js "^2.6" - -har-schema@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" - -har-validator@~4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" - dependencies: - ajv "^4.9.1" - har-schema "^1.0.5" - -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - dependencies: - ansi-regex "^2.0.0" - -has-binary@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/has-binary/-/has-binary-0.1.7.tgz#68e61eb16210c9545a0a5cce06a873912fe1e68c" - dependencies: - isarray "0.0.1" - -has-cors@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" - -has-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" - -has-flag@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" - -has-unicode@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" - -has@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" - dependencies: - function-bind "^1.0.2" - -hash-base@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-2.0.2.tgz#66ea1d856db4e8a5470cadf6fce23ae5244ef2e1" - dependencies: - inherits "^2.0.1" - -hash-base@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.3.tgz#340dedbe6290187151c1ea1d777a3448935df846" - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.0" - -hasha@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/hasha/-/hasha-2.2.0.tgz#78d7cbfc1e6d66303fe79837365984517b2f6ee1" - dependencies: - is-stream "^1.0.1" - pinkie-promise "^2.0.0" - -hawk@~3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" - dependencies: - boom "2.x.x" - cryptiles "2.x.x" - hoek "2.x.x" - sntp "1.x.x" - -he@1.1.x: - version "1.1.1" - resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd" - -heimdalljs-logger@^0.1.9: - version "0.1.9" - resolved "https://registry.yarnpkg.com/heimdalljs-logger/-/heimdalljs-logger-0.1.9.tgz#d76ada4e45b7bb6f786fc9c010a68eb2e2faf176" - dependencies: - debug "^2.2.0" - heimdalljs "^0.2.0" - -heimdalljs@^0.2.0, heimdalljs@^0.2.4: - version "0.2.5" - resolved "https://registry.yarnpkg.com/heimdalljs/-/heimdalljs-0.2.5.tgz#6aa54308eee793b642cff9cf94781445f37730ac" - dependencies: - rsvp "~3.2.1" - -hmac-drbg@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - -hoek@2.x.x: - version "2.16.3" - resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" - -hosted-git-info@^2.1.4: - version "2.5.0" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c" - -hpack.js@^2.1.6: - version "2.1.6" - resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -html-comment-regex@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.1.tgz#668b93776eaae55ebde8f3ad464b307a4963625e" - -html-entities@^1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" - -html-minifier@^3.2.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.3.tgz#4a275e3b1a16639abb79b4c11191ff0d0fcf1ab9" - dependencies: - camel-case "3.0.x" - clean-css "4.1.x" - commander "2.11.x" - he "1.1.x" - ncname "1.0.x" - param-case "2.1.x" - relateurl "0.2.x" - 
uglify-js "3.0.x" - -html-webpack-plugin@^2.29.0, html-webpack-plugin@^2.30.1: - version "2.30.1" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-2.30.1.tgz#7f9c421b7ea91ec460f56527d78df484ee7537d5" - dependencies: - bluebird "^3.4.7" - html-minifier "^3.2.3" - loader-utils "^0.2.16" - lodash "^4.17.3" - pretty-error "^2.0.2" - toposort "^1.0.0" - -htmlparser2@~3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.3.0.tgz#cc70d05a59f6542e43f0e685c982e14c924a9efe" - dependencies: - domelementtype "1" - domhandler "2.1" - domutils "1.1" - readable-stream "1.0" - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - -http-errors@~1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.1.tgz#5f8b8ed98aca545656bf572997387f904a722257" - dependencies: - depd "1.1.0" - inherits "2.0.3" - setprototypeof "1.0.3" - statuses ">= 1.3.1 < 2" - -http-errors@~1.6.2: - version "1.6.2" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.2.tgz#0a002cc85707192a7e7946ceedc11155f60ec736" - dependencies: - depd "1.1.1" - inherits "2.0.3" - setprototypeof "1.0.3" - statuses ">= 1.3.1 < 2" - -http-proxy-middleware@~0.17.4: - version "0.17.4" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.17.4.tgz#642e8848851d66f09d4f124912846dbaeb41b833" - dependencies: - http-proxy "^1.16.2" - is-glob "^3.1.0" - lodash "^4.17.2" - micromatch "^2.3.11" - -http-proxy@^1.13.0, http-proxy@^1.16.2: - version "1.16.2" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.16.2.tgz#06dff292952bf64dbe8471fa9df73066d4f37742" - dependencies: - eventemitter3 "1.x.x" - requires-port "1.x.x" - -http-signature@~1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" - dependencies: - assert-plus "^0.2.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -https-browserify@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-0.0.1.tgz#3f91365cabe60b77ed0ebba24b454e3e09d95a82" - -https-proxy-agent@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-1.0.0.tgz#35f7da6c48ce4ddbfa264891ac593ee5ff8671e6" - dependencies: - agent-base "2" - debug "2" - extend "3" - -iconv-lite@0.4: - version "0.4.17" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.17.tgz#4fdaa3b38acbc2c031b045d0edcdfe1ecab18c8d" - -iconv-lite@0.4.15: - version "0.4.15" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.15.tgz#fe265a218ac6a57cfe854927e9d04c19825eddeb" - -icss-replace-symbols@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" - -icss-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-2.1.0.tgz#83f0a0ec378bf3246178b6c2ad9136f135b1c962" - dependencies: - postcss "^6.0.1" - -ieee754@^1.1.4: - version "1.1.8" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4" - -image-size@~0.5.0: - version "0.5.5" - resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" - -img-stats@^0.5.2: - version 
"0.5.2" - resolved "https://registry.yarnpkg.com/img-stats/-/img-stats-0.5.2.tgz#c203496c42f2d9eb2e5ab8232fa756bab32c9e2b" - dependencies: - xmldom "^0.1.19" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - -in-publish@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/in-publish/-/in-publish-2.0.0.tgz#e20ff5e3a2afc2690320b6dc552682a9c7fadf51" - -indent-string@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" - dependencies: - repeating "^2.0.0" - -indexes-of@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" - -indexof@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - -inherits@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" - -ini@^1.3.4, ini@~1.3.0: - version "1.3.4" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" - -internal-ip@1.2.0, internal-ip@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-1.2.0.tgz#ae9fbf93b984878785d50a8de1b356956058cf5c" - dependencies: - meow "^3.3.0" - -interpret@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.3.tgz#cbc35c62eeee73f19ab7b10a801511401afc0f90" - -invariant@^2.2.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" - dependencies: - loose-envify "^1.0.0" - -invert-kv@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" - -ip@^1.1.0, ip@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" - -ipaddr.js@1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.4.0.tgz#296aca878a821816e5b85d0a285a99bcff4582f0" - -is-absolute-url@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6" - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - dependencies: - binary-extensions "^1.0.0" - -is-buffer@^1.0.2, is-buffer@^1.1.5, is-buffer@~1.1.1: - version "1.1.5" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc" - -is-builtin-module@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" - dependencies: - builtin-modules "^1.0.0" - -is-callable@^1.1.1, is-callable@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2" - -is-date-object@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" - -is-directory@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" - -is-dotfile@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.2.tgz#2c132383f39199f8edc268ca01b9b007d205cc4d" - -is-equal-shallow@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" - dependencies: - is-primitive "^2.0.0" - -is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - -is-extglob@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" - -is-extglob@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - -is-finite@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - -is-glob@^2.0.0, is-glob@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" - dependencies: - is-extglob "^1.0.0" - -is-glob@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" - dependencies: - is-extglob "^2.1.0" - -is-npm@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" - -is-number@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-0.1.1.tgz#69a7af116963d47206ec9bd9b48a14216f1e3806" - -is-number@^2.0.2, is-number@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" - dependencies: - kind-of "^3.0.2" - -is-number@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" - -is-obj@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - -is-path-cwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" - -is-path-in-cwd@^1.0.0: - version "1.0.0" - 
resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc" - dependencies: - is-path-inside "^1.0.0" - -is-path-inside@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.0.tgz#fc06e5a1683fbda13de667aff717bbc10a48f37f" - dependencies: - path-is-inside "^1.0.1" - -is-plain-obj@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - -is-plain-object@^2.0.1: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - dependencies: - isobject "^3.0.1" - -is-posix-bracket@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" - -is-primitive@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" - -is-redirect@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" - -is-regex@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" - dependencies: - has "^1.0.1" - -is-retry-allowed@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" - -is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - -is-svg@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-2.1.0.tgz#cf61090da0d9efbcab8722deba6f032208dbb0e9" - dependencies: - html-comment-regex "^1.1.0" - -is-symbol@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572" - -is-typedarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - -is-utf8@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" - -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" - -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - -isbinaryfile@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.2.tgz#4a3e974ec0cba9004d3fc6cde7209ea69368a621" - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - dependencies: - isarray "1.0.0" - -isobject@^3.0.1: - version "3.0.1" - resolved 
"https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - -isstream@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - -istanbul-api@^1.1.1: - version "1.1.9" - resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.1.9.tgz#2827920d380d4286d857d57a2968a841db8a7ec8" - dependencies: - async "^2.1.4" - fileset "^2.0.2" - istanbul-lib-coverage "^1.1.1" - istanbul-lib-hook "^1.0.7" - istanbul-lib-instrument "^1.7.2" - istanbul-lib-report "^1.1.1" - istanbul-lib-source-maps "^1.2.1" - istanbul-reports "^1.1.1" - js-yaml "^3.7.0" - mkdirp "^0.5.1" - once "^1.4.0" - -istanbul-instrumenter-loader@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/istanbul-instrumenter-loader/-/istanbul-instrumenter-loader-2.0.0.tgz#e5492900ab0bba835efa8024cb00be9b3eea2700" - dependencies: - convert-source-map "^1.3.0" - istanbul-lib-instrument "^1.1.3" - loader-utils "^0.2.16" - object-assign "^4.1.0" - -istanbul-lib-coverage@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.1.tgz#73bfb998885299415c93d38a3e9adf784a77a9da" - -istanbul-lib-hook@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.0.7.tgz#dd6607f03076578fe7d6f2a630cf143b49bacddc" - dependencies: - append-transform "^0.4.0" - -istanbul-lib-instrument@^1.1.3: - version "1.8.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.8.0.tgz#66f6c9421cc9ec4704f76f2db084ba9078a2b532" - dependencies: - babel-generator "^6.18.0" - babel-template "^6.16.0" - babel-traverse "^6.18.0" - babel-types "^6.18.0" - babylon "^6.18.0" - istanbul-lib-coverage "^1.1.1" - semver "^5.3.0" - -istanbul-lib-instrument@^1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.7.2.tgz#6014b03d3470fb77638d5802508c255c06312e56" - dependencies: - babel-generator "^6.18.0" - babel-template "^6.16.0" - babel-traverse "^6.18.0" - babel-types "^6.18.0" - babylon "^6.13.0" - istanbul-lib-coverage "^1.1.1" - semver "^5.3.0" - -istanbul-lib-report@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.1.tgz#f0e55f56655ffa34222080b7a0cd4760e1405fc9" - dependencies: - istanbul-lib-coverage "^1.1.1" - mkdirp "^0.5.1" - path-parse "^1.0.5" - supports-color "^3.1.2" - -istanbul-lib-source-maps@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.1.tgz#a6fe1acba8ce08eebc638e572e294d267008aa0c" - dependencies: - debug "^2.6.3" - istanbul-lib-coverage "^1.1.1" - mkdirp "^0.5.1" - rimraf "^2.6.1" - source-map "^0.5.3" - -istanbul-reports@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.1.tgz#042be5c89e175bc3f86523caab29c014e77fee4e" - dependencies: - handlebars "^4.0.3" - -jasmine-core@~2.5.2: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.5.2.tgz#6f61bd79061e27f43e6f9355e44b3c6cab6ff297" - -jasmine-core@~2.6.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.6.2.tgz#74ea1f7cf428691af201107d631234027a09daab" - -jasmine-spec-reporter@~3.2.0: - version "3.2.0" - resolved 
"https://registry.yarnpkg.com/jasmine-spec-reporter/-/jasmine-spec-reporter-3.2.0.tgz#fdbe85a80ccdd3b276746bc77fde83c1ce773eff" - dependencies: - colors "1.1.2" - -jasmine@^2.5.3: - version "2.6.0" - resolved "https://registry.yarnpkg.com/jasmine/-/jasmine-2.6.0.tgz#6b22e70883e8e589d456346153b4d206ddbe217f" - dependencies: - exit "^0.1.2" - glob "^7.0.6" - jasmine-core "~2.6.0" - -jasminewd2@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/jasminewd2/-/jasminewd2-2.1.0.tgz#da595275d1ae631de736ac0a7c7d85c9f73ef652" - -jodid25519@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/jodid25519/-/jodid25519-1.0.2.tgz#06d4912255093419477d425633606e0e90782967" - dependencies: - jsbn "~0.1.0" - -jquery@^1.12.4: - version "1.12.4" - resolved "https://registry.yarnpkg.com/jquery/-/jquery-1.12.4.tgz#01e1dfba290fe73deba77ceeacb0f9ba2fec9e0c" - -js-base64@^2.1.5, js-base64@^2.1.8, js-base64@^2.1.9: - version "2.1.9" - resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.1.9.tgz#f0e80ae039a4bd654b5f281fc93f04a914a7fcce" - -js-tokens@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.1.tgz#08e9f132484a2c45a30907e9dc4d5567b7f114d7" - -js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - -js-yaml@^3.4.3: - version "3.9.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.9.1.tgz#08775cebdfdd359209f0d2acd383c8f86a6904a0" - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^3.7.0: - version "3.8.4" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.8.4.tgz#520b4564f86573ba96662af85a8cafa7b4b5a6f6" - dependencies: - argparse "^1.0.7" - esprima "^3.1.1" - -js-yaml@~3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.7.0.tgz#5c967ddd837a9bfdca5f2de84253abe8a1c03b80" - dependencies: - argparse "^1.0.7" - esprima "^2.6.0" - -jsbn@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - -jsesc@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" - -jsesc@~0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - -json-loader@^0.5.4: - version "0.5.7" - resolved "https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.7.tgz#dca14a70235ff82f0ac9a3abeb60d337a365185d" - -json-schema-traverse@^0.3.0: - version "0.3.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" - -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" - -json-stable-stringify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" - dependencies: - jsonify "~0.0.0" - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - -json3@3.3.2, json3@^3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1" - -json5@^0.5.0, json5@^0.5.1: - version "0.5.1" - resolved 
"https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" - -jsonfile@^2.1.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-2.4.0.tgz#3736a2b428b87bbda0cc83b53fa3d633a35c2ae8" - optionalDependencies: - graceful-fs "^4.1.6" - -jsonfile@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-3.0.1.tgz#a5ecc6f65f53f662c4415c7675a0331d0992ec66" - optionalDependencies: - graceful-fs "^4.1.6" - -jsonify@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" - -jsprim@^1.2.2: - version "1.4.0" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918" - dependencies: - assert-plus "1.0.0" - extsprintf "1.0.2" - json-schema "0.2.3" - verror "1.3.6" - -karma-cli@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/karma-cli/-/karma-cli-1.0.1.tgz#ae6c3c58a313a1d00b45164c455b9b86ce17f960" - dependencies: - resolve "^1.1.6" - -karma-coverage-istanbul-reporter@^0.2.0: - version "0.2.3" - resolved "https://registry.yarnpkg.com/karma-coverage-istanbul-reporter/-/karma-coverage-istanbul-reporter-0.2.3.tgz#11f1be9cfa93755a77bac39ab16e315a7100b5c5" - dependencies: - istanbul-api "^1.1.1" - -karma-jasmine-html-reporter@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/karma-jasmine-html-reporter/-/karma-jasmine-html-reporter-0.2.2.tgz#48a8e5ef18807617ee2b5e33c1194c35b439524c" - dependencies: - karma-jasmine "^1.0.2" - -karma-jasmine@^1.0.2, karma-jasmine@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-1.1.0.tgz#22e4c06bf9a182e5294d1f705e3733811b810acf" - -karma-phantomjs-launcher@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/karma-phantomjs-launcher/-/karma-phantomjs-launcher-1.0.4.tgz#d23ca34801bda9863ad318e3bb4bd4062b13acd2" - dependencies: - lodash "^4.0.1" - phantomjs-prebuilt "^2.1.7" - -karma-source-map-support@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/karma-source-map-support/-/karma-source-map-support-1.2.0.tgz#1bf81e7bb4b089627ab352ec4179e117c406a540" - dependencies: - source-map-support "^0.4.1" - -karma@~1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/karma/-/karma-1.4.1.tgz#41981a71d54237606b0a3ea8c58c90773f41650e" - dependencies: - bluebird "^3.3.0" - body-parser "^1.12.4" - chokidar "^1.4.1" - colors "^1.1.0" - combine-lists "^1.0.0" - connect "^3.3.5" - core-js "^2.2.0" - di "^0.0.1" - dom-serialize "^2.2.0" - expand-braces "^0.1.1" - glob "^7.1.1" - graceful-fs "^4.1.2" - http-proxy "^1.13.0" - isbinaryfile "^3.0.0" - lodash "^3.8.0" - log4js "^0.6.31" - mime "^1.3.4" - minimatch "^3.0.0" - optimist "^0.6.1" - qjobs "^1.1.4" - range-parser "^1.2.0" - rimraf "^2.3.3" - safe-buffer "^5.0.1" - socket.io "1.7.2" - source-map "^0.5.3" - tmp "0.0.28" - useragent "^2.1.10" - -kew@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/kew/-/kew-0.7.0.tgz#79d93d2d33363d6fdd2970b335d9141ad591d79b" - -kind-of@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-2.0.1.tgz#018ec7a4ce7e3a86cb9141be519d24c8faa981b5" - dependencies: - is-buffer "^1.0.2" - -kind-of@^3.0.2, kind-of@^3.2.2: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - dependencies: - is-buffer "^1.1.5" - -kind-of@^6.0.0: - version "6.0.2" - 
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" - -klaw@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/klaw/-/klaw-1.3.1.tgz#4088433b46b3b1ba259d78785d8e96f73ba02439" - optionalDependencies: - graceful-fs "^4.1.9" - -latest-version@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-3.1.0.tgz#a205383fea322b33b5ae3b18abee0dc2f356ee15" - dependencies: - package-json "^4.0.0" - -lazy-cache@^0.2.3: - version "0.2.7" - resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-0.2.7.tgz#7feddf2dcb6edb77d11ef1d117ab5ffdf0ab1b65" - -lazy-cache@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" - -lazy-req@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lazy-req/-/lazy-req-2.0.0.tgz#c9450a363ecdda2e6f0c70132ad4f37f8f06f2b4" - -lcid@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" - dependencies: - invert-kv "^1.0.0" - -less-loader@^4.0.5: - version "4.0.5" - resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-4.0.5.tgz#ae155a7406cac6acd293d785587fcff0f478c4dd" - dependencies: - clone "^2.1.1" - loader-utils "^1.1.0" - pify "^2.3.0" - -less@^2.7.2: - version "2.7.2" - resolved "https://registry.yarnpkg.com/less/-/less-2.7.2.tgz#368d6cc73e1fb03981183280918743c5dcf9b3df" - optionalDependencies: - errno "^0.1.1" - graceful-fs "^4.1.2" - image-size "~0.5.0" - mime "^1.2.11" - mkdirp "^0.5.0" - promise "^7.1.1" - request "^2.72.0" - source-map "^0.5.3" - -license-webpack-plugin@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/license-webpack-plugin/-/license-webpack-plugin-1.0.0.tgz#9515229075bacce8ec420cadf99a54a5f78cc7df" - dependencies: - ejs "^2.5.7" - -load-json-file@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - pinkie-promise "^2.0.0" - strip-bom "^2.0.0" - -load-json-file@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - strip-bom "^3.0.0" - -loader-runner@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.3.0.tgz#f482aea82d543e07921700d5a46ef26fdac6b8a2" - -loader-utils@^0.2.15, loader-utils@^0.2.16, loader-utils@~0.2.2: - version "0.2.17" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" - dependencies: - big.js "^3.1.3" - emojis-list "^2.0.0" - json5 "^0.5.0" - object-assign "^4.0.1" - -loader-utils@^1.0.1, loader-utils@^1.0.2, loader-utils@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.1.0.tgz#c98aef488bcceda2ffb5e2de646d6a754429f5cd" - dependencies: - big.js "^3.1.3" - emojis-list "^2.0.0" - json5 "^0.5.0" - -locate-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" - dependencies: - p-locate "^2.0.0" - path-exists "^3.0.0" - -lodash.assign@^4.2.0: - version "4.2.0" - resolved 
"https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" - -lodash.camelcase@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" - -lodash.clonedeep@^4.3.2, lodash.clonedeep@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" - -lodash.memoize@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - -lodash.mergewith@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.mergewith/-/lodash.mergewith-4.6.0.tgz#150cf0a16791f5903b8891eab154609274bdea55" - -lodash.tail@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lodash.tail/-/lodash.tail-4.1.1.tgz#d2333a36d9e7717c8ad2f7cacafec7c32b444664" - -lodash.uniq@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - -lodash@^3.8.0: - version "3.10.1" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" - -lodash@^4.0.0, lodash@^4.0.1, lodash@^4.11.1, lodash@^4.14.0, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.0, lodash@~4.17.4: - version "4.17.4" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" - -log4js@^0.6.31: - version "0.6.38" - resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd" - dependencies: - readable-stream "~1.0.2" - semver "~4.3.3" - -loglevel@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.4.1.tgz#95b383f91a3c2756fd4ab093667e4309161f2bcd" - -longest@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" - -loose-envify@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" - dependencies: - js-tokens "^3.0.0" - -loud-rejection@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" - dependencies: - currently-unhandled "^0.4.1" - signal-exit "^3.0.0" - -lower-case@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" - -lowercase-keys@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" - -lru-cache@2.2.x: - version "2.2.4" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.2.4.tgz#6c658619becf14031d0d0b594b16042ce4dc063d" - -lru-cache@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e" - dependencies: - pseudomap "^1.0.1" - yallist "^2.0.0" - -lru-cache@^4.0.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -macaddress@^0.2.8: - version "0.2.8" - resolved 
"https://registry.yarnpkg.com/macaddress/-/macaddress-0.2.8.tgz#5904dc537c39ec6dbefeae902327135fa8511f12" - -magic-string@^0.22.3: - version "0.22.4" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.22.4.tgz#31039b4e40366395618c1d6cf8193c53917475ff" - dependencies: - vlq "^0.2.1" - -make-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.0.0.tgz#97a011751e91dd87cfadef58832ebb04936de978" - dependencies: - pify "^2.3.0" - -make-error@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.0.tgz#52ad3a339ccf10ce62b4040b708fe707244b8b96" - -map-obj@^1.0.0, map-obj@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" - -math-expression-evaluator@^1.2.14: - version "1.2.17" - resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.2.17.tgz#de819fdbcd84dccd8fae59c6aeb79615b9d266ac" - -math-random@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.1.tgz#8b3aac588b8a66e4975e3cdea67f7bb329601fac" - -md5.js@^1.3.4: - version "1.3.4" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.4.tgz#e9bdbde94a20a5ac18b04340fc5764d5b09d901d" - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - -md5@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/md5/-/md5-2.2.1.tgz#53ab38d5fe3c8891ba465329ea23fac0540126f9" - dependencies: - charenc "~0.0.1" - crypt "~0.0.1" - is-buffer "~1.1.1" - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - -mem@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" - dependencies: - mimic-fn "^1.0.0" - -memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - -meow@^3.3.0, meow@^3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" - dependencies: - camelcase-keys "^2.0.0" - decamelize "^1.1.2" - loud-rejection "^1.0.0" - map-obj "^1.0.1" - minimist "^1.1.3" - normalize-package-data "^2.3.4" - object-assign "^4.0.1" - read-pkg-up "^1.0.1" - redent "^1.0.0" - trim-newlines "^1.0.0" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - -micromatch@^2.1.5, micromatch@^2.3.11: - version "2.3.11" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" - dependencies: - arr-diff "^2.0.0" - array-unique "^0.2.1" - braces "^1.8.2" - expand-brackets "^0.1.4" - extglob "^0.3.1" - filename-regex "^2.0.0" - is-extglob "^1.0.0" - is-glob "^2.0.1" - kind-of "^3.0.2" - normalize-path "^2.0.1" - object.omit "^2.0.0" - parse-glob "^3.0.4" - regex-cache "^0.4.2" - -miller-rabin@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.0.tgz#4a62fb1d42933c05583982f4c716f6fb9e6c6d3d" - 
dependencies: - bn.js "^4.0.0" - brorand "^1.0.1" - -"mime-db@>= 1.29.0 < 2", mime-db@~1.30.0: - version "1.30.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.30.0.tgz#74c643da2dd9d6a45399963465b26d5ca7d71f01" - -mime-db@~1.27.0: - version "1.27.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.27.0.tgz#820f572296bbd20ec25ed55e5b5de869e5436eb1" - -mime-types@^2.1.12, mime-types@~2.1.11, mime-types@~2.1.15, mime-types@~2.1.7: - version "2.1.15" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.15.tgz#a4ebf5064094569237b8cf70046776d09fc92aed" - dependencies: - mime-db "~1.27.0" - -mime-types@~2.1.16: - version "2.1.17" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.17.tgz#09d7a393f03e995a79f8af857b70a9e0ab16557a" - dependencies: - mime-db "~1.30.0" - -mime@1.3.4: - version "1.3.4" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53" - -mime@1.3.x, mime@^1.3.4: - version "1.3.6" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.6.tgz#591d84d3653a6b0b4a3b9df8de5aa8108e72e5e0" - -mime@^1.2.11: - version "1.4.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.4.0.tgz#69e9e0db51d44f2a3b56e48b7817d7d137f1a343" - -mimic-fn@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18" - -minimalistic-assert@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.0.tgz#702be2dda6b37f4836bcb3f5db56641b64a1d3d3" - -minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - -"minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4, minimatch@~3.0.2: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - dependencies: - brace-expansion "^1.1.7" - -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - -minimist@^1.1.3, minimist@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" - -minimist@~0.0.1: - version "0.0.10" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" - -mixin-object@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/mixin-object/-/mixin-object-2.0.1.tgz#4fb949441dab182540f1fe035ba60e1947a5e57e" - dependencies: - for-in "^0.1.3" - is-extendable "^0.1.1" - -mkdirp@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.0.tgz#1d73076a6df986cd9344e15e71fcc05a4c9abf12" - dependencies: - minimist "0.0.8" - -mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - dependencies: - minimist "0.0.8" - -moment-timezone@^0.5.13: - version "0.5.13" - resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.13.tgz#99ce5c7d827262eb0f1f702044177f60745d7b90" - dependencies: - moment ">= 2.9.0" - -moment@*, "moment@>= 2.9.0", moment@>=2.14.0, moment@^2.16.0, moment@^2.18.1: - version 
"2.18.1" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.18.1.tgz#c36193dd3ce1c2eed2adb7c802dbbc77a81b1c0f" - -ms@0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098" - -ms@0.7.2: - version "0.7.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - -multicast-dns-service-types@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" - -multicast-dns@^6.0.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.1.1.tgz#6e7de86a570872ab17058adea7160bbeca814dde" - dependencies: - dns-packet "^1.0.1" - thunky "^0.1.0" - -nan@^2.3.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45" - -nan@^2.3.2: - version "2.7.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.7.0.tgz#d95bf721ec877e08db276ed3fc6eb78f9083ad46" - -ncname@1.0.x: - version "1.0.0" - resolved "https://registry.yarnpkg.com/ncname/-/ncname-1.0.0.tgz#5b57ad18b1ca092864ef62b0b1ed8194f383b71c" - dependencies: - xml-char-classes "^1.0.0" - -negotiator@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" - -ngx-bootstrap@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/ngx-bootstrap/-/ngx-bootstrap-2.0.5.tgz#83aab39d1e4fe811fad2b34f7927f9ce19d68daa" - -no-case@^2.2.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.1.tgz#7aeba1c73a52184265554b7dc03baf720df80081" - dependencies: - lower-case "^1.1.1" - -node-dir@^0.1.10: - version "0.1.17" - resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5" - dependencies: - minimatch "^3.0.2" - -node-forge@0.6.33: - version "0.6.33" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.6.33.tgz#463811879f573d45155ad6a9f43dc296e8e85ebc" - -node-gyp@^3.3.1: - version "3.6.2" - resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-3.6.2.tgz#9bfbe54562286284838e750eac05295853fa1c60" - dependencies: - fstream "^1.0.0" - glob "^7.0.3" - graceful-fs "^4.1.2" - minimatch "^3.0.2" - mkdirp "^0.5.0" - nopt "2 || 3" - npmlog "0 || 1 || 2 || 3 || 4" - osenv "0" - request "2" - rimraf "2" - semver "~5.3.0" - tar "^2.0.0" - which "1" - -node-libs-browser@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.0.0.tgz#a3a59ec97024985b46e958379646f96c4b616646" - dependencies: - assert "^1.1.1" - browserify-zlib "^0.1.4" - buffer "^4.3.0" - console-browserify "^1.1.0" - constants-browserify "^1.0.0" - crypto-browserify "^3.11.0" - domain-browser "^1.1.1" - events "^1.0.0" - https-browserify "0.0.1" - os-browserify "^0.2.0" - path-browserify "0.0.0" - process "^0.11.0" - punycode "^1.2.4" - querystring-es3 "^0.2.0" - readable-stream "^2.0.5" - stream-browserify "^2.0.1" - stream-http "^2.3.1" - string_decoder "^0.10.25" - timers-browserify "^2.0.2" - tty-browserify "0.0.0" - url "^0.11.0" - util "^0.10.3" - vm-browserify "0.0.4" - -node-modules-path@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/node-modules-path/-/node-modules-path-1.0.1.tgz#40096b08ce7ad0ea14680863af449c7c75a5d1c8" - -node-pre-gyp@^0.6.29: - version "0.6.34" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.34.tgz#94ad1c798a11d7fc67381b50d47f8cc18d9799f7" - dependencies: - mkdirp "^0.5.1" - nopt "^4.0.1" - npmlog "^4.0.2" - rc "^1.1.7" - request "^2.81.0" - rimraf "^2.6.1" - semver "^5.3.0" - tar "^2.2.1" - tar-pack "^3.4.0" - -node-sass@^4.3.0: - version "4.5.3" - resolved "https://registry.yarnpkg.com/node-sass/-/node-sass-4.5.3.tgz#d09c9d1179641239d1b97ffc6231fdcec53e1568" - dependencies: - async-foreach "^0.1.3" - chalk "^1.1.1" - cross-spawn "^3.0.0" - gaze "^1.0.0" - get-stdin "^4.0.1" - glob "^7.0.3" - in-publish "^2.0.0" - lodash.assign "^4.2.0" - lodash.clonedeep "^4.3.2" - lodash.mergewith "^4.6.0" - meow "^3.7.0" - mkdirp "^0.5.1" - nan "^2.3.2" - node-gyp "^3.3.1" - npmlog "^4.0.0" - request "^2.79.0" - sass-graph "^2.1.1" - stdout-stream "^1.4.0" - -"nopt@2 || 3": - version "3.0.6" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" - dependencies: - abbrev "1" - -nopt@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" - dependencies: - abbrev "1" - osenv "^0.1.4" - -normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: - version "2.4.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" - dependencies: - hosted-git-info "^2.1.4" - is-builtin-module "^1.0.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - -normalize-path@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - dependencies: - remove-trailing-separator "^1.0.1" - -normalize-range@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - -normalize-url@^1.4.0: - version "1.9.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-1.9.1.tgz#2cc0d66b31ea23036458436e3620d85954c66c3c" - dependencies: - object-assign "^4.0.1" - prepend-http "^1.0.0" - query-string "^4.1.0" - sort-keys "^1.0.0" - -npm-run-path@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-1.0.0.tgz#f5c32bf595fe81ae927daec52e82f8b000ac3c8f" - dependencies: - path-key "^1.0.0" - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - dependencies: - path-key "^2.0.0" - -"npmlog@0 || 1 || 2 || 3 || 4", npmlog@^4.0.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" - dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" - -npmlog@^4.0.2: - version "4.1.0" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.0.tgz#dc59bee85f64f00ed424efb2af0783df25d1c0b5" - dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" - -nth-check@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.1.tgz#9929acdf628fc2c41098deab82ac580cf149aae4" - dependencies: - boolbase "~1.0.0" - 
-num2fraction@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" - -number-is-nan@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" - -oauth-sign@~0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" - -object-assign@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.0.tgz#7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0" - -object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - -object-component@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" - -object-keys@^1.0.8: - version "1.0.11" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d" - -object.omit@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" - dependencies: - for-own "^0.1.4" - is-extendable "^0.1.1" - -obuf@^1.0.0, obuf@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.1.tgz#104124b6c602c6796881a042541d36db43a5264e" - -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.1.tgz#928f5d0f470d49342651ea6794b0857c100693f7" - -once@^1.3.0, once@^1.3.3, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - dependencies: - wrappy "1" - -opn@4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/opn/-/opn-4.0.2.tgz#7abc22e644dff63b0a96d5ab7f2790c0f01abc95" - dependencies: - object-assign "^4.0.1" - pinkie-promise "^2.0.0" - -opn@^5.1.0, opn@~5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/opn/-/opn-5.1.0.tgz#72ce2306a17dbea58ff1041853352b4a8fc77519" - dependencies: - is-wsl "^1.1.0" - -optimist@^0.6.1, optimist@~0.6.0: - version "0.6.1" - resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" - dependencies: - minimist "~0.0.1" - wordwrap "~0.0.2" - -optimist@~0.3, optimist@~0.3.5: - version "0.3.7" - resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.3.7.tgz#c90941ad59e4273328923074d2cf2e7cbc6ec0d9" - dependencies: - wordwrap "~0.0.2" - -options@>=0.0.5: - version "0.0.6" - resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f" - -original@>=0.0.5: - version "1.0.0" - resolved "https://registry.yarnpkg.com/original/-/original-1.0.0.tgz#9147f93fa1696d04be61e01bd50baeaca656bd3b" - dependencies: - url-parse "1.0.x" - -os-browserify@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.2.1.tgz#63fc4ccee5d2d7763d26bbf8601078e6c2e0044f" - -os-homedir@^1.0.0, os-homedir@^1.0.1: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - -os-locale@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" - dependencies: - lcid "^1.0.0" - -os-locale@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" - dependencies: - execa "^0.7.0" - lcid "^1.0.0" - mem "^1.1.0" - -os-tmpdir@^1.0.0, os-tmpdir@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - -osenv@0, osenv@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644" - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - -p-limit@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc" - -p-locate@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" - dependencies: - p-limit "^1.1.0" - -p-map@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.1.1.tgz#05f5e4ae97a068371bc2a5cc86bfbdbc19c4ae7a" - -package-json@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/package-json/-/package-json-4.0.1.tgz#8869a0401253661c4c4ca3da6c2121ed555f5eed" - dependencies: - got "^6.7.1" - registry-auth-token "^3.0.1" - registry-url "^3.0.3" - semver "^5.1.0" - -pako@~0.2.0: - version "0.2.9" - resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" - -param-case@2.1.x: - version "2.1.1" - resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" - dependencies: - no-case "^2.2.0" - -parse-asn1@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.0.tgz#37c4f9b7ed3ab65c74817b5f2480937fbf97c712" - dependencies: - asn1.js "^4.0.0" - browserify-aes "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.0" - pbkdf2 "^3.0.3" - -parse-glob@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" - dependencies: - glob-base "^0.3.0" - is-dotfile "^1.0.0" - is-extglob "^1.0.0" - is-glob "^2.0.0" - -parse-json@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" - dependencies: - error-ex "^1.2.0" - -parsejson@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/parsejson/-/parsejson-0.0.3.tgz#ab7e3759f209ece99437973f7d0f1f64ae0e64ab" - dependencies: - better-assert "~1.0.0" - -parseqs@0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" - dependencies: - better-assert "~1.0.0" - -parseuri@0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" - dependencies: - better-assert "~1.0.0" - -parseurl@~1.3.1: - version "1.3.1" - resolved 
"https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.1.tgz#c8ab8c9223ba34888aa64a297b28853bec18da56" - -path-browserify@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.0.tgz#a0b870729aae214005b7d5032ec2cbbb0fb4451a" - -path-exists@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" - dependencies: - pinkie-promise "^2.0.0" - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - -path-is-inside@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" - -path-key@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-1.0.0.tgz#5d53d578019646c0d68800db4e146e6bdc2ac7af" - -path-key@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - -path-parse@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - -path-type@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" - dependencies: - graceful-fs "^4.1.2" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -path-type@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" - dependencies: - pify "^2.0.0" - -pbkdf2@^3.0.3: - version "3.0.13" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.13.tgz#c37d295531e786b1da3e3eadc840426accb0ae25" - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -pend@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" - -performance-now@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" - -phantomjs-prebuilt@^2.1.7: - version "2.1.15" - resolved "https://registry.yarnpkg.com/phantomjs-prebuilt/-/phantomjs-prebuilt-2.1.15.tgz#20f86e82d3349c505917527745b7a411e08b3903" - dependencies: - es6-promise "~4.0.3" - extract-zip "~1.6.5" - fs-extra "~1.0.0" - hasha "~2.2.0" - kew "~0.7.0" - progress "~1.1.8" - request "~2.81.0" - request-progress "~2.0.1" - which "~1.2.10" - -pify@^2.0.0, pify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0, pinkie@^2.0.4: - version "2.0.4" - resolved 
"https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - -portfinder@^1.0.9, portfinder@~1.0.12: - version "1.0.13" - resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.13.tgz#bb32ecd87c27104ae6ee44b5a3ccbf0ebb1aede9" - dependencies: - async "^1.5.2" - debug "^2.2.0" - mkdirp "0.5.x" - -postcss-calc@^5.2.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-5.3.1.tgz#77bae7ca928ad85716e2fda42f261bf7c1d65b5e" - dependencies: - postcss "^5.0.2" - postcss-message-helpers "^2.0.0" - reduce-css-calc "^1.2.6" - -postcss-colormin@^2.1.8: - version "2.2.2" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-2.2.2.tgz#6631417d5f0e909a3d7ec26b24c8a8d1e4f96e4b" - dependencies: - colormin "^1.0.5" - postcss "^5.0.13" - postcss-value-parser "^3.2.3" - -postcss-convert-values@^2.3.4: - version "2.6.1" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-2.6.1.tgz#bbd8593c5c1fd2e3d1c322bb925dcae8dae4d62d" - dependencies: - postcss "^5.0.11" - postcss-value-parser "^3.1.2" - -postcss-discard-comments@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-2.0.4.tgz#befe89fafd5b3dace5ccce51b76b81514be00e3d" - dependencies: - postcss "^5.0.14" - -postcss-discard-duplicates@^2.0.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-2.1.0.tgz#b9abf27b88ac188158a5eb12abcae20263b91932" - dependencies: - postcss "^5.0.4" - -postcss-discard-empty@^2.0.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-2.1.0.tgz#d2b4bd9d5ced5ebd8dcade7640c7d7cd7f4f92b5" - dependencies: - postcss "^5.0.14" - -postcss-discard-overridden@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-0.1.1.tgz#8b1eaf554f686fb288cd874c55667b0aa3668d58" - dependencies: - postcss "^5.0.16" - -postcss-discard-unused@^2.2.1: - version "2.2.3" - resolved "https://registry.yarnpkg.com/postcss-discard-unused/-/postcss-discard-unused-2.2.3.tgz#bce30b2cc591ffc634322b5fb3464b6d934f4433" - dependencies: - postcss "^5.0.14" - uniqs "^2.0.0" - -postcss-filter-plugins@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/postcss-filter-plugins/-/postcss-filter-plugins-2.0.2.tgz#6d85862534d735ac420e4a85806e1f5d4286d84c" - dependencies: - postcss "^5.0.4" - uniqid "^4.0.0" - -postcss-load-config@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-1.2.0.tgz#539e9afc9ddc8620121ebf9d8c3673e0ce50d28a" - dependencies: - cosmiconfig "^2.1.0" - object-assign "^4.1.0" - postcss-load-options "^1.2.0" - postcss-load-plugins "^2.3.0" - -postcss-load-options@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/postcss-load-options/-/postcss-load-options-1.2.0.tgz#b098b1559ddac2df04bc0bb375f99a5cfe2b6d8c" - dependencies: - cosmiconfig "^2.1.0" - object-assign "^4.1.0" - -postcss-load-plugins@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/postcss-load-plugins/-/postcss-load-plugins-2.3.0.tgz#745768116599aca2f009fad426b00175049d8d92" - dependencies: - cosmiconfig "^2.1.1" - object-assign "^4.1.0" - -postcss-loader@^1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-1.3.3.tgz#a621ea1fa29062a83972a46f54486771301916eb" - 
dependencies: - loader-utils "^1.0.2" - object-assign "^4.1.1" - postcss "^5.2.15" - postcss-load-config "^1.2.0" - -postcss-merge-idents@^2.1.5: - version "2.1.7" - resolved "https://registry.yarnpkg.com/postcss-merge-idents/-/postcss-merge-idents-2.1.7.tgz#4c5530313c08e1d5b3bbf3d2bbc747e278eea270" - dependencies: - has "^1.0.1" - postcss "^5.0.10" - postcss-value-parser "^3.1.1" - -postcss-merge-longhand@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-2.0.2.tgz#23d90cd127b0a77994915332739034a1a4f3d658" - dependencies: - postcss "^5.0.4" - -postcss-merge-rules@^2.0.3: - version "2.1.2" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-2.1.2.tgz#d1df5dfaa7b1acc3be553f0e9e10e87c61b5f721" - dependencies: - browserslist "^1.5.2" - caniuse-api "^1.5.2" - postcss "^5.0.4" - postcss-selector-parser "^2.2.2" - vendors "^1.0.0" - -postcss-message-helpers@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-message-helpers/-/postcss-message-helpers-2.0.0.tgz#a4f2f4fab6e4fe002f0aed000478cdf52f9ba60e" - -postcss-minify-font-values@^1.0.2: - version "1.0.5" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-1.0.5.tgz#4b58edb56641eba7c8474ab3526cafd7bbdecb69" - dependencies: - object-assign "^4.0.1" - postcss "^5.0.4" - postcss-value-parser "^3.0.2" - -postcss-minify-gradients@^1.0.1: - version "1.0.5" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-1.0.5.tgz#5dbda11373703f83cfb4a3ea3881d8d75ff5e6e1" - dependencies: - postcss "^5.0.12" - postcss-value-parser "^3.3.0" - -postcss-minify-params@^1.0.4: - version "1.2.2" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-1.2.2.tgz#ad2ce071373b943b3d930a3fa59a358c28d6f1f3" - dependencies: - alphanum-sort "^1.0.1" - postcss "^5.0.2" - postcss-value-parser "^3.0.2" - uniqs "^2.0.0" - -postcss-minify-selectors@^2.0.4: - version "2.1.1" - resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-2.1.1.tgz#b2c6a98c0072cf91b932d1a496508114311735bf" - dependencies: - alphanum-sort "^1.0.2" - has "^1.0.1" - postcss "^5.0.14" - postcss-selector-parser "^2.0.0" - -postcss-modules-extract-imports@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.0.tgz#66140ecece38ef06bf0d3e355d69bf59d141ea85" - dependencies: - postcss "^6.0.1" - -postcss-modules-local-by-default@^1.0.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz#f7d80c398c5a393fa7964466bd19500a7d61c069" - dependencies: - css-selector-tokenizer "^0.7.0" - postcss "^6.0.1" - -postcss-modules-scope@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz#d6ea64994c79f97b62a72b426fbe6056a194bb90" - dependencies: - css-selector-tokenizer "^0.7.0" - postcss "^6.0.1" - -postcss-modules-values@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz#ecffa9d7e192518389f42ad0e83f72aec456ea20" - dependencies: - icss-replace-symbols "^1.1.0" - postcss "^6.0.1" - -postcss-normalize-charset@^1.1.0: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-1.1.1.tgz#ef9ee71212d7fe759c78ed162f61ed62b5cb93f1" - dependencies: - postcss "^5.0.5" - -postcss-normalize-url@^3.0.7: - version "3.0.8" - resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-3.0.8.tgz#108f74b3f2fcdaf891a2ffa3ea4592279fc78222" - dependencies: - is-absolute-url "^2.0.0" - normalize-url "^1.4.0" - postcss "^5.0.14" - postcss-value-parser "^3.2.3" - -postcss-ordered-values@^2.1.0: - version "2.2.3" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-2.2.3.tgz#eec6c2a67b6c412a8db2042e77fe8da43f95c11d" - dependencies: - postcss "^5.0.4" - postcss-value-parser "^3.0.1" - -postcss-reduce-idents@^2.2.2: - version "2.4.0" - resolved "https://registry.yarnpkg.com/postcss-reduce-idents/-/postcss-reduce-idents-2.4.0.tgz#c2c6d20cc958284f6abfbe63f7609bf409059ad3" - dependencies: - postcss "^5.0.4" - postcss-value-parser "^3.0.2" - -postcss-reduce-initial@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-1.0.1.tgz#68f80695f045d08263a879ad240df8dd64f644ea" - dependencies: - postcss "^5.0.4" - -postcss-reduce-transforms@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-1.0.4.tgz#ff76f4d8212437b31c298a42d2e1444025771ae1" - dependencies: - has "^1.0.1" - postcss "^5.0.8" - postcss-value-parser "^3.0.1" - -postcss-selector-parser@^2.0.0, postcss-selector-parser@^2.2.2: - version "2.2.3" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-2.2.3.tgz#f9437788606c3c9acee16ffe8d8b16297f27bb90" - dependencies: - flatten "^1.0.2" - indexes-of "^1.0.1" - uniq "^1.0.1" - -postcss-svgo@^2.1.1: - version "2.1.6" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-2.1.6.tgz#b6df18aa613b666e133f08adb5219c2684ac108d" - dependencies: - is-svg "^2.0.0" - postcss "^5.0.14" - postcss-value-parser "^3.2.3" - svgo "^0.7.0" - -postcss-unique-selectors@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-2.0.2.tgz#981d57d29ddcb33e7b1dfe1fd43b8649f933ca1d" - dependencies: - alphanum-sort "^1.0.1" - postcss "^5.0.4" - uniqs "^2.0.0" - -postcss-url@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/postcss-url/-/postcss-url-5.1.2.tgz#98b3165be8d592471cb0caadde2c0d1f832f133e" - dependencies: - directory-encoder "^0.7.2" - js-base64 "^2.1.5" - mime "^1.2.11" - minimatch "^3.0.0" - mkdirp "^0.5.0" - path-is-absolute "^1.0.0" - postcss "^5.0.0" - -postcss-value-parser@^3.0.1, postcss-value-parser@^3.0.2, postcss-value-parser@^3.1.1, postcss-value-parser@^3.1.2, postcss-value-parser@^3.2.3, postcss-value-parser@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz#87f38f9f18f774a4ab4c8a232f5c5ce8872a9d15" - -postcss-zindex@^2.0.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-2.2.0.tgz#d2109ddc055b91af67fc4cb3b025946639d2af22" - dependencies: - has "^1.0.1" - postcss "^5.0.4" - uniqs "^2.0.0" - -postcss@^5.0.0, postcss@^5.0.10, postcss@^5.0.11, postcss@^5.0.12, postcss@^5.0.13, postcss@^5.0.14, postcss@^5.0.16, postcss@^5.0.2, postcss@^5.0.4, postcss@^5.0.5, postcss@^5.0.6, postcss@^5.0.8, postcss@^5.2.15, postcss@^5.2.16: - version "5.2.17" - resolved 
"https://registry.yarnpkg.com/postcss/-/postcss-5.2.17.tgz#cf4f597b864d65c8a492b2eabe9d706c879c388b" - dependencies: - chalk "^1.1.3" - js-base64 "^2.1.9" - source-map "^0.5.6" - supports-color "^3.2.3" - -postcss@^6.0.1: - version "6.0.11" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.11.tgz#f48db210b1d37a7f7ab6499b7a54982997ab6f72" - dependencies: - chalk "^2.1.0" - source-map "^0.5.7" - supports-color "^4.4.0" - -prepend-http@^1.0.0, prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - -preserve@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" - -pretty-error@^2.0.2: - version "2.1.1" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" - dependencies: - renderkid "^2.0.1" - utila "~0.4" - -process-nextick-args@~1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" - -process@^0.11.0: - version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" - -progress@~1.1.8: - version "1.1.8" - resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be" - -promise@^7.1.1: - version "7.3.1" - resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" - dependencies: - asap "~2.0.3" - -protractor@~5.1.0: - version "5.1.2" - resolved "https://registry.yarnpkg.com/protractor/-/protractor-5.1.2.tgz#9b221741709a4c62d5cd53c6aadd54a71137e95f" - dependencies: - "@types/node" "^6.0.46" - "@types/q" "^0.0.32" - "@types/selenium-webdriver" "~2.53.39" - blocking-proxy "0.0.5" - chalk "^1.1.3" - glob "^7.0.3" - jasmine "^2.5.3" - jasminewd2 "^2.1.0" - optimist "~0.6.0" - q "1.4.1" - saucelabs "~1.3.0" - selenium-webdriver "3.0.1" - source-map-support "~0.4.0" - webdriver-js-extender "^1.0.0" - webdriver-manager "^12.0.6" - -proxy-addr@~1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-1.1.5.tgz#71c0ee3b102de3f202f3b64f608d173fcba1a918" - dependencies: - forwarded "~0.1.0" - ipaddr.js "1.4.0" - -prr@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" - -pseudomap@^1.0.1, pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - -public-encrypt@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.0.tgz#39f699f3a46560dd5ebacbca693caf7c65c18cc6" - dependencies: - bn.js "^4.1.0" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - parse-asn1 "^5.0.0" - randombytes "^2.0.1" - -punycode@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" - -punycode@^1.2.4, punycode@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - -q@1.4.1, q@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/q/-/q-1.4.1.tgz#55705bcd93c5f3673530c2c2cbc0c2b3addc286e" - -q@^1.1.2: - version "1.5.0" - resolved 
"https://registry.yarnpkg.com/q/-/q-1.5.0.tgz#dd01bac9d06d30e6f219aecb8253ee9ebdc308f1" - -qjobs@^1.1.4: - version "1.1.5" - resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.1.5.tgz#659de9f2cf8dcc27a1481276f205377272382e73" - -qs@6.4.0, qs@~6.4.0: - version "6.4.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" - -qs@6.5.0: - version "6.5.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.0.tgz#8d04954d364def3efc55b5a0793e1e2c8b1e6e49" - -query-string@^4.1.0: - version "4.3.4" - resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb" - dependencies: - object-assign "^4.1.0" - strict-uri-encode "^1.0.0" - -querystring-es3@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" - -querystring@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - -querystringify@0.0.x: - version "0.0.4" - resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-0.0.4.tgz#0cf7f84f9463ff0ae51c4c4b142d95be37724d9c" - -querystringify@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-1.0.0.tgz#6286242112c5b712fa654e526652bf6a13ff05cb" - -randomatic@^1.1.3: - version "1.1.6" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.6.tgz#110dcabff397e9dcff7c0789ccc0a49adf1ec5bb" - dependencies: - is-number "^2.0.2" - kind-of "^3.0.2" - -randomatic@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.0.0.tgz#d35490030eb4f7578de292ce6dfb04a91a128923" - dependencies: - is-number "^4.0.0" - kind-of "^6.0.0" - math-random "^1.0.1" - -randombytes@^2.0.0, randombytes@^2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.0.5.tgz#dc009a246b8d09a177b4b7a0ae77bc570f4b1b79" - dependencies: - safe-buffer "^5.1.0" - -range-parser@^1.0.3, range-parser@^1.2.0, range-parser@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e" - -raw-body@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.2.0.tgz#994976cf6a5096a41162840492f0bdc5d6e7fb96" - dependencies: - bytes "2.4.0" - iconv-lite "0.4.15" - unpipe "1.0.0" - -raw-loader@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-0.5.1.tgz#0c3d0beaed8a01c966d9787bf778281252a979aa" - -rc@^1.0.1, rc@^1.1.6, rc@^1.1.7: - version "1.2.1" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.1.tgz#2e03e8e42ee450b8cb3dce65be1bf8974e1dfd95" - dependencies: - deep-extend "~0.4.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - -read-pkg-up@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" - dependencies: - find-up "^1.0.0" - read-pkg "^1.0.0" - -read-pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" - dependencies: - find-up "^2.0.0" - read-pkg "^2.0.0" - -read-pkg@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" - dependencies: - load-json-file "^1.0.0" - 
normalize-package-data "^2.3.2" - path-type "^1.0.0" - -read-pkg@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" - dependencies: - load-json-file "^2.0.0" - normalize-package-data "^2.3.2" - path-type "^2.0.0" - -readable-stream@1.0, readable-stream@~1.0.2: - version "1.0.34" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "0.0.1" - string_decoder "~0.10.x" - -readable-stream@^2.0.1, readable-stream@^2.0.5, readable-stream@^2.2.2, readable-stream@^2.2.6, readable-stream@^2.2.9: - version "2.3.3" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~1.0.6" - safe-buffer "~5.1.1" - string_decoder "~1.0.3" - util-deprecate "~1.0.1" - -readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.4: - version "2.2.9" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.9.tgz#cf78ec6f4a6d1eb43d26488cac97f042e74b7fc8" - dependencies: - buffer-shims "~1.0.0" - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "~1.0.0" - process-nextick-args "~1.0.6" - string_decoder "~1.0.0" - util-deprecate "~1.0.1" - -readdirp@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78" - dependencies: - graceful-fs "^4.1.2" - minimatch "^3.0.2" - readable-stream "^2.0.2" - set-immediate-shim "^1.0.1" - -redent@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" - dependencies: - indent-string "^2.1.0" - strip-indent "^1.0.1" - -reduce-css-calc@^1.2.6: - version "1.3.0" - resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz#747c914e049614a4c9cfbba629871ad1d2927716" - dependencies: - balanced-match "^0.4.2" - math-expression-evaluator "^1.2.14" - reduce-function-call "^1.0.1" - -reduce-function-call@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/reduce-function-call/-/reduce-function-call-1.0.2.tgz#5a200bf92e0e37751752fe45b0ab330fd4b6be99" - dependencies: - balanced-match "^0.4.2" - -reflect-metadata@^0.1.2: - version "0.1.10" - resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.10.tgz#b4f83704416acad89988c9b15635d47e03b9344a" - -regenerate@^1.2.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.2.tgz#d1941c67bad437e1be76433add5b385f95b19260" - -regenerator-runtime@^0.10.0: - version "0.10.5" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658" - -regenerator-runtime@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.0.tgz#7e54fe5b5ccd5d6624ea6255c3473be090b802e1" - -regex-cache@^0.4.2: - version "0.4.3" - resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.3.tgz#9b1a6c35d4d0dfcef5711ae651e8e9d3d7114145" - dependencies: - is-equal-shallow "^0.1.3" - is-primitive "^2.0.0" - -regexpu-core@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-1.0.0.tgz#86a763f58ee4d7c2f6b102e4764050de7ed90c6b" - 
dependencies: - regenerate "^1.2.1" - regjsgen "^0.2.0" - regjsparser "^0.1.4" - -registry-auth-token@^3.0.1: - version "3.3.1" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.3.1.tgz#fb0d3289ee0d9ada2cbb52af5dfe66cb070d3006" - dependencies: - rc "^1.1.6" - safe-buffer "^5.0.1" - -registry-url@^3.0.3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" - dependencies: - rc "^1.0.1" - -regjsgen@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" - -regjsparser@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c" - dependencies: - jsesc "~0.5.0" - -relateurl@0.2.x: - version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - -remove-trailing-separator@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.1.tgz#615ebb96af559552d4bf4057c8436d486ab63cc4" - -renderkid@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.1.tgz#898cabfc8bede4b7b91135a3ffd323e58c0db319" - dependencies: - css-select "^1.1.0" - dom-converter "~0.1" - htmlparser2 "~3.3.0" - strip-ansi "^3.0.0" - utila "~0.3" - -repeat-element@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" - -repeat-string@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-0.2.2.tgz#c7a8d3236068362059a7e4651fc6884e8b1fb4ae" - -repeat-string@^1.5.2: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - -repeating@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" - dependencies: - is-finite "^1.0.0" - -request-progress@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/request-progress/-/request-progress-2.0.1.tgz#5d36bb57961c673aa5b788dbc8141fdf23b44e08" - dependencies: - throttleit "^1.0.0" - -request@2, request@^2.72.0, request@^2.78.0, request@^2.79.0, request@^2.81.0, request@~2.81.0: - version "2.81.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" - dependencies: - aws-sign2 "~0.6.0" - aws4 "^1.2.1" - caseless "~0.12.0" - combined-stream "~1.0.5" - extend "~3.0.0" - forever-agent "~0.6.1" - form-data "~2.1.1" - har-validator "~4.2.1" - hawk "~3.1.3" - http-signature "~1.1.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.7" - oauth-sign "~0.8.1" - performance-now "^0.2.0" - qs "~6.4.0" - safe-buffer "^5.0.1" - stringstream "~0.0.4" - tough-cookie "~2.3.0" - tunnel-agent "^0.6.0" - uuid "^3.0.0" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - -require-from-string@^1.1.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-1.2.1.tgz#529c9ccef27380adfec9a2f965b649bbee636418" - -require-main-filename@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" - -requires-port@1.0.x, requires-port@1.x.x: - version "1.0.0" - resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - -resolve@^1.1.6, resolve@^1.1.7: - version "1.3.3" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.3.tgz#655907c3469a8680dc2de3a275a8fdd69691f0e5" - dependencies: - path-parse "^1.0.5" - -right-align@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" - dependencies: - align-text "^0.1.1" - -rimraf@2, rimraf@^2.2.8, rimraf@^2.3.3, rimraf@^2.5.1, rimraf@^2.5.2, rimraf@^2.5.4, rimraf@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" - dependencies: - glob "^7.0.5" - -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.1.tgz#0f4584295c53a3628af7e6d79aca21ce57d1c6e7" - dependencies: - hash-base "^2.0.0" - inherits "^2.0.1" - -rsvp@~3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-3.2.1.tgz#07cb4a5df25add9e826ebc67dcc9fd89db27d84a" - -rw@1: - version "1.3.3" - resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" - -rxjs@^5.1.0: - version "5.4.0" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.0.tgz#a7db14ab157f9d7aac6a56e655e7a3860d39bf26" - dependencies: - symbol-observable "^1.0.1" - -rxjs@^5.4.2, rxjs@^5.4.3: - version "5.4.3" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.3.tgz#0758cddee6033d68e0fd53676f0f3596ce3d483f" - dependencies: - symbol-observable "^1.0.1" - -safe-buffer@5.1.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" - -safe-buffer@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" - -sass-graph@^2.1.1: - version "2.2.4" - resolved "https://registry.yarnpkg.com/sass-graph/-/sass-graph-2.2.4.tgz#13fbd63cd1caf0908b9fd93476ad43a51d1e0b49" - dependencies: - glob "^7.0.0" - lodash "^4.0.0" - scss-tokenizer "^0.2.3" - yargs "^7.0.0" - -sass-loader@^6.0.3: - version "6.0.6" - resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-6.0.6.tgz#e9d5e6c1f155faa32a4b26d7a9b7107c225e40f9" - dependencies: - async "^2.1.5" - clone-deep "^0.3.0" - loader-utils "^1.0.1" - lodash.tail "^4.1.1" - pify "^3.0.0" - -saucelabs@~1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/saucelabs/-/saucelabs-1.3.0.tgz#d240e8009df7fa87306ec4578a69ba3b5c424fee" - dependencies: - https-proxy-agent "^1.0.0" - -sax@0.5.x: - version "0.5.8" - resolved "https://registry.yarnpkg.com/sax/-/sax-0.5.8.tgz#d472db228eb331c2506b0e8c15524adb939d12c1" - -sax@0.6.x: - version "0.6.1" - resolved "https://registry.yarnpkg.com/sax/-/sax-0.6.1.tgz#563b19c7c1de892e09bfc4f2fc30e3c27f0952b9" - -sax@>=0.6.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.2.tgz#fd8631a23bc7826bef5d871bdb87378c95647828" - -sax@~1.2.1: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - -schema-utils@^0.3.0: - 
version "0.3.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf" - dependencies: - ajv "^5.0.0" - -scss-tokenizer@^0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/scss-tokenizer/-/scss-tokenizer-0.2.3.tgz#8eb06db9a9723333824d3f5530641149847ce5d1" - dependencies: - js-base64 "^2.1.8" - source-map "^0.4.2" - -select-hose@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - -selenium-webdriver@3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/selenium-webdriver/-/selenium-webdriver-3.0.1.tgz#a2dea5da4a97f6672e89e7ca7276cefa365147a7" - dependencies: - adm-zip "^0.4.7" - rimraf "^2.5.4" - tmp "0.0.30" - xml2js "^0.4.17" - -selenium-webdriver@^2.53.2: - version "2.53.3" - resolved "https://registry.yarnpkg.com/selenium-webdriver/-/selenium-webdriver-2.53.3.tgz#d29ff5a957dff1a1b49dc457756e4e4bfbdce085" - dependencies: - adm-zip "0.4.4" - rimraf "^2.2.8" - tmp "0.0.24" - ws "^1.0.1" - xml2js "0.4.4" - -selfsigned@^1.9.1: - version "1.10.1" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.1.tgz#bf8cb7b83256c4551e31347c6311778db99eec52" - dependencies: - node-forge "0.6.33" - -semver-diff@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" - dependencies: - semver "^5.0.3" - -semver-dsl@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/semver-dsl/-/semver-dsl-1.0.1.tgz#d3678de5555e8a61f629eed025366ae5f27340a0" - dependencies: - semver "^5.3.0" - -"semver@2 || 3 || 4 || 5": - version "5.4.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" - -semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@~5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" - -semver@~4.3.3: - version "4.3.6" - resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.6.tgz#300bc6e0e86374f7ba61068b5b1ecd57fc6532da" - -semver@~5.0.1: - version "5.0.3" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.0.3.tgz#77466de589cd5d3c95f138aa78bc569a3cb5d27a" - -send@0.15.4: - version "0.15.4" - resolved "https://registry.yarnpkg.com/send/-/send-0.15.4.tgz#985faa3e284b0273c793364a35c6737bd93905b9" - dependencies: - debug "2.6.8" - depd "~1.1.1" - destroy "~1.0.4" - encodeurl "~1.0.1" - escape-html "~1.0.3" - etag "~1.8.0" - fresh "0.5.0" - http-errors "~1.6.2" - mime "1.3.4" - ms "2.0.0" - on-finished "~2.3.0" - range-parser "~1.2.0" - statuses "~1.3.1" - -serve-index@^1.7.2: - version "1.9.0" - resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.0.tgz#d2b280fc560d616ee81b48bf0fa82abed2485ce7" - dependencies: - accepts "~1.3.3" - batch "0.6.1" - debug "2.6.8" - escape-html "~1.0.3" - http-errors "~1.6.1" - mime-types "~2.1.15" - parseurl "~1.3.1" - -serve-static@1.12.4: - version "1.12.4" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.12.4.tgz#9b6aa98eeb7253c4eedc4c1f6fdbca609901a961" - dependencies: - encodeurl "~1.0.1" - escape-html "~1.0.3" - parseurl "~1.3.1" - send "0.15.4" - -set-blocking@^2.0.0, set-blocking@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - -set-immediate-shim@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" - -setimmediate@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" - -setprototypeof@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04" - -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.8" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.8.tgz#37068c2c476b6baf402d14a49c67f597921f634f" - dependencies: - inherits "^2.0.1" - -shallow-clone@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-0.1.2.tgz#5909e874ba77106d73ac414cfec1ffca87d97060" - dependencies: - is-extendable "^0.1.1" - kind-of "^2.0.1" - lazy-cache "^0.2.3" - mixin-object "^2.0.1" - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - dependencies: - shebang-regex "^1.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - -signal-exit@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" - -silent-error@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/silent-error/-/silent-error-1.1.0.tgz#2209706f1c850a9f1d10d0d840918b46f26e1bc9" - dependencies: - debug "^2.2.0" - -slide@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" - -sntp@1.x.x: - version "1.0.9" - resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" - dependencies: - hoek "2.x.x" - -socket.io-adapter@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-0.5.0.tgz#cb6d4bb8bec81e1078b99677f9ced0046066bb8b" - dependencies: - debug "2.3.3" - socket.io-parser "2.3.1" - -socket.io-client@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-1.7.2.tgz#39fdb0c3dd450e321b7e40cfd83612ec533dd644" - dependencies: - backo2 "1.0.2" - component-bind "1.0.0" - component-emitter "1.2.1" - debug "2.3.3" - engine.io-client "1.8.2" - has-binary "0.1.7" - indexof "0.0.1" - object-component "0.0.3" - parseuri "0.0.5" - socket.io-parser "2.3.1" - to-array "0.1.4" - -socket.io-parser@2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-2.3.1.tgz#dd532025103ce429697326befd64005fcfe5b4a0" - dependencies: - component-emitter "1.1.2" - debug "2.2.0" - isarray "0.0.1" - json3 "3.3.2" - -socket.io@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-1.7.2.tgz#83bbbdf2e79263b378900da403e7843e05dc3b71" - dependencies: - debug "2.3.3" - engine.io "1.8.2" - has-binary "0.1.7" - object-assign "4.1.0" - socket.io-adapter "0.5.0" - socket.io-client "1.7.2" - socket.io-parser "2.3.1" - -sockjs-client@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.1.4.tgz#5babe386b775e4cf14e7520911452654016c8b12" - dependencies: - debug "^2.6.6" - eventsource "0.1.6" - faye-websocket "~0.11.0" - inherits "^2.0.1" - json3 "^3.3.2" - url-parse "^1.1.8" - 
-sockjs@0.3.18: - version "0.3.18" - resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.18.tgz#d9b289316ca7df77595ef299e075f0f937eb4207" - dependencies: - faye-websocket "^0.10.0" - uuid "^2.0.2" - -sort-keys@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" - dependencies: - is-plain-obj "^1.0.0" - -source-list-map@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.0.tgz#aaa47403f7b245a92fbc97ea08f250d6087ed085" - -source-map-loader@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-0.2.1.tgz#48126be9230bd47fad05e46a8c3c2e3d2dabe507" - dependencies: - async "^0.9.0" - loader-utils "~0.2.2" - source-map "~0.1.33" - -source-map-support@^0.4.0, source-map-support@^0.4.2, source-map-support@~0.4.0: - version "0.4.15" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.15.tgz#03202df65c06d2bd8c7ec2362a193056fef8d3b1" - dependencies: - source-map "^0.5.6" - -source-map-support@^0.4.1: - version "0.4.17" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.17.tgz#6f2150553e6375375d0ccb3180502b78c18ba430" - dependencies: - source-map "^0.5.6" - -source-map@0.1.x, source-map@~0.1.33, source-map@~0.1.7: - version "0.1.43" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.43.tgz#c24bc146ca517c1471f5dacbe2571b2b7f9e3346" - dependencies: - amdefine ">=0.0.4" - -source-map@0.5.x, source-map@^0.5.7, source-map@~0.5.3: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - -source-map@^0.4.2, source-map@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" - dependencies: - amdefine ">=0.0.4" - -source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1: - version "0.5.6" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" - -spdx-correct@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" - dependencies: - spdx-license-ids "^1.0.2" - -spdx-expression-parse@~1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" - -spdx-license-ids@^1.0.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" - -spdy-transport@^2.0.18: - version "2.0.20" - resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-2.0.20.tgz#735e72054c486b2354fe89e702256004a39ace4d" - dependencies: - debug "^2.6.8" - detect-node "^2.0.3" - hpack.js "^2.1.6" - obuf "^1.1.1" - readable-stream "^2.2.9" - safe-buffer "^5.0.1" - wbuf "^1.7.2" - -spdy@^3.4.1: - version "3.4.7" - resolved "https://registry.yarnpkg.com/spdy/-/spdy-3.4.7.tgz#42ff41ece5cc0f99a3a6c28aabb73f5c3b03acbc" - dependencies: - debug "^2.6.8" - handle-thing "^1.2.5" - http-deceiver "^1.2.7" - safe-buffer "^5.0.1" - select-hose "^2.0.0" - spdy-transport "^2.0.18" - -sprintf-js@^1.0.3: - version "1.1.1" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.1.tgz#36be78320afe5801f6cea3ee78b6e5aab940ea0c" - -sprintf-js@~1.0.2: 
- version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - -sshpk@^1.7.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.0.tgz#ff2a3e4fd04497555fed97b39a0fd82fafb3a33c" - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - dashdash "^1.12.0" - getpass "^0.1.1" - optionalDependencies: - bcrypt-pbkdf "^1.0.0" - ecc-jsbn "~0.1.1" - jodid25519 "^1.0.0" - jsbn "~0.1.0" - tweetnacl "~0.14.0" - -"statuses@>= 1.3.1 < 2", statuses@~1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e" - -stdout-stream@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/stdout-stream/-/stdout-stream-1.4.0.tgz#a2c7c8587e54d9427ea9edb3ac3f2cd522df378b" - dependencies: - readable-stream "^2.0.1" - -stream-browserify@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.1.tgz#66266ee5f9bdb9940a4e4514cafb43bb71e5c9db" - dependencies: - inherits "~2.0.1" - readable-stream "^2.0.2" - -stream-http@^2.3.1: - version "2.7.2" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.7.2.tgz#40a050ec8dc3b53b33d9909415c02c0bf1abfbad" - dependencies: - builtin-status-codes "^3.0.0" - inherits "^2.0.1" - readable-stream "^2.2.6" - to-arraybuffer "^1.0.0" - xtend "^4.0.0" - -strict-uri-encode@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" - -string-width@^1.0.1, string-width@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -string-width@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.0.0.tgz#635c5436cc72a6e0c387ceca278d4e2eec52687e" - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^3.0.0" - -string_decoder@^0.10.25, string_decoder@~0.10.x: - version "0.10.31" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" - -string_decoder@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.1.tgz#62e200f039955a6810d8df0a33ffc0f013662d98" - dependencies: - safe-buffer "^5.0.1" - -string_decoder@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" - dependencies: - safe-buffer "~5.1.0" - -stringstream@~0.0.4: - version "0.0.5" - resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" - -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - dependencies: - ansi-regex "^2.0.0" - -strip-bom@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" - dependencies: - is-utf8 "^0.2.0" - -strip-bom@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - -strip-eof@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - -strip-indent@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" - dependencies: - get-stdin "^4.0.1" - -strip-json-comments@^2.0.0, strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - -style-loader@^0.13.1: - version "0.13.2" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.13.2.tgz#74533384cf698c7104c7951150b49717adc2f3bb" - dependencies: - loader-utils "^1.0.2" - -stylus-loader@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/stylus-loader/-/stylus-loader-3.0.1.tgz#77f4b34fd030d25b2617bcf5513db5b0730c4089" - dependencies: - loader-utils "^1.0.2" - lodash.clonedeep "^4.5.0" - when "~3.6.x" - -stylus@^0.54.5: - version "0.54.5" - resolved "https://registry.yarnpkg.com/stylus/-/stylus-0.54.5.tgz#42b9560931ca7090ce8515a798ba9e6aa3d6dc79" - dependencies: - css-parse "1.7.x" - debug "*" - glob "7.0.x" - mkdirp "0.5.x" - sax "0.5.x" - source-map "0.1.x" - -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - -supports-color@^3.1.1, supports-color@^3.1.2, supports-color@^3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" - dependencies: - has-flag "^1.0.0" - -supports-color@^4.0.0, supports-color@^4.2.1, supports-color@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.4.0.tgz#883f7ddabc165142b2a61427f3352ded195d1a3e" - dependencies: - has-flag "^2.0.0" - -svgo@^0.7.0: - version "0.7.2" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-0.7.2.tgz#9f5772413952135c6fefbf40afe6a4faa88b4bb5" - dependencies: - coa "~1.0.1" - colors "~1.1.2" - csso "~2.3.1" - js-yaml "~3.7.0" - mkdirp "~0.5.1" - sax "~1.2.1" - whet.extend "~0.9.9" - -symbol-observable@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.4.tgz#29bf615d4aa7121bdd898b22d4b3f9bc4e2aa03d" - -tapable@^0.2.7: - version "0.2.8" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.8.tgz#99372a5c999bf2df160afc0d74bed4f47948cd22" - -tar-pack@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.0.tgz#23be2d7f671a8339376cbdb0b8fe3fdebf317984" - dependencies: - debug "^2.2.0" - fstream "^1.0.10" - fstream-ignore "^1.0.5" - once "^1.3.3" - readable-stream "^2.1.4" - rimraf "^2.5.1" - tar "^2.2.1" - uid-number "^0.0.6" - -tar@^2.0.0, tar@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" - dependencies: - block-stream "*" - fstream "^1.0.2" - inherits "2" - -term-size@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/term-size/-/term-size-0.1.1.tgz#87360b96396cab5760963714cda0d0cbeecad9ca" - dependencies: - execa "^0.4.0" - -throttleit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-1.0.0.tgz#9e785836daf46743145a5984b6268d828528ac6c" - -through@X.X.X: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - 
-thunky@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/thunky/-/thunky-0.1.0.tgz#bf30146824e2b6e67b0f2d7a4ac8beb26908684e" - -time-stamp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-2.0.0.tgz#95c6a44530e15ba8d6f4a3ecb8c3a3fac46da357" - -timed-out@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" - -timers-browserify@^2.0.2: - version "2.0.4" - resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.4.tgz#96ca53f4b794a5e7c0e1bd7cc88a372298fa01e6" - dependencies: - setimmediate "^1.0.4" - -tmp@0.0.24: - version "0.0.24" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.24.tgz#d6a5e198d14a9835cc6f2d7c3d9e302428c8cf12" - -tmp@0.0.28, tmp@0.0.x: - version "0.0.28" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.28.tgz#172735b7f614ea7af39664fa84cf0de4e515d120" - dependencies: - os-tmpdir "~1.0.1" - -tmp@0.0.30: - version "0.0.30" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.30.tgz#72419d4a8be7d6ce75148fd8b324e593a711c2ed" - dependencies: - os-tmpdir "~1.0.1" - -to-array@0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" - -to-arraybuffer@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" - -to-fast-properties@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" - -toposort@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.3.tgz#f02cd8a74bd8be2fc0e98611c3bacb95a171869c" - -tough-cookie@~2.3.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" - dependencies: - punycode "^1.4.1" - -trim-newlines@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" - -trim-right@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" - -ts-node@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-2.0.0.tgz#16e4fecc949088238b4cbf1c39c9582526b66f74" - dependencies: - arrify "^1.0.0" - chalk "^1.1.1" - diff "^3.1.0" - make-error "^1.1.1" - minimist "^1.2.0" - mkdirp "^0.5.1" - pinkie "^2.0.4" - source-map-support "^0.4.0" - tsconfig "^5.0.2" - v8flags "^2.0.11" - xtend "^4.0.0" - yn "^1.2.0" - -tsconfig@^5.0.2: - version "5.0.3" - resolved "https://registry.yarnpkg.com/tsconfig/-/tsconfig-5.0.3.tgz#5f4278e701800967a8fc383fd19648878f2a6e3a" - dependencies: - any-promise "^1.3.0" - parse-json "^2.2.0" - strip-bom "^2.0.0" - strip-json-comments "^2.0.0" - -tsickle@^0.21.0: - version "0.21.6" - resolved "https://registry.yarnpkg.com/tsickle/-/tsickle-0.21.6.tgz#53b01b979c5c13fdb13afb3fb958177e5991588d" - dependencies: - minimist "^1.2.0" - mkdirp "^0.5.1" - source-map "^0.5.6" - source-map-support "^0.4.2" - -tslib@^1.7.1: - version "1.7.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.7.1.tgz#bc8004164691923a79fe8378bbeb3da2017538ec" - -tslint@~4.5.0: - version "4.5.1" - resolved "https://registry.yarnpkg.com/tslint/-/tslint-4.5.1.tgz#05356871bef23a434906734006fc188336ba824b" - 
dependencies: - babel-code-frame "^6.20.0" - colors "^1.1.2" - diff "^3.0.1" - findup-sync "~0.3.0" - glob "^7.1.1" - optimist "~0.6.0" - resolve "^1.1.7" - tsutils "^1.1.0" - update-notifier "^2.0.0" - -tsutils@^1.1.0: - version "1.9.1" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-1.9.1.tgz#b9f9ab44e55af9681831d5f28d0aeeaf5c750cb0" - -tty-browserify@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - -type-is@~1.6.15: - version "1.6.15" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.15.tgz#cab10fb4909e441c82842eafe1ad646c81804410" - dependencies: - media-typer "0.3.0" - mime-types "~2.1.15" - -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - -"typescript@>=2.0.0 <2.6.0", typescript@^2.3.3: - version "2.5.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.5.2.tgz#038a95f7d9bbb420b1bf35ba31d4c5c1dd3ffe34" - -typescript@~2.5.0: - version "2.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.5.3.tgz#df3dcdc38f3beb800d4bc322646b04a3f6ca7f0d" - -uglify-js@3.0.x: - version "3.0.28" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.0.28.tgz#96b8495f0272944787b5843a1679aa326640d5f7" - dependencies: - commander "~2.11.0" - source-map "~0.5.1" - -uglify-js@^2.6: - version "2.8.27" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.27.tgz#47787f912b0f242e5b984343be8e35e95f694c9c" - dependencies: - source-map "~0.5.1" - yargs "~3.10.0" - optionalDependencies: - uglify-to-browserify "~1.0.0" - -uglify-js@^2.8.29: - version "2.8.29" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" - dependencies: - source-map "~0.5.1" - yargs "~3.10.0" - optionalDependencies: - uglify-to-browserify "~1.0.0" - -uglify-js@~2.3: - version "2.3.6" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.3.6.tgz#fa0984770b428b7a9b2a8058f46355d14fef211a" - dependencies: - async "~0.2.6" - optimist "~0.3.5" - source-map "~0.1.7" - -uglify-to-browserify@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" - -uglifyjs-webpack-plugin@^0.4.6: - version "0.4.6" - resolved "https://registry.yarnpkg.com/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz#b951f4abb6bd617e66f63eb891498e391763e309" - dependencies: - source-map "^0.5.6" - uglify-js "^2.8.29" - webpack-sources "^1.0.1" - -uid-number@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" - -ultron@1.0.x: - version "1.0.2" - resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa" - -uniq@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" - -uniqid@^4.0.0: - version "4.1.1" - resolved 
"https://registry.yarnpkg.com/uniqid/-/uniqid-4.1.1.tgz#89220ddf6b751ae52b5f72484863528596bb84c1" - dependencies: - macaddress "^0.2.8" - -uniqs@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" - -unique-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a" - dependencies: - crypto-random-string "^1.0.0" - -universalify@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.1.tgz#fa71badd4437af4c148841e3b3b165f9e9e590b7" - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - -unzip-response@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-2.0.1.tgz#d2f0f737d16b0615e72a6935ed04214572d56f97" - -update-notifier@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-2.1.0.tgz#ec0c1e53536b76647a24b77cb83966d9315123d9" - dependencies: - boxen "^1.0.0" - chalk "^1.0.0" - configstore "^3.0.0" - is-npm "^1.0.0" - latest-version "^3.0.0" - lazy-req "^2.0.0" - semver-diff "^2.0.0" - xdg-basedir "^3.0.0" - -upper-case@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" - -url-loader@^0.5.7: - version "0.5.9" - resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-0.5.9.tgz#cc8fea82c7b906e7777019250869e569e995c295" - dependencies: - loader-utils "^1.0.2" - mime "1.3.x" - -url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - dependencies: - prepend-http "^1.0.1" - -url-parse@1.0.x: - version "1.0.5" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.0.5.tgz#0854860422afdcfefeb6c965c662d4800169927b" - dependencies: - querystringify "0.0.x" - requires-port "1.0.x" - -url-parse@^1.1.8: - version "1.1.9" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.1.9.tgz#c67f1d775d51f0a18911dd7b3ffad27bb9e5bd19" - dependencies: - querystringify "~1.0.0" - requires-port "1.0.x" - -url@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" - dependencies: - punycode "1.3.2" - querystring "0.2.0" - -user-home@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" - -useragent@^2.1.10: - version "2.1.13" - resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.1.13.tgz#bba43e8aa24d5ceb83c2937473e102e21df74c10" - dependencies: - lru-cache "2.2.x" - tmp "0.0.x" - -util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - -util@0.10.3, util@^0.10.3: - version "0.10.3" - resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" - dependencies: - inherits "2.0.1" - -utila@~0.3: - version "0.3.3" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.3.3.tgz#d7e8e7d7e309107092b05f8d9688824d633a4226" - -utila@~0.4: - version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - 
-utils-merge@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.0.tgz#0294fb922bb9375153541c4f7096231f287c8af8" - -uuid@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" - -uuid@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" - -v8flags@^2.0.11: - version "2.1.1" - resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-2.1.1.tgz#aab1a1fa30d45f88dd321148875ac02c0b55e5b4" - dependencies: - user-home "^1.1.1" - -validate-npm-package-license@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" - dependencies: - spdx-correct "~1.0.0" - spdx-expression-parse "~1.0.0" - -vary@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.1.tgz#67535ebb694c1d52257457984665323f587e8d37" - -vendors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.1.tgz#37ad73c8ee417fb3d580e785312307d274847f22" - -verror@1.3.6: - version "1.3.6" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.3.6.tgz#cff5df12946d297d2baaefaa2689e25be01c005c" - dependencies: - extsprintf "1.0.2" - -vlq@^0.2.1: - version "0.2.2" - resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.2.tgz#e316d5257b40b86bb43cb8d5fea5d7f54d6b0ca1" - -vm-browserify@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73" - dependencies: - indexof "0.0.1" - -void-elements@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" - -watchpack@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.4.0.tgz#4a1472bcbb952bd0a9bb4036801f954dfb39faac" - dependencies: - async "^2.1.2" - chokidar "^1.7.0" - graceful-fs "^4.1.2" - -wbuf@^1.1.0, wbuf@^1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.2.tgz#d697b99f1f59512df2751be42769c1580b5801fe" - dependencies: - minimalistic-assert "^1.0.0" - -webdriver-js-extender@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/webdriver-js-extender/-/webdriver-js-extender-1.0.0.tgz#81c533a9e33d5bfb597b4e63e2cdb25b54777515" - dependencies: - "@types/selenium-webdriver" "^2.53.35" - selenium-webdriver "^2.53.2" - -webdriver-manager@^12.0.6: - version "12.0.6" - resolved "https://registry.yarnpkg.com/webdriver-manager/-/webdriver-manager-12.0.6.tgz#3df1a481977010b4cbf8c9d85c7a577828c0e70b" - dependencies: - adm-zip "^0.4.7" - chalk "^1.1.1" - del "^2.2.0" - glob "^7.0.3" - ini "^1.3.4" - minimist "^1.2.0" - q "^1.4.1" - request "^2.78.0" - rimraf "^2.5.2" - semver "^5.3.0" - xml2js "^0.4.17" - -webpack-concat-plugin@1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/webpack-concat-plugin/-/webpack-concat-plugin-1.4.0.tgz#a6eb3f0082d03c79d8ee2f1518c7f48e44ee12c5" - dependencies: - md5 "^2.2.1" - uglify-js "^2.8.29" - -webpack-dev-middleware@^1.11.0, webpack-dev-middleware@~1.12.0: - version "1.12.0" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-1.12.0.tgz#d34efefb2edda7e1d3b5dbe07289513219651709" - dependencies: - memory-fs "~0.4.1" - mime "^1.3.4" - path-is-absolute "^1.0.0" - range-parser "^1.0.3" - 
time-stamp "^2.0.0" - -webpack-dev-server@~2.7.1: - version "2.7.1" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-2.7.1.tgz#21580f5a08cd065c71144cf6f61c345bca59a8b8" - dependencies: - ansi-html "0.0.7" - bonjour "^3.5.0" - chokidar "^1.6.0" - compression "^1.5.2" - connect-history-api-fallback "^1.3.0" - del "^3.0.0" - express "^4.13.3" - html-entities "^1.2.0" - http-proxy-middleware "~0.17.4" - internal-ip "^1.2.0" - ip "^1.1.5" - loglevel "^1.4.1" - opn "4.0.2" - portfinder "^1.0.9" - selfsigned "^1.9.1" - serve-index "^1.7.2" - sockjs "0.3.18" - sockjs-client "1.1.4" - spdy "^3.4.1" - strip-ansi "^3.0.0" - supports-color "^3.1.1" - webpack-dev-middleware "^1.11.0" - yargs "^6.0.0" - -webpack-dev-server@~2.9.0: - version "2.9.1" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-2.9.1.tgz#7ac9320b61b00eb65b2109f15c82747fc5b93585" - dependencies: - ansi-html "0.0.7" - array-includes "^3.0.3" - bonjour "^3.5.0" - chokidar "^1.6.0" - compression "^1.5.2" - connect-history-api-fallback "^1.3.0" - del "^3.0.0" - express "^4.13.3" - html-entities "^1.2.0" - http-proxy-middleware "~0.17.4" - internal-ip "1.2.0" - ip "^1.1.5" - loglevel "^1.4.1" - opn "^5.1.0" - portfinder "^1.0.9" - selfsigned "^1.9.1" - serve-index "^1.7.2" - sockjs "0.3.18" - sockjs-client "1.1.4" - spdy "^3.4.1" - strip-ansi "^3.0.1" - supports-color "^4.2.1" - webpack-dev-middleware "^1.11.0" - yargs "^6.6.0" - -webpack-merge@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-4.1.0.tgz#6ad72223b3e0b837e531e4597c199f909361511e" - dependencies: - lodash "^4.17.4" - -webpack-sources@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.0.1.tgz#c7356436a4d13123be2e2426a05d1dad9cbe65cf" - dependencies: - source-list-map "^2.0.0" - source-map "~0.5.3" - -webpack@~3.5.5: - version "3.5.6" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-3.5.6.tgz#a492fb6c1ed7f573816f90e00c8fbb5a20cc5c36" - dependencies: - acorn "^5.0.0" - acorn-dynamic-import "^2.0.0" - ajv "^5.1.5" - ajv-keywords "^2.0.0" - async "^2.1.2" - enhanced-resolve "^3.4.0" - escope "^3.6.0" - interpret "^1.0.0" - json-loader "^0.5.4" - json5 "^0.5.1" - loader-runner "^2.3.0" - loader-utils "^1.1.0" - memory-fs "~0.4.1" - mkdirp "~0.5.0" - node-libs-browser "^2.0.0" - source-map "^0.5.3" - supports-color "^4.2.1" - tapable "^0.2.7" - uglifyjs-webpack-plugin "^0.4.6" - watchpack "^1.4.0" - webpack-sources "^1.0.1" - yargs "^8.0.2" - -webpack@~3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-3.6.0.tgz#a89a929fbee205d35a4fa2cc487be9cbec8898bc" - dependencies: - acorn "^5.0.0" - acorn-dynamic-import "^2.0.0" - ajv "^5.1.5" - ajv-keywords "^2.0.0" - async "^2.1.2" - enhanced-resolve "^3.4.0" - escope "^3.6.0" - interpret "^1.0.0" - json-loader "^0.5.4" - json5 "^0.5.1" - loader-runner "^2.3.0" - loader-utils "^1.1.0" - memory-fs "~0.4.1" - mkdirp "~0.5.0" - node-libs-browser "^2.0.0" - source-map "^0.5.3" - supports-color "^4.2.1" - tapable "^0.2.7" - uglifyjs-webpack-plugin "^0.4.6" - watchpack "^1.4.0" - webpack-sources "^1.0.1" - yargs "^8.0.2" - -websocket-driver@>=0.5.1: - version "0.6.5" - resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.6.5.tgz#5cb2556ceb85f4373c6d8238aa691c8454e13a36" - dependencies: - websocket-extensions ">=0.1.1" - -websocket-extensions@>=0.1.1: - version "0.1.1" - resolved 
"https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.1.tgz#76899499c184b6ef754377c2dbb0cd6cb55d29e7" - -when@~3.6.x: - version "3.6.4" - resolved "https://registry.yarnpkg.com/when/-/when-3.6.4.tgz#473b517ec159e2b85005497a13983f095412e34e" - -whet.extend@~0.9.9: - version "0.9.9" - resolved "https://registry.yarnpkg.com/whet.extend/-/whet.extend-0.9.9.tgz#f877d5bf648c97e5aa542fadc16d6a259b9c11a1" - -which-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" - -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - -which@1, which@^1.2.9: - version "1.3.0" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" - dependencies: - isexe "^2.0.0" - -which@^1.2.8, which@~1.2.10: - version "1.2.14" - resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5" - dependencies: - isexe "^2.0.0" - -wide-align@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710" - dependencies: - string-width "^1.0.2" - -widest-line@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-1.0.0.tgz#0c09c85c2a94683d0d7eaf8ee097d564bf0e105c" - dependencies: - string-width "^1.0.1" - -window-size@0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" - -wordwrap@0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" - -wordwrap@~0.0.2: - version "0.0.3" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" - -wrap-ansi@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - -write-file-atomic@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.1.0.tgz#1769f4b551eedce419f0505deae2e26763542d37" - dependencies: - graceful-fs "^4.1.11" - imurmurhash "^0.1.4" - slide "^1.1.5" - -ws@1.1.1, ws@^1.0.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.1.tgz#082ddb6c641e85d4bb451f03d52f06eabdb1f018" - dependencies: - options ">=0.0.5" - ultron "1.0.x" - -wtf-8@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wtf-8/-/wtf-8-1.0.0.tgz#392d8ba2d0f1c34d1ee2d630f15d0efb68e1048a" - -xdg-basedir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4" - -xml-char-classes@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/xml-char-classes/-/xml-char-classes-1.0.0.tgz#64657848a20ffc5df583a42ad8a277b4512bbc4d" - -xml2js@0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.4.tgz#3111010003008ae19240eba17497b57c729c555d" - dependencies: - sax "0.6.x" - xmlbuilder ">=1.0.0" - -xml2js@^0.4.17: - version "0.4.17" - resolved 
"https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.17.tgz#17be93eaae3f3b779359c795b419705a8817e868" - dependencies: - sax ">=0.6.0" - xmlbuilder "^4.1.0" - -xmlbuilder@>=1.0.0, xmlbuilder@^4.1.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-4.2.1.tgz#aa58a3041a066f90eaa16c2f5389ff19f3f461a5" - dependencies: - lodash "^4.0.0" - -xmldom@^0.1.19: - version "0.1.27" - resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.27.tgz#d501f97b3bdb403af8ef9ecc20573187aadac0e9" - -xmlhttprequest-ssl@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.3.tgz#185a888c04eca46c3e4070d99f7b49de3528992d" - -xmlhttprequest@1: - version "1.8.0" - resolved "https://registry.yarnpkg.com/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz#67fe075c5c24fef39f9d65f5f7b7fe75171968fc" - -xtend@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" - -y18n@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" - -yallist@^2.0.0, yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - -yargs-parser@^4.2.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-4.2.1.tgz#29cceac0dc4f03c6c87b4a9f217dd18c9f74871c" - dependencies: - camelcase "^3.0.0" - -yargs-parser@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-5.0.0.tgz#275ecf0d7ffe05c77e64e7c86e4cd94bf0e1228a" - dependencies: - camelcase "^3.0.0" - -yargs-parser@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9" - dependencies: - camelcase "^4.1.0" - -yargs@^6.0.0, yargs@^6.6.0: - version "6.6.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-6.6.0.tgz#782ec21ef403345f830a808ca3d513af56065208" - dependencies: - camelcase "^3.0.0" - cliui "^3.2.0" - decamelize "^1.1.1" - get-caller-file "^1.0.1" - os-locale "^1.4.0" - read-pkg-up "^1.0.1" - require-directory "^2.1.1" - require-main-filename "^1.0.1" - set-blocking "^2.0.0" - string-width "^1.0.2" - which-module "^1.0.0" - y18n "^3.2.1" - yargs-parser "^4.2.0" - -yargs@^7.0.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-7.1.0.tgz#6ba318eb16961727f5d284f8ea003e8d6154d0c8" - dependencies: - camelcase "^3.0.0" - cliui "^3.2.0" - decamelize "^1.1.1" - get-caller-file "^1.0.1" - os-locale "^1.4.0" - read-pkg-up "^1.0.1" - require-directory "^2.1.1" - require-main-filename "^1.0.1" - set-blocking "^2.0.0" - string-width "^1.0.2" - which-module "^1.0.0" - y18n "^3.2.1" - yargs-parser "^5.0.0" - -yargs@^8.0.2: - version "8.0.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360" - dependencies: - camelcase "^4.1.0" - cliui "^3.2.0" - decamelize "^1.1.1" - get-caller-file "^1.0.1" - os-locale "^2.0.0" - read-pkg-up "^2.0.0" - require-directory "^2.1.1" - require-main-filename "^1.0.1" - set-blocking "^2.0.0" - string-width "^2.0.0" - which-module "^2.0.0" - y18n "^3.2.1" - yargs-parser "^7.0.0" - -yargs@~3.10.0: - version "3.10.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" - dependencies: - camelcase "^1.0.2" - cliui "^2.1.0" - decamelize "^1.0.0" - window-size "0.1.0" - 
-yauzl@2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.4.1.tgz#9528f442dab1b2284e58b4379bb194e22e0c4005" - dependencies: - fd-slicer "~1.0.1" - -yeast@0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" - -yn@^1.2.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/yn/-/yn-1.3.0.tgz#1b0812abb8d805d48966f8df385dc9dacc9a19d8" - dependencies: - object-assign "^4.1.1" - -zone.js@^0.8.14: - version "0.8.17" - resolved "https://registry.yarnpkg.com/zone.js/-/zone.js-0.8.17.tgz#4c5e5185a857da8da793daf3919371c5a36b2a0b" - -zone.js@^0.8.4: - version "0.8.18" - resolved "https://registry.yarnpkg.com/zone.js/-/zone.js-0.8.18.tgz#8cecb3977fcd1b3090562ff4570e2847e752b48d" diff --git a/ambari-logsearch/docker/.gitignore b/ambari-logsearch/docker/.gitignore deleted file mode 100644 index b73aed028a5..00000000000 --- a/ambari-logsearch/docker/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -Profile -.env diff --git a/ambari-logsearch/docker/Dockerfile b/ambari-logsearch/docker/Dockerfile deleted file mode 100644 index c1101cbf6b1..00000000000 --- a/ambari-logsearch/docker/Dockerfile +++ /dev/null @@ -1,102 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM centos:centos7 - -RUN echo root:changeme | chpasswd - -RUN yum clean all -y && yum update -y -RUN yum -y install firefox-45.8.0-2.el6.centos xvfb xeyes vim wget rpm-build sudo which telnet tar openssh-server openssh-clients ntp git python-setuptools python-devel httpd lsof -RUN rpm -e --nodeps --justdb glibc-common -RUN yum -y install glibc-common - -ENV HOME /root - -#Install JAVA -ENV JAVA_VERSION 8u131 -ENV BUILD_VERSION b11 -RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/d54c1d3a095b4ff2b6607d096fa80163/jdk-$JAVA_VERSION-linux-x64.rpm -O jdk-8-linux-x64.rpm -RUN rpm -ivh jdk-8-linux-x64.rpm -ENV JAVA_HOME /usr/java/default/ - -#Install Selenium server -RUN wget --no-check-certificate -O /root/selenium-server-standalone.jar http://selenium-release.storage.googleapis.com/2.53/selenium-server-standalone-2.53.1.jar - -#Install Maven -RUN mkdir -p /opt/maven -WORKDIR /opt/maven -RUN wget http://archive.apache.org/dist/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -RUN tar -xvzf /opt/maven/apache-maven-3.3.9-bin.tar.gz -RUN rm -rf /opt/maven/apache-maven-3.3.9-bin.tar.gz - -ENV M2_HOME /opt/maven/apache-maven-3.3.9 -ENV MAVEN_OPTS -Xmx2048m -ENV PATH $PATH:$JAVA_HOME/bin:$M2_HOME/bin - -# SSH key -RUN ssh-keygen -f /root/.ssh/id_rsa -t rsa -N '' -RUN cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys -RUN chmod 600 /root/.ssh/authorized_keys -RUN sed -ri 's/UsePAM yes/UsePAM no/g' /etc/ssh/sshd_config -RUN echo 'X11Forwarding yes\n' /etc/ssh/sshd_config -RUN echo 'X11DisplayOffset 10\n' /etc/ssh/sshd_config - -#To allow bower install behind proxy. 
See https://github.com/bower/bower/issues/731 -RUN git config --global url."https://".insteadOf git:// - -# Install Solr -ENV SOLR_VERSION 7.4.0 -RUN wget --no-check-certificate -O /root/solr-$SOLR_VERSION.tgz http://public-repo-1.hortonworks.com/ARTIFACTS/dist/lucene/solr/$SOLR_VERSION/solr-$SOLR_VERSION.tgz -RUN cd /root && tar -zxvf /root/solr-$SOLR_VERSION.tgz - -# Install Knox -WORKDIR / -RUN adduser knox -ENV KNOX_VERSION 1.1.0 -RUN wget -q -O /knox-${KNOX_VERSION}.zip http://download.nextag.com/apache/knox/${KNOX_VERSION}/knox-${KNOX_VERSION}.zip && unzip /knox-${KNOX_VERSION}.zip && rm knox-${KNOX_VERSION}.zip && ln -nsf knox-${KNOX_VERSION} knox && chmod +x /knox/bin/*.sh && chown -R knox /knox/ - -ADD knox/keystores /knox-secrets -RUN cd /knox-secrets && unzip test-secrets.zip -RUN mkdir -p /knox/data/security/keystores -RUN mv /knox-secrets/master /knox/data/security/master -RUN cd /knox-secrets && cp -r * /knox/data/security/keystores/ -RUN chown -R knox /knox/data/security - -ADD knox/ldap.sh /ldap.sh -ADD knox/gateway.sh /gateway.sh -RUN touch /gateway.out && chown -R knox /gateway.out -RUN touch /ldap.out && chown -R knox /ldap.out - -ADD bin/start.sh /root/start.sh -ADD test-config /root/test-config -ADD test-logs /root/test-logs -RUN chmod -R 777 /root/test-config -RUN chmod +x /root/start.sh - -ENV SOLR_CONFIG_LOCATION /root/config/solr -ENV LOGSEARCH_CONF_DIR /root/config/logsearch -ENV LOGFEEDER_CONF_DIR /root/config/logfeeder -ENV SOLR_INCLUDE /root/config/solr/solr-env.sh - -RUN mkdir -p /var/run/ambari-logsearch-solr /var/log/ambari-logsearch-solr /var/run/ambari-infra-solr-client \ - /var/log/ambari-infra-solr-client /root/logsearch_solr_index/data \ - /var/run/ambari-logsearch-portal /var/log/ambari-logsearch-portal \ - /var/run/ambari-logsearch-logfeeder /var/log/ambari-logsearch-logfeeder - -RUN cp /root/test-config/solr/solr.xml /root/logsearch_solr_index/data -RUN cp /root/test-config/solr/zoo.cfg /root/logsearch_solr_index/data - -RUN mkdir -p /root/config -RUN chmod -R 777 /root/config - -WORKDIR /root -CMD /root/start.sh diff --git a/ambari-logsearch/docker/all.yml b/ambari-logsearch/docker/all.yml deleted file mode 100644 index cd008531dd7..00000000000 --- a/ambari-logsearch/docker/all.yml +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - logsearch-all: - image: ambari-logsearch:v1.0 - restart: always - hostname: logsearch.apache.org - container_name: logsearch - env_file: - - Profile - ports: - - 61888:61888 - - 5005:5005 - - 5006:5006 - - 8886:8886 - - 18886:18886 - - 4444:4444 - - 9983:9983 - environment: - DISPLAY: $DISPLAY_MAC - volumes: - - $MAVEN_REPOSITORY_LOCATION:/root/.m2 - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config \ No newline at end of file diff --git a/ambari-logsearch/docker/bin/start.sh b/ambari-logsearch/docker/bin/start.sh deleted file mode 100644 index d212b916721..00000000000 --- a/ambari-logsearch/docker/bin/start.sh +++ /dev/null @@ -1,233 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -AMBARI_PATH=/root/ambari -LOGSEARCH_SERVER_PATH=$AMBARI_PATH/ambari-logsearch/ambari-logsearch-server/target/package -LOGFEEDER_PATH=$AMBARI_PATH/ambari-logsearch/ambari-logsearch-logfeeder/target/package -SOLR_LOCATION=/root/solr-$SOLR_VERSION.tgz -SOLR_SERVER_LOCATION=/root/solr-$SOLR_VERSION -ZKCLI=$SOLR_SERVER_LOCATION/server/scripts/cloud-scripts/zkcli.sh -ZK_CONNECT_STRING=${ZK_CONNECT_STRING:-"localhost:9983"} -COMPONENT=${COMPONENT:-"ALL"} - -command="$1" - -function build_all() { - echo "build all" - cd $AMBARI_PATH/ambari-logsearch && mvn clean package -DskipTests && mvn -pl ambari-logsearch-logfeeder clean package -DskipTests -} - -function set_custom_zookeeper_address() { - local file_to_update=${1:?"usage: "} - local zk_connect_string="$ZK_CONNECT_STRING" - if [ "$zk_connect_string" != "localhost:9983" ] ; then - sed -i "s|localhost:9983|$zk_connect_string|g" $file_to_update - fi -} - -function create_logfeeder_configs() { - mkdir /root/config/logfeeder - cp -r /root/test-config/logfeeder/* /root/config/logfeeder/ - set_custom_zookeeper_address /root/config/logfeeder/logfeeder.properties - set_custom_zookeeper_address /root/config/logfeeder/shipper-conf/output.config.json -} - -function create_logsearch_configs() { - mkdir -p /root/config/logsearch - cp /root/test-config/logsearch/log4j.xml /root/config/logsearch/ - cp /root/test-config/logsearch/logsearch-env.sh /root/config/logsearch/ - cp $LOGSEARCH_SERVER_PATH/conf/user_pass.json /root/config/logsearch/user_pass.json - cp $LOGSEARCH_SERVER_PATH/conf/roles.json /root/config/logsearch/roles.json - if [ "$LOGSEARCH_HTTPS_ENABLED" == "true" ] - then - cp /root/test-config/logsearch/logsearch-https.properties /root/config/logsearch/logsearch.properties - else - cp /root/test-config/logsearch/logsearch.properties 
/root/config/logsearch/logsearch.properties - fi - - if [ "$KNOX" == "true" ] - then - cp /root/test-config/logsearch/logsearch-sso.properties /root/config/logsearch/logsearch.properties - fi - - set_custom_zookeeper_address /root/config/logsearch/logsearch.properties -} - -function create_solr_configs() { - mkdir /root/config/solr - cp /root/test-config/solr/log4j.properties /root/config/solr/ - cp /root/test-config/solr/zoo.cfg /root/config/solr/ - cp /root/test-config/solr/solr.xml /root/config/solr/ - if [ "$LOGSEARCH_SOLR_SSL_ENABLED" == "true" ] - then - cp /root/test-config/solr/solr-env-ssl.sh /root/config/solr/solr-env.sh - else - cp /root/test-config/solr/solr-env.sh /root/config/solr/solr-env.sh - fi -} - -function create_configs() { - create_solr_configs - create_logfeeder_configs - create_logsearch_configs -} - -function generate_keys() { - if [ "$GENERATE_KEYSTORE_AT_START" == "true" ] - then - IP=`hostname --ip-address` - echo "generating stores for IP: $IP" - mkdir -p /etc/ambari-logsearch-portal/conf/keys/ - keytool -genkeypair -alias logsearch -keyalg RSA -keysize 2048 -keypass bigdata -storepass bigdata -validity 9999 -keystore /etc/ambari-logsearch-portal/conf/keys/logsearch.jks -ext SAN=DNS:localhost,IP:127.0.0.1,IP:$IP -dname "CN=Common Name, OU=Organizational Unit, O=Organization, L=Location, ST=State, C=Country" -rfc - fi -} - -function start_solr_d() { - echo "Starting Solr..." - /root/solr-$SOLR_VERSION/bin/solr start -cloud -s /root/logsearch_solr_index/data -verbose -force - touch /var/log/ambari-logsearch-solr/solr.log - - if [ "$LOGSEARCH_SOLR_SSL_ENABLED" == "true" ] - then - echo "Setting urlScheme as https and restarting solr..." - $ZKCLI -zkhost localhost:9983 -cmd clusterprop -name urlScheme -val https - /root/solr-$SOLR_VERSION/bin/solr stop - /root/solr-$SOLR_VERSION/bin/solr start -cloud -s /root/logsearch_solr_index/data -verbose -force - fi -} - -function start_logsearch() { - $LOGSEARCH_SERVER_PATH/bin/logsearch.sh start -f -} - -function start_logsearch_d() { - $LOGSEARCH_SERVER_PATH/bin/logsearch.sh start - touch /var/log/ambari-logsearch-portal/logsearch-app.log -} - -function start_logfeeder() { - $LOGFEEDER_PATH/bin/logfeeder.sh start -f -} - - -function start_logfeeder_d() { - $LOGFEEDER_PATH/bin/logfeeder.sh start - touch /var/log/ambari-logsearch-logfeeder/logsearch-logfeeder.log -} - -function start_selenium_server_d() { - nohup java -jar /root/selenium-server-standalone.jar > /var/log/selenium-test.log & -} - -function start_ldap_d() { - if [ "$KNOX" == "true" ] - then - echo "KNOX is enabled. Starting Demo LDAP." - su knox -c "/ldap.sh" - else - echo "KNOX is not enabled. Skip Starting Demo LDAP." - fi -} - -function start_knox_d() { - if [ "$KNOX" == "true" ] - then - echo "KNOX is enabled. Starting Demo KNOX gateway." - su knox -c "/gateway.sh" - else - echo "KNOX is not enabled. Skip Starting KNOX gateway." - fi -} - -function log() { - component_log=${COMPONENT_LOG:-"logsearch"} - case $component_log in - "logfeeder") - tail -f /var/log/ambari-logsearch-logfeeder/logsearch-logfeeder.log - ;; - "solr") - tail -f /var/log/ambari-logsearch-solr/solr.log - ;; - "selenium") - tail -f /var/log/selenium-test.log - ;; - "knox") - tail -f --retry /knox/logs/gateway.log - ;; - "ldap") - tail -f --retry /knox/logs/ldap.log - ;; - *) - tail -f /var/log/ambari-logsearch-portal/logsearch-app.log - ;; - esac -} - -function main() { - component=${COMPONENT:-"ALL"} - case $component in - "solr") - create_solr_configs - echo "Start Solr only ..." 
- export COMPONENT_LOG="solr" - generate_keys - start_solr_d - log - ;; - "logfeeder") - create_logfeeder_configs - echo "Start Log Feeder only ..." - export COMPONENT_LOG="logfeeder" - generate_keys - start_logfeeder - ;; - "logsearch") - create_logsearch_configs - echo "Start Log Search only ..." - export COMPONENT_LOG="logsearch" - generate_keys - start_logsearch - log - ;; - "knox") - echo "Start KNOX only ..." - export COMPONENT_LOG="knox" - export KNOX="true" - start_knox_d - log - ;; - "ldap") - echo "Start Demo LDAP only ..." - export COMPONENT_LOG="ldap" - export KNOX="true" - start_ldap_d - log - ;; - *) - create_configs - generate_keys - start_selenium_server_d - start_solr_d - start_logfeeder_d - start_ldap_d - start_knox_d - start_logsearch_d - log - ;; - esac -} - -main diff --git a/ambari-logsearch/docker/docker-compose.yml b/ambari-logsearch/docker/docker-compose.yml deleted file mode 100644 index fb14622902d..00000000000 --- a/ambari-logsearch/docker/docker-compose.yml +++ /dev/null @@ -1,95 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - zookeeper: - image: zookeeper:${ZOOKEEPER_VERSION:-3.4.10} - restart: always - hostname: zookeeper - networks: - - logsearch-network - ports: - - 2181:2181 - environment: - ZOO_MY_ID: 1 - ZOO_SERVERS: server.1=zookeeper:2888:3888 - solr: - image: solr:${SOLR_VERSION:-7.4.0} - restart: always - hostname: solr - ports: - - "8983:8983" - networks: - - logsearch-network - env_file: - - Profile - entrypoint: - - docker-entrypoint.sh - - solr - - start - - "-f" - - "-c" - - "-z" - - ${ZOOKEEPER_CONNECTION_STRING} - logsearch: - image: ambari-logsearch:v1.0 - restart: always - hostname: logsearch.apache.org - labels: - logfeeder.log.type: "logsearch_server" - networks: - - logsearch-network - env_file: - - Profile - ports: - - 61888:61888 - - 4444:4444 - - 5005:5005 - environment: - COMPONENT: logsearch - COMPONENT_LOG: logsearch - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - DISPLAY: $DISPLAY_MAC - volumes: - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config - logfeeder: - image: ambari-logsearch:v1.0 - restart: always - hostname: logfeeder.apache.org - privileged: true - labels: - logfeeder.log.type: "logfeeder" - networks: - - logsearch-network - env_file: - - Profile - ports: - - 5006:5006 - environment: - COMPONENT: logfeeder - COMPONENT_LOG: logfeeder - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - volumes: - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config - - /var/run/docker.sock:/var/run/docker.sock - - 
/usr/local/bin/docker:/usr/local/bin/docker - - /var/lib/docker:/var/lib/docker -networks: - logsearch-network: - driver: bridge diff --git a/ambari-logsearch/docker/knox.yml b/ambari-logsearch/docker/knox.yml deleted file mode 100644 index 936a026182d..00000000000 --- a/ambari-logsearch/docker/knox.yml +++ /dev/null @@ -1,50 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - ldap: - image: ambari-logsearch:v1.0 - restart: always - hostname: ldap.apache.org - networks: - - logsearch-network - ports: - - 33389:33389 - environment: - COMPONENT: ldap - COMPONENT_LOG: ldap - KNOX: "true" - knox: - image: ambari-logsearch:v1.0 - restart: always - hostname: knox.apache.org - networks: - - logsearch-network - ports: - - 8443:8443 - volumes: - - ./knox/topologies:/knox/conf/topologies - - ./knox/logsearch:/knox/data/services/logsearch - #- ./knox/applications:/knox/data/applications - environment: - COMPONENT: knox - COMPONENT_LOG: knox - KNOX: "true" - depends_on: - - ldap - -networks: - logsearch-network: - driver: bridge \ No newline at end of file diff --git a/ambari-logsearch/docker/knox/gateway.sh b/ambari-logsearch/docker/knox/gateway.sh deleted file mode 100755 index 5c74182b960..00000000000 --- a/ambari-logsearch/docker/knox/gateway.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
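The knox.yml overlay above defines its services on the same logsearch-network bridge as docker-compose.yml, so the two files are meant to be combined. A minimal sketch of layering the Knox/LDAP overlay on top of the base stack, assuming it is run from ambari-logsearch/docker after the Profile and .env files have been generated:

    # Later -f files extend the earlier ones; both stacks share logsearch-network.
    docker-compose -f docker-compose.yml -f knox.yml up -d
    # Follow the gateway output to confirm Knox started.
    docker-compose -f docker-compose.yml -f knox.yml logs -f knox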
-# See the License for the specific language governing permissions and -# limitations under the License - -set -e -set -o pipefail - -nohup java -jar /knox/bin/gateway.jar > /gateway.out & - diff --git a/ambari-logsearch/docker/knox/keystores/test-secrets.zip b/ambari-logsearch/docker/knox/keystores/test-secrets.zip deleted file mode 100644 index e2e2420a6dc..00000000000 Binary files a/ambari-logsearch/docker/knox/keystores/test-secrets.zip and /dev/null differ diff --git a/ambari-logsearch/docker/knox/ldap.sh b/ambari-logsearch/docker/knox/ldap.sh deleted file mode 100755 index 5bc3c1ccd08..00000000000 --- a/ambari-logsearch/docker/knox/ldap.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -set -e -set -o pipefail - -nohup java -jar /knox/bin/ldap.jar /knox/conf > /ldap.out & - diff --git a/ambari-logsearch/docker/knox/logsearch/1.0.0/rewrite.xml b/ambari-logsearch/docker/knox/logsearch/1.0.0/rewrite.xml deleted file mode 100644 index 0bc9ccfe66a..00000000000 --- a/ambari-logsearch/docker/knox/logsearch/1.0.0/rewrite.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/ambari-logsearch/docker/knox/logsearch/1.0.0/service.xml b/ambari-logsearch/docker/knox/logsearch/1.0.0/service.xml deleted file mode 100644 index ad2eacbb3a1..00000000000 --- a/ambari-logsearch/docker/knox/logsearch/1.0.0/service.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/docker/knox/topologies/admin.xml b/ambari-logsearch/docker/knox/topologies/admin.xml deleted file mode 100644 index f65c15a5f2d..00000000000 --- a/ambari-logsearch/docker/knox/topologies/admin.xml +++ /dev/null @@ -1,67 +0,0 @@ - - - - - - - federation - SSOCookieProvider - true - - sso.authentication.provider.url - /gateway/knoxsso/api/v1/websso - - - - - authorization - AclsAuthz - true - - knox.acl - admin;*;* - - - - - identity-assertion - Default - true - - - - hostmap - static - true - - localhost - sandbox,sandbox.hortonworks.com - - - - - - - KNOX - - - - admin-ui - - - diff --git a/ambari-logsearch/docker/knox/topologies/knoxsso.xml b/ambari-logsearch/docker/knox/topologies/knoxsso.xml deleted file mode 100644 index 31319dae9d2..00000000000 --- a/ambari-logsearch/docker/knox/topologies/knoxsso.xml +++ /dev/null @@ -1,118 +0,0 @@ - - - - - - webappsec - WebAppSec - true - - xframe.options.enabled - true - - - - - authentication - ShiroProvider - true - - sessionTimeout - 30 - - - redirectToUrl - /gateway/knoxsso/knoxauth/login.html - - - restrictedCookies - rememberme,WWW-Authenticate - - - main.ldapRealm - org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm - - - main.ldapContextFactory - org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory 
- - - main.ldapRealm.contextFactory - $ldapContextFactory - - - main.ldapRealm.userDnTemplate - uid={0},ou=people,dc=hadoop,dc=apache,dc=org - - - main.ldapRealm.contextFactory.url - ldap://ldap:33389 - - - main.ldapRealm.authenticationCachingEnabled - false - - - main.ldapRealm.contextFactory.authenticationMechanism - simple - - - urls./** - authcBasic - - - - - identity-assertion - Default - true - - - - hostmap - static - true - - localhost - sandbox,sandbox.hortonworks.com - - - - - - - knoxauth - - - - KNOXSSO - - knoxsso.cookie.secure.only - false - - - knoxsso.token.ttl - -1 - - - knoxsso.redirect.whitelist.regex - ^https?:\/\/(www\.local\.com|localhost|127\.0\.0\.1|0:0:0:0:0:0:0:1|::1):[0-9].*$ - - - - diff --git a/ambari-logsearch/docker/knox/topologies/sandbox.xml b/ambari-logsearch/docker/knox/topologies/sandbox.xml deleted file mode 100644 index 49458444196..00000000000 --- a/ambari-logsearch/docker/knox/topologies/sandbox.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - federation - SSOCookieProvider - true - - sso.authentication.provider.url - https://localhost:8443/gateway/knoxsso/api/v1/websso - - - - - identity-assertion - Default - true - - - - hostmap - static - true - - localhost - sandbox,sandbox.hortonworks.com - - - - - - - LOGSEARCH - http://logsearch:61888 - - - \ No newline at end of file diff --git a/ambari-logsearch/docker/logsearch-docker.sh b/ambari-logsearch/docker/logsearch-docker.sh deleted file mode 100755 index 72a332a31b8..00000000000 --- a/ambari-logsearch/docker/logsearch-docker.sh +++ /dev/null @@ -1,165 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -sdir="`dirname \"$0\"`" -: ${1:?"argument is missing: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only)"} -command="$1" -shift - -while getopts "bf" opt; do - case $opt in - b) # build backend only - maven_build_options="-pl !ambari-logsearch-web" - ;; - f) # build frontend only - maven_build_options="-pl ambari-logsearch-web" - ;; - \?) - echo "Invalid option: -$OPTARG" >&2 - exit 1 - ;; - :) - echo "Option -$OPTARG requires an argument." >&2 - exit 1 - ;; - esac -done - -function build_logsearch_project() { - pushd $sdir/../ - mvn clean package -DskipTests $maven_build_options - popd -} - -function build_logsearch_container() { - pushd $sdir - docker build -t ambari-logsearch:v1.0 . 
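For reference, the wrapper script above takes one of the commands listed in its usage string, optionally followed by the -b/-f flags that narrow the Maven build. A few illustrative invocations, assuming it is run from ambari-logsearch/docker:

    ./logsearch-docker.sh build-and-run         # full mvn package, image build, then start the containers
    ./logsearch-docker.sh build-mvn-and-run -b  # rebuild only the backend modules, reuse the existing image
    ./logsearch-docker.sh stop                  # remove the running logsearch container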
- popd -} - -function get_docker_ip() { - local ip=$(ifconfig en0 | grep inet | awk '$1=="inet" {print $2}') - echo $ip -} - -function start_logsearch_container() { - setup_env - setup_profile - kill_logsearch_container - echo "Run Log Search container" - docker-compose -f all.yml up -d - ip_address=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' logsearch) - echo "Log Search container started on $ip_address (for Mac OSX route to boot2docker/docker-machine VM address, e.g.: 'sudo route add -net 172.17.0.0/16 192.168.59.103')" - echo "You can follow Log Search logs with 'docker logs -f logsearch' command" -} - -function setup_profile() { - if [ -f "$sdir/Profile" ]; - then - echo "Profile file exists" - else - echo "Profile file does not exist, Creating a new one..." - pushd $sdir/../../ - local AMBARI_LOCATION=$(pwd) - popd - cat << EOF > $sdir/Profile -COMPONENT=ALL -COMPONENT_LOG=logsearch -LOGFEEDER_DEBUG_SUSPEND=n -LOGSEARCH_DEBUG_SUSPEND=n -LOGSEARCH_HTTPS_ENABLED=false -LOGSEARCH_SOLR_SSL_ENABLED=false -GENERATE_KEYSTORE_AT_START=false -SOLR_HOST=solr -KNOX=false -EOF - echo "'Profile' file has been created. Check it out before starting Log Search. ($sdir/Profile)" - exit - fi; -} - -function setup_env() { - if [ -f "$sdir/.env" ]; - then - echo ".env file exists" - else - echo ".env file does not exist, Creating a new one..." - pushd $sdir/../../ - local AMBARI_LOCATION=$(pwd) - popd - local display_ip=$(get_docker_ip) - cat << EOF > $sdir/.env -DISPLAY_MAC=$display_ip:0 -MAVEN_REPOSITORY_LOCATION=$HOME/.m2 -AMBARI_LOCATION=$AMBARI_LOCATION - -ZOOKEEPER_VERSION=3.4.10 -ZOOKEEPER_CONNECTION_STRING=zookeeper:2181 - -SOLR_VERSION=7.4.0 -EOF - echo ".env file has been created. Check it out before starting Log Search. ($sdir/.env)" - exit - fi; -} - -function kill_logsearch_container() { - echo "Try to remove logsearch container if exists..." - docker rm -f logsearch -} - -function setup_x11() { - local display_ip=$(get_docker_ip) - xhost + $display_ip -} - -case $command in - "build-and-run") - build_logsearch_project - build_logsearch_container - start_logsearch_container - ;; - "build") - build_logsearch_project - build_logsearch_container - ;; - "build-docker-and-run") - build_logsearch_container - start_logsearch_container - ;; - "build-mvn-and-run") - build_logsearch_project - start_logsearch_container - ;; - "build-docker-only") - build_logsearch_container - ;; - "build-mvn-only") - build_logsearch_project - ;; - "start") - start_logsearch_container - ;; - "stop") - kill_logsearch_container - ;; - "setup-x11") - setup_x11 - ;; - *) - echo "Available commands: (start|stop|build-and-run|build|build-docker-and-run|build-mvn-and-run|build-docker-only|build-mvn-only|setup-x11)" - ;; -esac diff --git a/ambari-logsearch/docker/logsearch-logfeeder.yml b/ambari-logsearch/docker/logsearch-logfeeder.yml deleted file mode 100644 index 75d43913624..00000000000 --- a/ambari-logsearch/docker/logsearch-logfeeder.yml +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - logfeeder: - image: ambari-logsearch:v1.0 - restart: always - networks: - - logsearch-network - env_file: - - Profile - environment: - COMPONENT: logfeeder - COMPONENT_LOG: logfeeder - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - volumes: - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config -networks: - logsearch-network: - driver: bridge \ No newline at end of file diff --git a/ambari-logsearch/docker/logsearch-server.yml b/ambari-logsearch/docker/logsearch-server.yml deleted file mode 100644 index 00c08a279e9..00000000000 --- a/ambari-logsearch/docker/logsearch-server.yml +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - logsearch: - image: ambari-logsearch:v1.0 - restart: always - networks: - - logsearch-network - env_file: - - Profile - environment: - COMPONENT: logsearch - COMPONENT_LOG: logsearch - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - volumes: - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config -networks: - logsearch-network: - driver: bridge \ No newline at end of file diff --git a/ambari-logsearch/docker/solr.yml b/ambari-logsearch/docker/solr.yml deleted file mode 100644 index 2975af60482..00000000000 --- a/ambari-logsearch/docker/solr.yml +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - solr: - image: solr:${SOLR_VERSION:-7.4.0} - restart: always - networks: - - logsearch-network - env_file: - - Profile - entrypoint: - - docker-entrypoint.sh - - solr - - start - - "-f" - - "-c" - - "-z" - - ${ZOOKEEPER_CONNECTION_STRING} -networks: - logsearch-network: - driver: bridge diff --git a/ambari-logsearch/docker/sso.yml b/ambari-logsearch/docker/sso.yml deleted file mode 100644 index 0837dd837b0..00000000000 --- a/ambari-logsearch/docker/sso.yml +++ /dev/null @@ -1,119 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - zookeeper: - image: zookeeper:${ZOOKEEPER_VERSION:-3.4.10} - restart: always - hostname: zookeeper - networks: - - logsearch-network - ports: - - 2181:2181 - environment: - ZOO_MY_ID: 1 - ZOO_SERVERS: server.1=zookeeper:2888:3888 - solr: - image: solr:${SOLR_VERSION:-7.4.0} - restart: always - hostname: solr - ports: - - "8983:8983" - networks: - - logsearch-network - env_file: - - Profile - entrypoint: - - docker-entrypoint.sh - - solr - - start - - "-f" - - "-c" - - "-z" - - ${ZOOKEEPER_CONNECTION_STRING} - logsearch: - image: ambari-logsearch:v1.0 - restart: always - hostname: logsearch.apache.org - networks: - - logsearch-network - env_file: - - Profile - ports: - - 61888:61888 - - 4444:4444 - - 5005:5005 - environment: - COMPONENT: logsearch - COMPONENT_LOG: logsearch - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - DISPLAY: $DISPLAY_MAC - KNOX: "true" - volumes: - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config - logfeeder: - image: ambari-logsearch:v1.0 - restart: always - hostname: logfeeder.apache.org - networks: - - logsearch-network - env_file: - - Profile - ports: - - 5006:5006 - environment: - COMPONENT: logfeeder - COMPONENT_LOG: logfeeder - ZK_CONNECT_STRING: ${ZOOKEEPER_CONNECTION_STRING} - volumes: - - $AMBARI_LOCATION:/root/ambari - - $AMBARI_LOCATION/ambari-logsearch/docker/test-logs:/root/test-logs - - $AMBARI_LOCATION/ambari-logsearch/docker/test-config:/root/test-config - ldap: - image: ambari-logsearch:v1.0 - restart: always - hostname: ldap.apache.org - networks: - - logsearch-network - ports: - - 33389:33389 - environment: - COMPONENT: ldap - COMPONENT_LOG: ldap - KNOX: "true" - knox: - image: ambari-logsearch:v1.0 - restart: always - hostname: knox.apache.org - networks: - - logsearch-network - ports: - - 8443:8443 - volumes: - - ./knox/topologies:/knox/conf/topologies - - ./knox/logsearch:/knox/data/services/logsearch - #- ./knox/applications:/knox/data/applications - environment: - COMPONENT: knox - COMPONENT_LOG: 
knox - KNOX: "true" - depends_on: - - ldap - -networks: - logsearch-network: - driver: bridge diff --git a/ambari-logsearch/docker/test-config/logfeeder/log4j.xml b/ambari-logsearch/docker/test-config/logfeeder/log4j.xml deleted file mode 100644 index 52e3e2aacc2..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/log4j.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder-env.sh b/ambari-logsearch/docker/test-config/logfeeder/logfeeder-env.sh deleted file mode 100644 index a57b1f1b4f6..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder-env.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -e - -export LOGFEEDER_PATH=/root/ambari/ambari-logsearch/ambari-logsearch-logfeeder/target/package - -export LOGFEEDER_CONF_DIR=/root/config/logfeeder - -#Logfile e.g. /var/log/logfeeder.log -export LOG_PATH=/var/log/ambari-logsearch-logfeeder -export LOG_FILE=logfeeder.out - -#pid file e.g. /var/run/logfeeder.pid -export LOGFEEDER_PID_FILE=/var/run/ambari-logsearch-logfeeder/logfeeder.pid - -export JAVA_HOME=/usr/java/default - -LOGFEEDER_JAVA_MEM=${LOGFEEDER_JAVA_MEM:-"-Xmx512m"} - -export LOGFEEDER_DEBUG=true - -export LOGFEEDER_DEBUG_PORT=5006 - -export LOGFEEDER_SSL="true" -export LOGFEEDER_KEYSTORE_LOCATION=/root/config/ssl/logsearch.keyStore.jks -export LOGFEEDER_KEYSTORE_PASSWORD=bigdata -export LOGFEEDER_KEYSTORE_TYPE=jks -export LOGFEEDER_TRUSTSTORE_LOCATION=/root/config/ssl/logsearch.trustStore.jks -export LOGFEEDER_TRUSTSTORE_PASSWORD=bigdata -export LOGFEEDER_TRUSTSTORE_TYPE=jks - diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties deleted file mode 100644 index 83711709770..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
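logfeeder-env.sh above sets LOGFEEDER_DEBUG=true with LOGFEEDER_DEBUG_PORT=5006, and the compose files publish 5006:5006. A quick way to attach from the host, assuming the debug flag results in a JDWP socket listener on that port:

    # Attach the JDK command-line debugger to the Log Feeder JVM.
    jdb -attach localhost:5006

An IDE remote-debug configuration pointed at localhost:5006 works the same way.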
- -cluster.name=CL1 -logfeeder.checkpoint.folder=/root/checkpoints -logfeeder.metrics.collector.hosts= -logfeeder.config.dir=/root/config/logfeeder/shipper-conf/ -logfeeder.config.files=shipper-conf/global.config.json,\ - shipper-conf/output.config.json -logfeeder.log.filter.enable=true -logfeeder.solr.config.interval=5 -logfeeder.solr.zk_connect_string=localhost:9983 -logfeeder.cache.enabled=true -logfeeder.cache.size=100 -logfeeder.cache.key.field=log_message -logfeeder.cache.dedup.interval=1000 -logfeeder.cache.last.dedup.enabled=true -logsearch.config.zk_connect_string=localhost:9983 -logfeeder.include.default.level=FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN -logfeeder.docker.registry.enabled=true -logfeeder.solr.core.config.name=history -#logfeeder.solr.urls=http://solr:8983/solr -#logfeeder.configs.local.enabled=true -#logfeeder.configs.filter.solr.enabled=true -#logfeeder.configs.filter.zk.enabled=true \ No newline at end of file diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/global.config.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/global.config.json deleted file mode 100644 index 65da726e7e0..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/global.config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "global":{ - "add_fields":{ - "cluster":"cl1" - }, - "source":"file", - "tail":"true", - "gen_event_md5":"true" - } -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json deleted file mode 100644 index 66a58f15170..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json +++ /dev/null @@ -1,376 +0,0 @@ -{ - "input": [ - { - "type": "ambari_audit", - "rowtype": "audit", - "add_fields": { - "logType": "AmbariAudit", - "enforcer": "ambari-acl", - "repoType": "1", - "repo": "ambari", - "level": "INFO" - }, - "path": "/root/test-logs/ambari-server/ambari-audit.log" - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "ambari_audit" - ] - } - }, - "log4j_format": "%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n", - "multiline_pattern": "^(%{TIMESTAMP_ISO8601:evtTime})", - "message_pattern": "(?m)^%{TIMESTAMP_ISO8601:evtTime},%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "evtTime": { - "map_date": { - "target_date_pattern": "yyyy-MM-dd'T'HH:mm:ss.SSSXX" - } - } - } - }, - { - "filter": "keyvalue", - "sort_order": 1, - "conditions": { - "fields": { - "type": [ - "ambari_audit" - ] - } - }, - "source_field": "log_message", - "field_split": ", ", - "value_borders": "()", - "post_map_values": { - "User": { - "map_field_value": { - "pre_value": "null", - "post_value": "unknown" - }, - "map_field_name": { - "new_field_name": "reqUser" - } - }, - "Hostname": { - "map_field_name": { - "new_field_name": "host" - } - }, - "Host name": { - "map_field_name": { - "new_field_name": "host" - } - }, - "RemoteIp": { - "map_field_name": { - "new_field_name": "cliIP" - } - }, - "RequestType": { - "map_field_name": { - "new_field_name": "cliType" - } - }, - "Operation": { - "map_field_name": { - "new_field_name": "action" - } - }, - "url": { - "map_field_name": { - "new_field_name": "resource" - } - }, - "ResourcePath": { - "map_field_name": { - "new_field_name": "resource" - } - }, - "Cluster name": { - "map_field_name": { - "new_field_name": "cluster" - } - }, - "Reason": { - "map_field_name": { - "new_field_name": 
"reason" - } - }, - "Base URL": { - "map_field_name": { - "new_field_name": "ws_base_url" - } - }, - "Command": { - "map_field_value": { - "pre_value": "null", - "post_value": "unknown" - }, - "map_field_name": { - "new_field_name": "ws_command" - } - }, - "Component": { - "map_field_name": { - "new_field_name": "ws_component" - } - }, - "Details": { - "map_field_name": { - "new_field_name": "ws_details" - } - }, - "Display name": { - "map_field_value": { - "pre_value": "null", - "post_value": "unknown" - }, - "map_field_name": { - "new_field_name": "ws_display_name" - } - }, - "OS": { - "map_field_name": { - "new_field_name": "ws_os" - } - }, - "Repo id": { - "map_field_name": { - "new_field_name": "ws_repo_id" - } - }, - "Repo version": { - "map_field_value": { - "pre_value": "null", - "post_value": "unknown" - }, - "map_field_name": { - "new_field_name": "ws_repo_version" - } - }, - "Repositories": { - "map_field_name": { - "new_field_name": "ws_repositories" - } - }, - "RequestId": { - "map_field_name": { - "new_field_name": "ws_request_id" - } - }, - "Roles": { - "map_field_name": { - "new_field_name": "ws_roles" - } - }, - "Stack": { - "map_field_name": { - "new_field_name": "ws_stack" - } - }, - "Stack version": { - "map_field_name": { - "new_field_name": "ws_stack_version" - } - }, - "TaskId": { - "map_field_name": { - "new_field_name": "ws_task_id" - } - }, - "VersionNote": { - "map_field_value": { - "pre_value": "null", - "post_value": "unknown" - }, - "map_field_name": { - "new_field_name": "ws_version_note" - } - }, - "VersionNumber": { - "map_field_value": { - "pre_value": "null", - "post_value": "unknown" - }, - "map_field_name": { - "new_field_name": "ws_version_number" - } - }, - "Status": [ - { - "map_field_copy": { - "copy_name": "ws_status" - } - }, - { - "map_field_value": { - "pre_value": "Success", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "Successfully queued", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "QUEUED", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "PENDING", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "COMPLETED", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "IN_PROGRESS", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "Failed", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "Failed to queue", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "HOLDING", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "HOLDING_FAILED", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "HOLDING_TIMEDOUT", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "FAILED", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "TIMEDOUT", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "ABORTED", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "SKIPPED_FAILED", - "post_value": "0" - } - }, - { - "map_field_name": { - "new_field_name": "result" - } - } - ], - "ResultStatus": [ - { - "map_field_copy": { - "copy_name": "ws_result_status" - } - }, - { - "map_field_value": { - "pre_value": "200 OK", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "201 Created", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "202 Accepted", - "post_value": "1" - } - }, - { - "map_field_value": { - "pre_value": "400 Bad Request", - 
"post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "401 Unauthorized", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "403 Forbidden", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "404 Not Found", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "409 Resource Conflict", - "post_value": "0" - } - }, - { - "map_field_value": { - "pre_value": "500 Internal Server Error", - "post_value": "0" - } - }, - { - "map_field_name": { - "new_field_name": "result" - } - } - ] - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json deleted file mode 100644 index 871c778bd6f..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json +++ /dev/null @@ -1,172 +0,0 @@ -{ - "input": [ - { - "type": "hdfs_audit", - "rowtype": "audit", - "is_enabled": "true", - "add_fields": { - "logType": "HDFSAudit", - "enforcer": "hadoop-acl", - "repoType": "1", - "repo": "hdfs" - }, - "path": "/root/test-logs/hdfs-audit/hdfs-audit.log" - } - ], - "filter": [ - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "hdfs_audit" - ] - - } - - }, - "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "evtTime":{ - "map_date":{ - "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - - } - - } - - }, - { - "filter":"keyvalue", - "sort_order":1, - "conditions":{ - "fields":{ - "type":[ - "hdfs_audit" - ] - - } - - }, - "source_field":"log_message", - "value_split":"=", - "field_split":"\t", - "post_map_values":{ - "src":{ - "map_field_name":{ - "new_field_name":"resource" - } - - }, - "ip":{ - "map_field_name":{ - "new_field_name":"cliIP" - } - - }, - "allowed":[ - { - "map_field_value":{ - "pre_value":"true", - "post_value":"1" - } - - }, - { - "map_field_value":{ - "pre_value":"false", - "post_value":"0" - } - - }, - { - "map_field_name":{ - "new_field_name":"result" - } - - } - - ], - "cmd":{ - "map_field_name":{ - "new_field_name":"action" - } - - }, - "proto":{ - "map_field_name":{ - "new_field_name":"cliType" - } - - }, - "callerContext":{ - "map_field_name":{ - "new_field_name":"req_caller_id" - } - - } - - } - - }, - { - "filter":"grok", - "sort_order":2, - "source_field":"ugi", - "remove_source_field":"false", - "conditions":{ - "fields":{ - "type":[ - "hdfs_audit" - ] - - } - - }, - "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}", - "post_map_values":{ - "user":{ - "map_field_name":{ - "new_field_name":"reqUser" - } - - }, - "x_user":{ - "map_field_name":{ - "new_field_name":"reqUser" - } - - }, - "p_user":{ - "map_field_name":{ - "new_field_name":"reqUser" - } - - }, - "k_user":{ - "map_field_name":{ - "new_field_name":"proxyUsers" - } - - }, - "p_authType":{ - "map_field_name":{ - "new_field_name":"authType" - } - - }, - "k_authType":{ - "map_field_name":{ - "new_field_name":"proxyAuthType" - } - - } - - } - - } - ] -} \ No newline at end of file diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hst.json 
b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hst.json deleted file mode 100644 index b9c4252100a..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hst.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "input": [ - { - "type": "hst_agent", - "rowtype": "service", - "path": "/root/test-logs/smartsense/hst-agent-test-log.txt", - "group": "Smartsense" - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "hst_agent" - ] - } - }, - "log4j_format": "", - "multiline_pattern": "^(%{CUSTOM_DATESTAMP:logtime})", - "message_pattern": "(?m)^%{CUSTOM_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVAFILE:file}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "logtime": { - "map_date": { - "target_date_pattern":"dd MMM yyyy HH:mm:ss" - } - }, - "level": { - "map_field_value": { - "pre_value": "WARNING", - "post_value": "WARN" - } - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-logsearch-docker.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-logsearch-docker.json deleted file mode 100644 index 04a3adcdee8..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-logsearch-docker.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "input": [ - { - "type": "logsearch_server", - "rowtype": "service", - "docker": "true", - "default_log_levels" : [ - "FATAL", "ERROR", "WARN", "INFO", "DEBUG" - ] - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "logsearch_server" - ] - } - }, - "log4j_format": "", - "multiline_pattern": "^(%{DATESTAMP:logtime})", - "message_pattern": "(?m)^%{DATESTAMP:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "logtime": { - "map_date": { - "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-logsearch.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-logsearch.json deleted file mode 100644 index aae029246fb..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-logsearch.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "input": [ - { - "type": "logsearch_app", - "rowtype": "service", - "path": "/root/test-logs/logsearch/logsearch-test-log.json" - } - ], - "filter": [ - { - "filter": "json", - "conditions": { - "fields": { - "type": [ - "logsearch_app" - ] - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json deleted file mode 100644 index 99412e6b674..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "input": [ - { - "type": "secure_log", - "rowtype": "service", - "path": "/root/test-logs/secure_log/secure-log.txt", - "group": "System" - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "secure_log" - ] - } - }, - "multiline_pattern": "^(%{SYSLOGTIMESTAMP:logtime})", - "message_pattern": 
"(?m)^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "logtime": { - "map_date": { - "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS", - "src_date_pattern" :"MMM dd HH:mm:ss" - } - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-storm.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-storm.json deleted file mode 100644 index 68e6fcf443d..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-storm.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "input":[ - { - "type":"storm_worker", - "rowtype":"service", - "path":"/root/test-logs/storm/worker-logs/*/*/worker.log", - "init_default_fields": "true" - } - ], - "filter":[ - { - "filter":"grok", - "sort_order": 1, - "conditions":{ - "fields":{ - "type":[ - "storm_worker" - ] - } - }, - "log4j_format":"", - "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}\\s%{GREEDYDATA:thread_name}\\s\\[%{LOGLEVEL:level}\\]\\s%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS" - } - } - } - }, - { - "filter":"grok", - "sort_order": 2, - "conditions":{ - "fields":{ - "type":[ - "storm_worker" - ] - } - }, - "source_field": "thread_name", - "remove_source_field": "false", - "message_pattern":"(Thread\\-[\\-0-9]+\\-*[\\-0-9]*\\-%{DATA:sdi_storm_component_name}\\-executor%{DATA}|%{DATA:thread_name})" - }, - { - "filter":"grok", - "sort_order": 3, - "conditions":{ - "fields":{ - "type":[ - "storm_worker" - ] - } - }, - "source_field": "path", - "remove_source_field": "false", - "message_pattern":"/root/test-logs/storm/worker-logs/%{DATA:sdi_storm_topology_id}/%{DATA:sdi_storm_worker_port}/worker\\.log" - }, - { - "filter":"grok", - "sort_order": 4, - "conditions":{ - "fields":{ - "type":[ - "storm_worker" - ] - } - }, - "source_field": "sdi_storm_topology_id", - "remove_source_field": "false", - "message_pattern":"(streamline\\-%{DATA:sdi_streamline_topology_id}\\-%{DATA:sdi_streamline_topology_name}\\-[0-9]+\\-[0-9]+)|(%{DATA:sdi_storm_topology_id})" - } - ] -} \ No newline at end of file diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json deleted file mode 100644 index 065ef8796ca..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "input": [ - { - "type": "system_message", - "rowtype": "service", - "path": "/root/test-logs/system_message/message_logs.txt", - "group": "System" - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "system_message" - ] - } - }, - "multiline_pattern": "^(%{SYSLOGTIMESTAMP:logtime})", - "message_pattern": "(?m)^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "logtime": { - "map_date": { - "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS", - "src_date_pattern" :"MMM dd HH:mm:ss" - } - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-zookeeper.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-zookeeper.json deleted file mode 100644 index 65185a10ca5..00000000000 --- 
a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-zookeeper.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "input": [ - { - "type": "zookeeper", - "rowtype": "service", - "path": "/root/test-logs/zookeeper/zookeeper-test-log.txt", - "cache_enabled" : "true", - "cache_size" : "10", - "cache_dedup_interval" : "1000", - "group": "Zookeeper" - } - ], - "filter": [ - { - "filter": "grok", - "conditions": { - "fields": { - "type": [ - "zookeeper" - ] - } - }, - "log4j_format": "%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n", - "multiline_pattern": "^(%{TIMESTAMP_ISO8601:logtime})", - "message_pattern": "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values": { - "logtime": { - "map_date": { - "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS" - } - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/output.config.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/output.config.json deleted file mode 100644 index a85b4a4fe94..00000000000 --- a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/output.config.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "output": [ - { - "is_enabled": "true", - "comment": "Output to solr for service logs", - "collection" : "hadoop_logs", - "destination": "solr", - "zk_connect_string": "localhost:9983", - "type": "service", - "skip_logtime": "true", - "conditions": { - "fields": { - "rowtype": [ - "service" - ] - } - } - }, - { - "comment": "Output to solr for audit records", - "is_enabled": "true", - "collection" : "audit_logs", - "destination": "solr", - "zk_connect_string": "localhost:9983", - "type": "audit", - "skip_logtime": "true", - "conditions": { - "fields": { - "rowtype": [ - "audit" - ] - } - } - } - ] -} diff --git a/ambari-logsearch/docker/test-config/logsearch/log4j.xml b/ambari-logsearch/docker/test-config/logsearch/log4j.xml deleted file mode 100644 index f10522be447..00000000000 --- a/ambari-logsearch/docker/test-config/logsearch/log4j.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ambari-logsearch/docker/test-config/logsearch/logsearch-env.sh b/ambari-logsearch/docker/test-config/logsearch/logsearch-env.sh deleted file mode 100644 index 2a2efd8b8fd..00000000000 --- a/ambari-logsearch/docker/test-config/logsearch/logsearch-env.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
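The output configuration above routes service rows to the hadoop_logs collection and audit rows to audit_logs through the embedded ZooKeeper at localhost:9983. A simple smoke test of that routing, assuming the test Solr instance listens on port 8886 as configured in solr-env.sh (the standalone solr compose service exposes 8983 instead):

    # Count indexed documents in each target collection.
    curl -s 'http://localhost:8886/solr/hadoop_logs/select?q=*:*&rows=0&wt=json'
    curl -s 'http://localhost:8886/solr/audit_logs/select?q=*:*&rows=0&wt=json'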
- -set -e - -export LOGSEARCH_PATH=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package - -export LOGSEARCH_CONF_DIR=/root/config/logsearch - -export LOG_PATH=/var/log/ambari-logsearch-portal -export LOG_FILE=logsearch-app.log - -export LOGSEARCH_PID_FILE=/var/run/ambari-logsearch-portal/logsearch.pid - -export JAVA_HOME=/usr/java/default - -LOGSEARCH_JAVA_MEM=${LOGSEARCH_JAVA_MEM:-"-Xmx1024m"} - -export LOGSEARCH_DEBUG=true - -export LOGSEARCH_DEBUG_PORT=5005 - -export LOGSEARCH_SSL="true" -export LOGSEARCH_KEYSTORE_LOCATION=/etc/ambari-logsearch-portal/conf/keys/logsearch.jks -export LOGSEARCH_KEYSTORE_TYPE=jks -export LOGSEARCH_TRUSTSTORE_LOCATION=/etc/ambari-logsearch-portal/conf/keys/logsearch.jks -export LOGSEARCH_TRUSTSTORE_TYPE=jks diff --git a/ambari-logsearch/docker/test-config/logsearch/logsearch-https.properties b/ambari-logsearch/docker/test-config/logsearch/logsearch-https.properties deleted file mode 100644 index 0dc502aeac4..00000000000 --- a/ambari-logsearch/docker/test-config/logsearch/logsearch-https.properties +++ /dev/null @@ -1,57 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
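logsearch-env.sh above points both the key store and trust store at /etc/ambari-logsearch-portal/conf/keys/logsearch.jks, the file created by generate_keys() in the container entrypoint. To confirm the store exists and carries the expected SAN entries, run inside the logsearch container, assuming GENERATE_KEYSTORE_AT_START=true was set in the Profile:

    keytool -list -v \
      -keystore /etc/ambari-logsearch-portal/conf/keys/logsearch.jks \
      -storepass bigdata | grep -E 'Alias name|SubjectAlternativeName|Valid'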
- -logsearch.solr.zk_connect_string=localhost:9983 - -# Service Logs -logsearch.solr.collection.service.logs=hadoop_logs - -logsearch.service.logs.split.interval.mins=15 -logsearch.collection.service.logs.numshards=3 -logsearch.collection.service.logs.replication.factor=2 - -# Audit logs -logsearch.solr.audit.logs.zk_connect_string=localhost:9983 -logsearch.solr.collection.audit.logs=audit_logs -logsearch.solr.audit.logs.url= - -logsearch.audit.logs.split.interval.mins=15 -logsearch.collection.audit.logs.numshards=3 -logsearch.collection.audit.logs.replication.factor=2 - -# History logs -logsearch.solr.collection.history=history -logsearch.solr.history.config.name=history -logsearch.collection.history.replication.factor=1 - -logsearch.solr.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets -logsearch.solr.audit.logs.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets - -# Metrics -logsearch.solr.metrics.collector.hosts= -logsearch.solr.jmx.port=18886 - -# logsearch-admin.json -logsearch.auth.file.enabled=true -logsearch.login.credentials.file=user_pass.json - -logsearch.auth.ldap.enabled=false -logsearch.auth.simple.enabled=false -logsearch.auth.external_auth.enabled=false - -logsearch.https.port=61888 -logsearch.protocol=https - -logsearch.config.zk_connect_string=localhost:9983 diff --git a/ambari-logsearch/docker/test-config/logsearch/logsearch-sso.properties b/ambari-logsearch/docker/test-config/logsearch/logsearch-sso.properties deleted file mode 100644 index 12243acfe31..00000000000 --- a/ambari-logsearch/docker/test-config/logsearch/logsearch-sso.properties +++ /dev/null @@ -1,66 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
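The HTTPS variant above switches logsearch.protocol to https on port 61888, which the compose files publish to the host. Because the entrypoint-generated certificate is self-signed, a quick reachability check needs certificate verification disabled; a minimal sketch:

    # -k skips verification of the self-signed certificate, -I requests headers only.
    curl -kI https://localhost:61888/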
- -logsearch.solr.zk_connect_string=localhost:9983 - -# Service Logs -logsearch.solr.collection.service.logs=hadoop_logs - -logsearch.service.logs.split.interval.mins=15 -logsearch.collection.service.logs.numshards=3 -logsearch.collection.service.logs.replication.factor=2 - -# Audit logs -logsearch.solr.audit.logs.zk_connect_string=localhost:9983 -logsearch.solr.collection.audit.logs=audit_logs -logsearch.solr.audit.logs.url= - -logsearch.audit.logs.split.interval.mins=15 -logsearch.collection.audit.logs.numshards=3 -logsearch.collection.audit.logs.replication.factor=2 - -# History logs -logsearch.solr.collection.history=history -logsearch.solr.history.config.name=history -logsearch.collection.history.replication.factor=1 - -logsearch.solr.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets -logsearch.solr.audit.logs.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets - -# Metrics -logsearch.solr.metrics.collector.hosts= -logsearch.solr.jmx.port=18886 - -# logsearch-admin.json -logsearch.auth.file.enabled=true -logsearch.login.credentials.file=user_pass.json - -logsearch.auth.ldap.enabled=false -logsearch.auth.simple.enabled=false -logsearch.auth.external_auth.enabled=false - -logsearch.https.port=61888 -logsearch.protocol=http - -logsearch.config.zk_connect_string=localhost:9983 - -logsearch.auth.jwt.enabled=true -logsearch.auth.jwt.public_key=MIICOjCCAaOgAwIBAgIJAMY1lA6gY1V/MA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNVBAYTAlVTMQ0wCwYDVQQIEwRUZXN0MQ0wCwYDVQQHEwRUZXN0MQ8wDQYDVQQKEwZIYWRvb3AxDTALBgNVBAsTBFRlc3QxEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xODAyMDIxNTAwMTdaFw0xOTAyMDIxNTAwMTdaMF8xCzAJBgNVBAYTAlVTMQ0wCwYDVQQIEwRUZXN0MQ0wCwYDVQQHEwRUZXN0MQ8wDQYDVQQKEwZIYWRvb3AxDTALBgNVBAsTBFRlc3QxEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAq3Gu6ji2nrjCbc3r3ls7L8zG5IoZhDgrO108ke+82Zp9QU9SCdekon0YR9nGnXxJI/gynyPdmjFOmCgrMbwCN8LiIMvaeK7IRtmzh1zzFO4omOVYYYZIeSfEBU8qEw2I2ruHbzC9Qxjf3+ZV+9LWEso5xhBimvA+XUfNN8PB868CAwEAATANBgkqhkiG9w0BAQUFAAOBgQASoA1EgaTZEokjIbWeZABbRDrml4oxQ+mkzrn1DgIx1zbdPBTFbGGXZN81TZ7nq64UFoQLKQq7a7l20iHizTz7oTTM99+1uzHYn2TkdAHNpTWCux+5aQkpUCjZStTbp/S6AHgZKchcY7IUfGTiyeYHnJAPsCqVSEzXWymLKMPjTQ== -logsearch.auth.jwt.provider_url=https://localhost:8443/gateway/knoxsso/api/v1/websso -logsearch.auth.jwt.cookie.name=hadoop-jwt -logsearch.auth.jwt.query.param.original_url=originalUrl - -logsearch.auth.trusted.proxy=true -logsearch.auth.proxyuser.users=* diff --git a/ambari-logsearch/docker/test-config/logsearch/logsearch.properties b/ambari-logsearch/docker/test-config/logsearch/logsearch.properties deleted file mode 100644 index 05da507c460..00000000000 --- a/ambari-logsearch/docker/test-config/logsearch/logsearch.properties +++ /dev/null @@ -1,62 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -logsearch.solr.zk_connect_string=localhost:9983 - -# Service Logs -logsearch.solr.collection.service.logs=hadoop_logs - -#logsearch.config.api.filter.solr.enabled=true -#logsearch.config.api.enabled=false -#logsearch.config.api.filter.zk.enabled=true - -logsearch.service.logs.split.interval.mins=15 -logsearch.collection.service.logs.numshards=3 -logsearch.collection.service.logs.replication.factor=2 - -# Audit logs -logsearch.solr.audit.logs.zk_connect_string=localhost:9983 -logsearch.solr.collection.audit.logs=audit_logs -logsearch.solr.audit.logs.url= - -logsearch.audit.logs.split.interval.mins=15 -logsearch.collection.audit.logs.numshards=3 -logsearch.collection.audit.logs.replication.factor=2 - -logsearch.solr.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets -logsearch.solr.audit.logs.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets - -# History logs -logsearch.solr.collection.history=history -logsearch.solr.history.config.name=history -logsearch.collection.history.replication.factor=1 - -# Metrics -logsearch.solr.metrics.collector.hosts= -logsearch.solr.jmx.port=18886 - -# logsearch-admin.json -logsearch.auth.file.enabled=true -logsearch.login.credentials.file=user_pass.json - -logsearch.authr.file.enabled=true - -logsearch.auth.ldap.enabled=false -logsearch.auth.simple.enabled=false -logsearch.auth.external_auth.enabled=false - -logsearch.protocol=http - -logsearch.config.zk_connect_string=localhost:9983 diff --git a/ambari-logsearch/docker/test-config/solr/log4j.properties b/ambari-logsearch/docker/test-config/solr/log4j.properties deleted file mode 100644 index adb2d9ebc72..00000000000 --- a/ambari-logsearch/docker/test-config/solr/log4j.properties +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Logging level -solr.log=/var/log/ambari-logsearch-solr -log4j.rootLogger=INFO, file - -log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender - -log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout -log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x [%X{collection} %X{shard} %X{replica} %X{core}] \u2013 %m%n - -#- size rotation with log cleanup. 
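Three variants of the server properties ship with the test config: logsearch.properties (plain HTTP, shown above), logsearch-https.properties, and logsearch-sso.properties; the entrypoint copies the SSO variant over the default when KNOX=true, and presumably the HTTPS variant when LOGSEARCH_HTTPS_ENABLED is set. Diffing them is the quickest way to see which settings each scenario toggles:

    cd ambari-logsearch/docker/test-config/logsearch
    diff logsearch.properties logsearch-https.properties
    diff logsearch.properties logsearch-sso.properties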
-log4j.appender.file=org.apache.log4j.RollingFileAppender -log4j.appender.file.MaxFileSize=10MB -log4j.appender.file.MaxBackupIndex=9 - -#- File to log to and log format -log4j.appender.file.File=${solr.log}/solr.log -log4j.appender.file.layout=org.apache.log4j.PatternLayout -log4j.appender.file.layout.ConversionPattern=%d{ISO8601} [%t] %-5p [%X{collection} %X{shard} %X{replica} %X{core}] %C (%F:%L) - %m%n - -log4j.logger.org.apache.zookeeper=WARN -log4j.logger.org.apache.hadoop=WARN - -# set to INFO to enable infostream log messages -log4j.logger.org.apache.solr.update.LoggingInfoStream=OFF \ No newline at end of file diff --git a/ambari-logsearch/docker/test-config/solr/solr-env-ssl.sh b/ambari-logsearch/docker/test-config/solr/solr-env-ssl.sh deleted file mode 100644 index 6c78da0131c..00000000000 --- a/ambari-logsearch/docker/test-config/solr/solr-env-ssl.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# By default the script will use JAVA_HOME to determine which java -# to use, but you can set a specific path for Solr to use without -# affecting other Java applications on your server/workstation. -SOLR_JAVA_HOME=/usr/java/default - -# Increase Java Min/Max Heap as needed to support your indexing / query needs -SOLR_JAVA_MEM="-Xms1024m -Xmx2048m" - -# Enable verbose GC logging -GC_LOG_OPTS="-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \ --XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime" - -# These GC settings have shown to work well for a number of common Solr workloads -GC_TUNE="-XX:NewRatio=3 \ --XX:SurvivorRatio=4 \ --XX:TargetSurvivorRatio=90 \ --XX:MaxTenuringThreshold=8 \ --XX:+UseConcMarkSweepGC \ --XX:+UseParNewGC \ --XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \ --XX:+CMSScavengeBeforeRemark \ --XX:PretenureSizeThreshold=64m \ --XX:+UseCMSInitiatingOccupancyOnly \ --XX:CMSInitiatingOccupancyFraction=50 \ --XX:CMSMaxAbortablePrecleanTime=6000 \ --XX:+CMSParallelRemarkEnabled \ --XX:+ParallelRefProcEnabled" - -# Set the ZooKeeper connection string if using an external ZooKeeper ensemble -# e.g. 
host1:2181,host2:2181/chroot -# Leave empty if not using SolrCloud -#ZK_HOST="localhost:9983/ambari-solr" - -# Set the ZooKeeper client timeout (for SolrCloud mode) -ZK_CLIENT_TIMEOUT="60000" - -# By default the start script uses "localhost"; override the hostname here -# for production SolrCloud environments to control the hostname exposed to cluster state -#SOLR_HOST="192.168.1.1" - -# By default the start script uses UTC; override the timezone if needed -#SOLR_TIMEZONE="UTC" - -# Set to true to activate the JMX RMI connector to allow remote JMX client applications -# to monitor the JVM hosting Solr; set to "false" to disable that behavior -# (false is recommended in production environments) -ENABLE_REMOTE_JMX_OPTS="true" - -# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here -RMI_PORT=18886 - -# Anything you add to the SOLR_OPTS variable will be included in the java -# start command line as-is, in ADDITION to other options. If you specify the -# -a option on start script, those options will be appended as well. Examples: -#SOLR_OPTS="$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000" -#SOLR_OPTS="$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000" -#SOLR_OPTS="$SOLR_OPTS -Dsolr.clustering.enabled=true" -SOLR_OPTS="$SOLR_OPTS -Djava.rmi.server.hostname=localhost" - -# Location where the bin/solr script will save PID files for running instances -# If not set, the script will create PID files in $SOLR_TIP/bin -SOLR_PID_DIR=/var/run/ambari-logsearch-solr - -# Path to a directory where Solr creates index files, the specified directory -# must contain a solr.xml; by default, Solr will use server/solr -SOLR_HOME=/root/logsearch_solr_index/data - -# Solr provides a default Log4J configuration properties file in server/resources -# however, you may want to customize the log settings and file appender location -# so you can point the script to use a different log4j.properties file -LOG4J_PROPS=/root/config/solr/log4j.properties - -# Location where Solr should write logs to; should agree with the file appender -# settings in server/resources/log4j.properties -SOLR_LOGS_DIR=/var/log/ambari-logsearch-solr - -# Sets the port Solr binds to, default is 8983 -SOLR_PORT=8886 - -SOLR_SSL_KEY_STORE=/root/config/ssl/logsearch.keyStore.jks -SOLR_SSL_KEY_STORE_PASSWORD=bigdata -SOLR_SSL_TRUST_STORE=/root/config/ssl/logsearch.trustStore.jks -SOLR_SSL_TRUST_STORE_PASSWORD=bigdata -SOLR_SSL_NEED_CLIENT_AUTH=false -SOLR_SSL_WANT_CLIENT_AUTH=false - diff --git a/ambari-logsearch/docker/test-config/solr/solr-env.sh b/ambari-logsearch/docker/test-config/solr/solr-env.sh deleted file mode 100644 index 89e3a4d3f4d..00000000000 --- a/ambari-logsearch/docker/test-config/solr/solr-env.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# By default the script will use JAVA_HOME to determine which java -# to use, but you can set a specific path for Solr to use without -# affecting other Java applications on your server/workstation. -SOLR_JAVA_HOME=/usr/java/default - -# Increase Java Min/Max Heap as needed to support your indexing / query needs -SOLR_JAVA_MEM="-Xms1024m -Xmx2048m" - -# Enable verbose GC logging -GC_LOG_OPTS="-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \ --XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime" - -# These GC settings have shown to work well for a number of common Solr workloads -GC_TUNE="-XX:NewRatio=3 \ --XX:SurvivorRatio=4 \ --XX:TargetSurvivorRatio=90 \ --XX:MaxTenuringThreshold=8 \ --XX:+UseConcMarkSweepGC \ --XX:+UseParNewGC \ --XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \ --XX:+CMSScavengeBeforeRemark \ --XX:PretenureSizeThreshold=64m \ --XX:+UseCMSInitiatingOccupancyOnly \ --XX:CMSInitiatingOccupancyFraction=50 \ --XX:CMSMaxAbortablePrecleanTime=6000 \ --XX:+CMSParallelRemarkEnabled \ --XX:+ParallelRefProcEnabled" - -# Set the ZooKeeper connection string if using an external ZooKeeper ensemble -# e.g. host1:2181,host2:2181/chroot -# Leave empty if not using SolrCloud -#ZK_HOST="localhost:9983/ambari-solr" - -# Set the ZooKeeper client timeout (for SolrCloud mode) -ZK_CLIENT_TIMEOUT="60000" - -# By default the start script uses "localhost"; override the hostname here -# for production SolrCloud environments to control the hostname exposed to cluster state -#SOLR_HOST="192.168.1.1" - -# By default the start script uses UTC; override the timezone if needed -#SOLR_TIMEZONE="UTC" - -# Set to true to activate the JMX RMI connector to allow remote JMX client applications -# to monitor the JVM hosting Solr; set to "false" to disable that behavior -# (false is recommended in production environments) -ENABLE_REMOTE_JMX_OPTS="true" - -# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here -RMI_PORT=18886 - -# Anything you add to the SOLR_OPTS variable will be included in the java -# start command line as-is, in ADDITION to other options. If you specify the -# -a option on start script, those options will be appended as well. 
Examples: -#SOLR_OPTS="$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000" -#SOLR_OPTS="$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000" -#SOLR_OPTS="$SOLR_OPTS -Dsolr.clustering.enabled=true" -SOLR_OPTS="$SOLR_OPTS -Djava.rmi.server.hostname=localhost" - -# Location where the bin/solr script will save PID files for running instances -# If not set, the script will create PID files in $SOLR_TIP/bin -SOLR_PID_DIR=/var/run/ambari-logsearch-solr - -# Path to a directory where Solr creates index files, the specified directory -# must contain a solr.xml; by default, Solr will use server/solr -SOLR_HOME=/root/logsearch_solr_index/data - -# Solr provides a default Log4J configuration properties file in server/resources -# however, you may want to customize the log settings and file appender location -# so you can point the script to use a different log4j.properties file -LOG4J_PROPS=/root/config/solr/log4j.properties - -# Location where Solr should write logs to; should agree with the file appender -# settings in server/resources/log4j.properties -SOLR_LOGS_DIR=/var/log/ambari-logsearch-solr - -# Sets the port Solr binds to, default is 8983 -SOLR_PORT=8886 diff --git a/ambari-logsearch/docker/test-config/solr/solr.xml b/ambari-logsearch/docker/test-config/solr/solr.xml deleted file mode 100644 index 45f9c137632..00000000000 --- a/ambari-logsearch/docker/test-config/solr/solr.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - ${host:} - ${jetty.port:} - ${hostContext:solr} - ${zkClientTimeout:15000} - ${genericCoreNodeNames:true} - - \ No newline at end of file diff --git a/ambari-logsearch/docker/test-config/solr/zoo.cfg b/ambari-logsearch/docker/test-config/solr/zoo.cfg deleted file mode 100644 index 48665ce791a..00000000000 --- a/ambari-logsearch/docker/test-config/solr/zoo.cfg +++ /dev/null @@ -1,23 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -clientPort=9983 -initLimit=10 -autopurge.purgeInterval=24 -syncLimit=5 -tickTime=2000 -dataDir=/hadoop/zookeeper -autopurge.snapRetainCount=30 -server.1=localhost:2888:3888 \ No newline at end of file diff --git a/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log b/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log deleted file mode 100644 index 90e24e3228a..00000000000 --- a/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log +++ /dev/null @@ -1,390 +0,0 @@ -2016-10-03T16:26:13.333Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles( - Ambari: Ambari Administrator -), Status(Success) -2016-10-03T16:26:54.834Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat6/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat6), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6) -2016-10-03T16:26:54.845Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat7/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat7), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.0.0/) -2016-10-03T16:26:54.847Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat6/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat6), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0/) -2016-10-03T16:26:54.857Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/debian7/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(debian7), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/debian7) -2016-10-03T16:26:54.857Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat7/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat7), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7) -2016-10-03T16:26:54.860Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/debian7/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(debian7), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/debian7/2.x/updates/2.5.0.0) -2016-10-03T16:26:54.935Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse12/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse12), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/sles12) -2016-10-03T16:26:54.943Z, 
User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse11/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse11), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/suse11sp3/2.x/updates/2.5.0.0) -2016-10-03T16:26:54.945Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu12/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu12), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/ubuntu12/2.x/updates/2.5.0.0) -2016-10-03T16:26:54.951Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse12/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse12), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/sles12/2.x/updates/2.5.0.0) -2016-10-03T16:26:54.954Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse11/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse11), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/suse11sp3) -2016-10-03T16:26:54.959Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu12/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu12), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu12) -2016-10-03T16:26:54.997Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu14/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu14), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/ubuntu14/2.x/updates/2.5.0.0) -2016-10-03T16:26:55.003Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu14/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu14), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu14) -2016-10-03T16:26:58.669Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:27:20.063Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:27:24.185Z, User(admin), RemoteIp(192.168.64.1), Operation(Request from server), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/requests), ResultStatus(202 Accepted), Command(null), Cluster name(null) -2016-10-03T16:27:24.206Z, User(admin), Operation(Check host), Status(IN_PROGRESS), RequestId(1) 
-2016-10-03T16:27:24.207Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(2), Hostname(c6401.ambari.apache.org) -2016-10-03T16:27:24.211Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(3), Hostname(c6402.ambari.apache.org) -2016-10-03T16:27:24.213Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(4), Hostname(c6403.ambari.apache.org) -2016-10-03T16:27:24.214Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(5), Hostname(c6404.ambari.apache.org) -2016-10-03T16:27:25.836Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(2), Hostname(c6401.ambari.apache.org) -2016-10-03T16:27:26.824Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(3), Hostname(c6402.ambari.apache.org) -2016-10-03T16:27:27.830Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(5), Hostname(c6404.ambari.apache.org) -2016-10-03T16:27:28.831Z, User(admin), Operation(Check host), Status(COMPLETED), RequestId(1) -2016-10-03T16:27:28.833Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(4), Hostname(c6403.ambari.apache.org) -2016-10-03T16:27:29.428Z, User(admin), RemoteIp(192.168.64.1), Operation(Request from server), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/requests), ResultStatus(202 Accepted), Command(null), Cluster name(null) -2016-10-03T16:27:29.440Z, User(admin), Operation(Check host), Status(IN_PROGRESS), RequestId(2) -2016-10-03T16:27:29.443Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(6), Hostname(c6401.ambari.apache.org) -2016-10-03T16:27:29.443Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(7), Hostname(c6402.ambari.apache.org) -2016-10-03T16:27:29.444Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(8), Hostname(c6403.ambari.apache.org) -2016-10-03T16:27:29.444Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(9), Hostname(c6404.ambari.apache.org) -2016-10-03T16:27:38.816Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(7), Hostname(c6402.ambari.apache.org) -2016-10-03T16:27:39.818Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(9), Hostname(c6404.ambari.apache.org) -2016-10-03T16:27:40.822Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(8), Hostname(c6403.ambari.apache.org) -2016-10-03T16:27:42.817Z, User(admin), Operation(Check host), Status(COMPLETED), RequestId(2) -2016-10-03T16:27:42.818Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(6), Hostname(c6401.ambari.apache.org) -2016-10-03T16:28:19.410Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:28:49.829Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:28:56.413Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) 
-2016-10-03T16:29:02.762Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:29:20.084Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:29:30.390Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions), ResultStatus(201 Created) -2016-10-03T16:29:30.430Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository version change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/repository_versions/1), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), Display name(null), Repo version(null), Repositories( -Operating system: debian7 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/debian7/2.x/updates/2.5.0.0) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/debian7) -Operating system: redhat6 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0/) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6) -Operating system: redhat7 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.0.0/) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7) -Operating system: suse11 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/suse11sp3/2.x/updates/2.5.0.0) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/suse11sp3) -Operating system: suse12 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/sles12/2.x/updates/2.5.0.0) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/sles12) -Operating system: ubuntu12 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/ubuntu12/2.x/updates/2.5.0.0) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu12) -Operating system: ubuntu14 - Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/ubuntu14/2.x/updates/2.5.0.0) - Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu14) -) -2016-10-03T16:29:30.578Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(201 Created), VersionNumber(Vnull), VersionNote(null) -2016-10-03T16:29:30.828Z, User(admin), RemoteIp(192.168.64.1), Operation(null), RequestId(null), Status(Successfully queued) -2016-10-03T16:29:32.696Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), 
ResultStatus(200 OK), VersionNumber(V1), VersionNote(Initial configurations for HDFS) -2016-10-03T16:29:32.890Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=HDFS), ResultStatus(201 Created) -2016-10-03T16:29:32.996Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=ZOOKEEPER), ResultStatus(201 Created) -2016-10-03T16:29:33.044Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=AMBARI_INFRA), ResultStatus(201 Created) -2016-10-03T16:29:33.099Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=AMBARI_METRICS), ResultStatus(201 Created) -2016-10-03T16:29:33.239Z, User(admin), RemoteIp(192.168.64.1), Operation(Host addition), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(c6402.ambari.apache.org) -2016-10-03T16:29:33.359Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(NAMENODE) -2016-10-03T16:29:33.428Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(SECONDARY_NAMENODE) -2016-10-03T16:29:33.516Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(ZOOKEEPER_SERVER) -2016-10-03T16:29:33.562Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(INFRA_SOLR) -2016-10-03T16:29:33.629Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(METRICS_GRAFANA) -2016-10-03T16:29:33.672Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(METRICS_COLLECTOR) -2016-10-03T16:29:33.780Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(DATANODE) -2016-10-03T16:29:33.851Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(HDFS_CLIENT) -2016-10-03T16:29:33.926Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), 
url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(ZOOKEEPER_CLIENT) -2016-10-03T16:29:33.988Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(INFRA_SOLR_CLIENT) -2016-10-03T16:29:34.107Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(METRICS_MONITOR) -2016-10-03T16:29:34.814Z, User(admin), RemoteIp(192.168.64.1), Operation(INSTALLED: all services (myCluster)), RequestId(3), Status(Successfully queued) -2016-10-03T16:29:35.035Z, User(admin), Operation(Install Services), Status(IN_PROGRESS), RequestId(3) -2016-10-03T16:29:35.035Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(10), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.036Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(3), TaskId(11), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.036Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(QUEUED), RequestId(3), TaskId(12), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.036Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(3), TaskId(13), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.037Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(QUEUED), RequestId(3), TaskId(14), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.043Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(15), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.043Z, User(admin), Operation(INSTALL NAMENODE), Status(QUEUED), RequestId(3), TaskId(16), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.043Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(3), TaskId(17), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.045Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(3), TaskId(18), Hostname(c6401.ambari.apache.org) -2016-10-03T16:29:35.046Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(19), Hostname(c6402.ambari.apache.org) -2016-10-03T16:29:35.047Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(20), Hostname(c6402.ambari.apache.org) -2016-10-03T16:29:35.047Z, User(admin), Operation(INSTALL SECONDARY_NAMENODE), Status(QUEUED), RequestId(3), TaskId(21), Hostname(c6402.ambari.apache.org) -2016-10-03T16:29:35.047Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(3), TaskId(22), Hostname(c6402.ambari.apache.org) -2016-10-03T16:29:35.048Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(23), Hostname(c6403.ambari.apache.org) -2016-10-03T16:29:35.051Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(QUEUED), RequestId(3), TaskId(24), Hostname(c6403.ambari.apache.org) -2016-10-03T16:29:35.052Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(25), Hostname(c6403.ambari.apache.org) -2016-10-03T16:29:35.052Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(3), TaskId(26), Hostname(c6403.ambari.apache.org) -2016-10-03T16:29:35.052Z, User(admin), 
Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(27), Hostname(c6404.ambari.apache.org) -2016-10-03T16:29:35.053Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(3), TaskId(28), Hostname(c6404.ambari.apache.org) -2016-10-03T16:29:35.053Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(3), TaskId(29), Hostname(c6404.ambari.apache.org) -2016-10-03T16:29:35.057Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(30), Hostname(c6404.ambari.apache.org) -2016-10-03T16:29:35.059Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(3), TaskId(31), Hostname(c6404.ambari.apache.org) -2016-10-03T16:29:35.629Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T16:53:55.860Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(3), TaskId(10), Hostname(c6401.ambari.apache.org) -2016-10-03T16:53:56.837Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(3), TaskId(11), Hostname(c6401.ambari.apache.org) -2016-10-03T16:58:35.834Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(3), TaskId(19), Hostname(c6402.ambari.apache.org) -2016-10-03T17:00:08.822Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(3), TaskId(20), Hostname(c6402.ambari.apache.org) -2016-10-03T17:00:09.827Z, User(admin), Operation(INSTALL SECONDARY_NAMENODE), Status(COMPLETED), RequestId(3), TaskId(21), Hostname(c6402.ambari.apache.org) -2016-10-03T17:00:10.831Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(3), TaskId(22), Hostname(c6402.ambari.apache.org) -2016-10-03T17:00:15.818Z, User(admin), Operation(Install Services), Status(FAILED), RequestId(3) -2016-10-03T17:00:15.818Z, User(admin), Operation(INSTALL DATANODE), Status(FAILED), RequestId(3), TaskId(23), Hostname(c6403.ambari.apache.org) -2016-10-03T17:00:16.820Z, User(admin), Operation(INSTALL DATANODE), Status(FAILED), RequestId(3), TaskId(27), Hostname(c6404.ambari.apache.org) -2016-10-03T17:00:23.828Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(3), TaskId(28), Hostname(c6404.ambari.apache.org) -2016-10-03T17:00:54.818Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(COMPLETED), RequestId(3), TaskId(29), Hostname(c6404.ambari.apache.org) -2016-10-03T17:01:39.826Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(3), TaskId(30), Hostname(c6404.ambari.apache.org) -2016-10-03T17:01:41.825Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(COMPLETED), RequestId(3), TaskId(31), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:31.822Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(FAILED), RequestId(3), TaskId(12), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:31.839Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(FAILED), RequestId(3), TaskId(24), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:32.252Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(ABORTED), RequestId(3), TaskId(13), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(ABORTED), RequestId(3), TaskId(14), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(ABORTED), RequestId(3), TaskId(15), 
Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL NAMENODE), Status(ABORTED), RequestId(3), TaskId(16), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(ABORTED), RequestId(3), TaskId(17), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:32.257Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(ABORTED), RequestId(3), TaskId(18), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:32.257Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(ABORTED), RequestId(3), TaskId(25), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:32.257Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(ABORTED), RequestId(3), TaskId(26), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:48.818Z, User(admin), RemoteIp(192.168.64.1), Operation(INSTALLED: all services on all hosts (myCluster)), RequestId(4), Status(Successfully queued) -2016-10-03T17:02:48.875Z, User(admin), Operation(Install Components), Status(IN_PROGRESS), RequestId(4) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(4), TaskId(32), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(QUEUED), RequestId(4), TaskId(33), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(4), TaskId(34), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(QUEUED), RequestId(4), TaskId(35), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(4), TaskId(36), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL NAMENODE), Status(QUEUED), RequestId(4), TaskId(37), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(4), TaskId(38), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(4), TaskId(39), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(4), TaskId(40), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(QUEUED), RequestId(4), TaskId(41), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(4), TaskId(42), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(4), TaskId(43), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(4), TaskId(44), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:48.878Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(4), TaskId(45), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:48.878Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(4), TaskId(46), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:48.878Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(4), TaskId(47), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:50.836Z, User(admin), Operation(INSTALL DATANODE), 
Status(COMPLETED), RequestId(4), TaskId(44), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:51.825Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(4), TaskId(32), Hostname(c6401.ambari.apache.org) -2016-10-03T17:02:51.836Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(4), TaskId(45), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:52.821Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(COMPLETED), RequestId(4), TaskId(46), Hostname(c6404.ambari.apache.org) -2016-10-03T17:02:52.855Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(4), TaskId(40), Hostname(c6403.ambari.apache.org) -2016-10-03T17:02:53.817Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(COMPLETED), RequestId(4), TaskId(47), Hostname(c6404.ambari.apache.org) -2016-10-03T17:03:38.840Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(COMPLETED), RequestId(4), TaskId(33), Hostname(c6401.ambari.apache.org) -2016-10-03T17:03:39.821Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(COMPLETED), RequestId(4), TaskId(34), Hostname(c6401.ambari.apache.org) -2016-10-03T17:04:32.819Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(COMPLETED), RequestId(4), TaskId(35), Hostname(c6401.ambari.apache.org) -2016-10-03T17:04:34.827Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(4), TaskId(36), Hostname(c6401.ambari.apache.org) -2016-10-03T17:04:35.841Z, User(admin), Operation(INSTALL NAMENODE), Status(COMPLETED), RequestId(4), TaskId(37), Hostname(c6401.ambari.apache.org) -2016-10-03T17:04:37.835Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(COMPLETED), RequestId(4), TaskId(38), Hostname(c6401.ambari.apache.org) -2016-10-03T17:04:37.836Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(4), TaskId(39), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:09.821Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(COMPLETED), RequestId(4), TaskId(41), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:09.844Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(4), TaskId(42), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:11.839Z, User(admin), Operation(Install Components), Status(COMPLETED), RequestId(4) -2016-10-03T17:07:11.839Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(4), TaskId(43), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:14.709Z, User(admin), RemoteIp(192.168.64.1), Operation(STARTED: all services (myCluster)), RequestId(5), Status(Successfully queued) -2016-10-03T17:07:14.751Z, User(admin), Operation(Start Services), Status(IN_PROGRESS), RequestId(5) -2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(48), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:14.751Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(QUEUED), RequestId(5), TaskId(49), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(50), Hostname(c6402.ambari.apache.org) -2016-10-03T17:07:14.751Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(QUEUED), RequestId(5), TaskId(51), Hostname(c6402.ambari.apache.org) -2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(52), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:14.751Z, User(admin), Operation(START 
ZOOKEEPER_SERVER), Status(QUEUED), RequestId(5), TaskId(53), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(54), Hostname(c6404.ambari.apache.org) -2016-10-03T17:07:18.817Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(52), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:18.823Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(50), Hostname(c6402.ambari.apache.org) -2016-10-03T17:07:18.830Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(54), Hostname(c6404.ambari.apache.org) -2016-10-03T17:07:19.900Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(48), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:19.918Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(5), TaskId(53), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:19.935Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(5), TaskId(51), Hostname(c6402.ambari.apache.org) -2016-10-03T17:07:21.820Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(5), TaskId(49), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:22.836Z, User(admin), Operation(START INFRA_SOLR), Status(QUEUED), RequestId(5), TaskId(55), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:22.836Z, User(admin), Operation(SERVICE_CHECK ZOOKEEPER_QUORUM_SERVICE_CHECK), Details(SERVICE_CHECK ZOOKEEPER), Status(QUEUED), RequestId(5), TaskId(56), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:37.819Z, User(admin), Operation(START INFRA_SOLR), Status(COMPLETED), RequestId(5), TaskId(55), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:46.825Z, User(admin), Operation(SERVICE_CHECK ZOOKEEPER_QUORUM_SERVICE_CHECK), Details(SERVICE_CHECK ZOOKEEPER), Status(COMPLETED), RequestId(5), TaskId(56), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:47.041Z, User(admin), Operation(SERVICE_CHECK AMBARI_INFRA_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_INFRA), Status(QUEUED), RequestId(5), TaskId(57), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:47.041Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(58), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:47.041Z, User(admin), Operation(START NAMENODE), Status(QUEUED), RequestId(5), TaskId(59), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:47.041Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(60), Hostname(c6402.ambari.apache.org) -2016-10-03T17:07:47.041Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(61), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:47.042Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(62), Hostname(c6404.ambari.apache.org) -2016-10-03T17:07:48.823Z, User(admin), Operation(SERVICE_CHECK AMBARI_INFRA_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_INFRA), Status(COMPLETED), RequestId(5), TaskId(57), Hostname(c6401.ambari.apache.org) -2016-10-03T17:07:52.828Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(61), Hostname(c6403.ambari.apache.org) -2016-10-03T17:07:52.844Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(60), Hostname(c6402.ambari.apache.org) -2016-10-03T17:07:53.820Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), 
TaskId(62), Hostname(c6404.ambari.apache.org) -2016-10-03T17:07:53.833Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(58), Hostname(c6401.ambari.apache.org) -2016-10-03T17:08:13.818Z, User(admin), Operation(START NAMENODE), Status(COMPLETED), RequestId(5), TaskId(59), Hostname(c6401.ambari.apache.org) -2016-10-03T17:08:14.280Z, User(admin), Operation(START SECONDARY_NAMENODE), Status(QUEUED), RequestId(5), TaskId(63), Hostname(c6402.ambari.apache.org) -2016-10-03T17:08:20.821Z, User(admin), Operation(START SECONDARY_NAMENODE), Status(COMPLETED), RequestId(5), TaskId(63), Hostname(c6402.ambari.apache.org) -2016-10-03T17:08:21.333Z, User(admin), Operation(SERVICE_CHECK HDFS_SERVICE_CHECK), Details(SERVICE_CHECK HDFS), Status(QUEUED), RequestId(5), TaskId(64), Hostname(c6401.ambari.apache.org) -2016-10-03T17:08:21.334Z, User(admin), Operation(START METRICS_COLLECTOR), Status(QUEUED), RequestId(5), TaskId(65), Hostname(c6403.ambari.apache.org) -2016-10-03T17:08:29.815Z, User(admin), Operation(SERVICE_CHECK HDFS_SERVICE_CHECK), Details(SERVICE_CHECK HDFS), Status(COMPLETED), RequestId(5), TaskId(64), Hostname(c6401.ambari.apache.org) -2016-10-03T17:09:17.819Z, User(admin), Operation(START METRICS_COLLECTOR), Status(COMPLETED), RequestId(5), TaskId(65), Hostname(c6403.ambari.apache.org) -2016-10-03T17:09:18.621Z, User(admin), Operation(SERVICE_CHECK AMBARI_METRICS_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_METRICS), Status(QUEUED), RequestId(5), TaskId(66), Hostname(c6401.ambari.apache.org) -2016-10-03T17:09:18.621Z, User(admin), Operation(START METRICS_GRAFANA), Status(QUEUED), RequestId(5), TaskId(67), Hostname(c6401.ambari.apache.org) -2016-10-03T17:09:39.816Z, User(admin), Operation(SERVICE_CHECK AMBARI_METRICS_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_METRICS), Status(COMPLETED), RequestId(5), TaskId(66), Hostname(c6401.ambari.apache.org) -2016-10-03T17:09:43.823Z, User(admin), Operation(Start Services), Status(COMPLETED), RequestId(5) -2016-10-03T17:09:43.824Z, User(admin), Operation(START METRICS_GRAFANA), Status(COMPLETED), RequestId(5), TaskId(67), Hostname(c6401.ambari.apache.org) -2016-10-03T17:14:46.087Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created) -2016-10-03T17:14:56.802Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(200 OK), VersionNumber(Vnull), VersionNote(null) -2016-10-04T05:55:42.000Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.020Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.024Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.027Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.029Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.033Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.045Z, User(null), RemoteIp(192.168.64.1), Operation(User login), 
Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.047Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.049Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.051Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.054Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.055Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.057Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.061Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.064Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.068Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.070Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.072Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.074Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.078Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.080Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.081Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.083Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.085Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.087Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.088Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.090Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.091Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.092Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.100Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.100Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) 
-2016-10-04T05:55:42.102Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.103Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.104Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.105Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.107Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.108Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.109Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.110Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:42.130Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T05:55:44.919Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:02:57.511Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:02:59.309Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles( - Ambari: Ambari Administrator -), Status(Success) -2016-10-04T08:05:40.063Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/widget_layouts), ResultStatus(201 Created) -2016-10-04T08:05:40.085Z, User(admin), RemoteIp(192.168.64.1), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/users/admin/activeWidgetLayouts/), ResultStatus(200 OK) -2016-10-04T08:11:45.150Z, User(admin), RemoteIp(192.168.64.1), Operation(STARTED: METRICS_COLLECTOR/AMBARI_METRICS on c6403.ambari.apache.org (myCluster)), Host name(c6403.ambari.apache.org), RequestId(6), Status(Successfully queued) -2016-10-04T08:11:45.221Z, User(admin), Operation(Start Metrics Collector), Status(IN_PROGRESS), RequestId(6) -2016-10-04T08:11:45.222Z, User(admin), Operation(START METRICS_COLLECTOR), Status(QUEUED), RequestId(6), TaskId(102), Hostname(c6403.ambari.apache.org) -2016-10-04T08:15:30.413Z, User(admin), Operation(Start Metrics Collector), Status(COMPLETED), RequestId(6) -2016-10-04T08:15:30.414Z, User(admin), Operation(START METRICS_COLLECTOR), Status(COMPLETED), RequestId(6), TaskId(102), Hostname(c6403.ambari.apache.org) -2016-10-04T08:17:44.550Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.552Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.554Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.556Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.557Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( 
-), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.559Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.561Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.564Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:44.595Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:45.370Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-04T08:17:45.495Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-05T12:06:48.400Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-05T12:13:51.541Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles( - Ambari: Ambari Administrator -), Status(Success) -2016-10-05T12:14:27.945Z, User(admin), RemoteIp(192.168.64.1), Operation(null), RequestId(null), Status(Successfully queued) -2016-10-05T12:14:28.414Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(200 OK), VersionNumber(V1), VersionNote(Initial configurations for Log Search) -2016-10-05T12:14:28.562Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=LOGSEARCH), ResultStatus(201 Created) -2016-10-05T12:14:28.630Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(LOGSEARCH_SERVER) -2016-10-05T12:14:28.744Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(LOGSEARCH_LOGFEEDER) -2016-10-05T12:14:29.066Z, User(admin), RemoteIp(192.168.64.1), Operation(INSTALLED: all services (myCluster)), RequestId(7), Status(Successfully queued) -2016-10-05T12:14:29.138Z, User(admin), Operation(Install Services), Status(IN_PROGRESS), RequestId(7) -2016-10-05T12:14:29.146Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(152), Hostname(c6401.ambari.apache.org) -2016-10-05T12:14:29.147Z, User(admin), Operation(INSTALL LOGSEARCH_SERVER), Status(QUEUED), RequestId(7), TaskId(153), Hostname(c6401.ambari.apache.org) -2016-10-05T12:14:29.147Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(154), Hostname(c6402.ambari.apache.org) -2016-10-05T12:14:29.148Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(155), Hostname(c6403.ambari.apache.org) -2016-10-05T12:14:29.151Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(156), Hostname(c6404.ambari.apache.org) -2016-10-05T12:14:32.964Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(152), 
Hostname(c6401.ambari.apache.org) -2016-10-05T12:14:34.925Z, User(admin), Operation(INSTALL LOGSEARCH_SERVER), Status(COMPLETED), RequestId(7), TaskId(153), Hostname(c6401.ambari.apache.org) -2016-10-05T12:14:52.942Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(155), Hostname(c6403.ambari.apache.org) -2016-10-05T12:14:52.965Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(156), Hostname(c6404.ambari.apache.org) -2016-10-05T12:15:06.922Z, User(admin), Operation(Install Services), Status(COMPLETED), RequestId(7) -2016-10-05T12:15:06.928Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(154), Hostname(c6402.ambari.apache.org) -2016-10-05T12:15:11.172Z, User(admin), RemoteIp(192.168.64.1), Operation(STARTED: all services (myCluster)), RequestId(8), Status(Successfully queued) -2016-10-05T12:15:11.180Z, User(admin), Operation(Start Added Services), Status(IN_PROGRESS), RequestId(8) -2016-10-05T12:15:11.180Z, User(admin), Operation(START LOGSEARCH_SERVER), Status(QUEUED), RequestId(8), TaskId(157), Hostname(c6401.ambari.apache.org) -2016-10-05T12:15:20.953Z, User(admin), Operation(START LOGSEARCH_SERVER), Status(COMPLETED), RequestId(8), TaskId(157), Hostname(c6401.ambari.apache.org) -2016-10-05T12:15:21.328Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(158), Hostname(c6401.ambari.apache.org) -2016-10-05T12:15:21.328Z, User(admin), Operation(SERVICE_CHECK LOGSEARCH_SERVICE_CHECK), Details(SERVICE_CHECK LOGSEARCH), Status(QUEUED), RequestId(8), TaskId(159), Hostname(c6401.ambari.apache.org) -2016-10-05T12:15:21.329Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(160), Hostname(c6402.ambari.apache.org) -2016-10-05T12:15:21.329Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(161), Hostname(c6403.ambari.apache.org) -2016-10-05T12:15:21.329Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(162), Hostname(c6404.ambari.apache.org) -2016-10-05T12:15:22.968Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(160), Hostname(c6402.ambari.apache.org) -2016-10-05T12:15:23.025Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(162), Hostname(c6404.ambari.apache.org) -2016-10-05T12:15:26.941Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(158), Hostname(c6401.ambari.apache.org) -2016-10-05T12:15:48.950Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(161), Hostname(c6403.ambari.apache.org) -2016-10-05T12:15:55.920Z, User(admin), Operation(Start Added Services), Status(COMPLETED), RequestId(8) -2016-10-05T12:15:55.920Z, User(admin), Operation(SERVICE_CHECK LOGSEARCH_SERVICE_CHECK), Details(SERVICE_CHECK LOGSEARCH), Status(COMPLETED), RequestId(8), TaskId(159), Hostname(c6401.ambari.apache.org) -2016-10-05T13:09:33.055Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-05T13:09:33.055Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-05T13:09:33.124Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) 
-2016-10-05T13:09:34.863Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-05T13:09:55.468Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles( - Ambari: Ambari Administrator -), Status(Success) -2016-10-06T07:44:56.131Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-06T07:44:56.219Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-06T07:44:56.326Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) -2016-10-06T07:45:01.686Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles( -), Status(Failed), Reason(Authentication required) diff --git a/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log b/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log deleted file mode 100644 index a646cc410c0..00000000000 --- a/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log +++ /dev/null @@ -1,4 +0,0 @@ -2016-03-18 10:00:47,252 INFO FSNamesystem.audit: allowed=true ugi=ambari-qa (auth:SIMPLE) ip=/192.168.64.102 cmd=getfileinfo src=/ats/active dst=null perm=null proto=rpc callerContext=HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f -2016-03-18 10:00:48,939 INFO FSNamesystem.audit: allowed=true ugi=ambari-qa (auth:SIMPLE) ip=/192.168.64.102 cmd=delete src=/tmp/hive/ambari-qa/resource1 dst=null perm=null proto=rpc -2016-03-18 10:00:49,242 INFO FSNamesystem.audit: allowed=true ugi=ambari-qa (auth:SIMPLE) ip=/192.168.64.102 cmd=getfileinfo src=/tmp/hive/ambari-qa/resource2 dst=null perm=null proto=rpc -2016-03-18 10:00:49,277 INFO FSNamesystem.audit: allowed=true ugi=ambari-qa (auth:SIMPLE) ip=/192.168.64.102 cmd=getfileinfo src=/tmp/hive/ambari-qa/resource2 dst=null perm=null proto=rpc diff --git a/ambari-logsearch/docker/test-logs/logsearch/logsearch-test-log.json b/ambari-logsearch/docker/test-logs/logsearch/logsearch-test-log.json deleted file mode 100644 index 708d383991d..00000000000 --- a/ambari-logsearch/docker/test-logs/logsearch/logsearch-test-log.json +++ /dev/null @@ -1 +0,0 @@ -{"level":"WARN","file":"ClientCnxn.java","thread_name":"zkCallback-6-thread-10-SendThread(c6402.ambari.apache.org:2181)","line_number":1102,"log_message":"Session 0x355e0023b38001d for server null, unexpected error, closing socket connection and attempting reconnect\njava.net.SocketException: Network is unreachable\n\tat sun.nio.ch.Net.connect0(Native Method)\n\tat sun.nio.ch.Net.connect(Net.java:454)\n\tat sun.nio.ch.Net.connect(Net.java:446)\n\tat sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:648)\n\tat org.apache.zookeeper.ClientCnxnSocketNIO.registerAndConnect(ClientCnxnSocketNIO.java:277)\n\tat org.apache.zookeeper.ClientCnxnSocketNIO.connect(ClientCnxnSocketNIO.java:287)\n\tat org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:967)\n\tat org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1003)\n","logger_name":"org.apache.zookeeper.ClientCnxn","logtime":"1468406756757"} \ No newline at end of file diff --git a/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt b/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt deleted file mode 100644 index de7fee6d262..00000000000 --- a/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt +++ /dev/null @@ -1,11 +0,0 @@ -Aug 9 
11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0) -Aug 9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa -Aug 9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0) -Aug 9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa -Aug 9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0) -Aug 9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa -Aug 9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0) -Aug 9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa -Aug 9 11:54:22 logsearch.apache.org su: pam_unix(su-l:session): session opened for user yarn by (uid=0) -Aug 9 11:54:22 logsearch.apache.org su: pam_unix(su-l:session): session closed for user yarn -Aug 9 11:54:22 logsearch.apache.org su: pam_unix(su-l:session): session opened for user yarn by (uid=0) diff --git a/ambari-logsearch/docker/test-logs/smartsense/hst-agent-test-log.txt b/ambari-logsearch/docker/test-logs/smartsense/hst-agent-test-log.txt deleted file mode 100644 index 8efbe2ecdc3..00000000000 --- a/ambari-logsearch/docker/test-logs/smartsense/hst-agent-test-log.txt +++ /dev/null @@ -1,4 +0,0 @@ -18 Jul 2016 17:06:46 DEBUG [MainThread] lock.py:97 - Got the handle of lock file. -18 Jul 2016 17:06:46 INFO [MainThread] security.py:175 - Server certificate not exists, downloading -18 Jul 2016 17:06:46 INFO [MainThread] security.py:188 - Downloading server cert from https://myurl:9440/cert/ca/ -18 Jul 2016 17:06:46 WARNING [MainThread] lock.py:60 - Releasing the lock. 
\ No newline at end of file diff --git a/ambari-logsearch/docker/test-logs/storm/worker-logs/streamline-1-TestAgg-2-3/6700/worker.log b/ambari-logsearch/docker/test-logs/storm/worker-logs/streamline-1-TestAgg-2-3/6700/worker.log deleted file mode 100644 index b6a59ecd9b8..00000000000 --- a/ambari-logsearch/docker/test-logs/storm/worker-logs/streamline-1-TestAgg-2-3/6700/worker.log +++ /dev/null @@ -1,5 +0,0 @@ -2017-10-23 13:41:43.481 o.a.s.d.executor Thread-11-__acker-executor[7 8] [INFO] Preparing bolt __acker:(1) -2017-10-23 13:41:43.483 o.a.s.d.executor Thread-11-__acker-executor[7 8] [WARN] Prepared bolt __acker:(2) -2017-10-23 13:41:48.834 c.h.s.s.n.EmailNotifier Thread-7-8-NOTIFICATION-executor[3 3] [ERROR] Got exception while initializing transport -2017-10-23 13:41:58.242 o.a.s.d.executor main [INFO] Loading executor 3-NOTIFICATION:[9 1] -2017-10-23 13:41:59.242 o.a.s.d.executor Thread-11-__acker-executor[7 8] [WARN] Prepared bolt __acker:(3) diff --git a/ambari-logsearch/docker/test-logs/storm/worker-logs/streamline-1-TestAgg-2-3/6701/worker.log b/ambari-logsearch/docker/test-logs/storm/worker-logs/streamline-1-TestAgg-2-3/6701/worker.log deleted file mode 100644 index 5f2d20e16b3..00000000000 --- a/ambari-logsearch/docker/test-logs/storm/worker-logs/streamline-1-TestAgg-2-3/6701/worker.log +++ /dev/null @@ -1,5 +0,0 @@ -2017-10-23 13:41:43.481 o.a.s.d.executor Thread-11-__acker-executor[5 5] [INFO] Preparing bolt __acker:(4) -2017-10-23 13:41:43.483 o.a.s.d.executor Thread-11-__acker-executor[5 5] [WARN] Prepared bolt __acker:(5) -2017-10-23 13:41:48.834 c.h.s.s.n.EmailNotifier Thread-5-3-NOTIFICATION-executor[3 3] [ERROR] Got exception while initializing transport -2017-10-23 13:41:58.242 o.a.s.d.executor main [INFO] Loading executor 3-NOTIFICATION:[3 1] -2017-10-23 13:41:59.242 o.a.s.d.executor Thread-11-__acker-executor[5 5] [WARN] Prepared bolt __acker:(6) diff --git a/ambari-logsearch/docker/test-logs/system_message/message_logs.txt b/ambari-logsearch/docker/test-logs/system_message/message_logs.txt deleted file mode 100644 index a898622761f..00000000000 --- a/ambari-logsearch/docker/test-logs/system_message/message_logs.txt +++ /dev/null @@ -1,17 +0,0 @@ -Aug 21 03:09:02 logsearch.apache.org rsyslogd: [origin software="rsyslogd" swVersion="5.8.10" x-pid="955" x-info="http://www.rsyslog.com"] rsyslogd was HUPed -Aug 21 10:53:02 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad) -Aug 21 10:53:02 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad) -Aug 21 10:53:03 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 37432 seconds. -Aug 21 21:16:55 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad) -Aug 21 21:16:55 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad) -Aug 21 21:16:56 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 35453 seconds. -Aug 22 06:39:48 logsearch.apache.org rsyslogd-2177: imuxsock lost 369 messages from pid 5254 due to rate-limiting -Aug 22 07:07:49 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad) -Aug 22 07:07:49 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad) -Aug 22 07:07:50 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 36355 seconds. 
-Aug 22 17:13:45 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad) -Aug 22 17:13:45 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad) -Aug 22 17:13:46 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 37482 seconds. -Aug 23 03:38:28 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad) -Aug 23 03:38:28 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad) -Aug 23 03:38:29 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 31187 seconds. diff --git a/ambari-logsearch/docker/test-logs/zookeeper/zookeeper-test-log.txt b/ambari-logsearch/docker/test-logs/zookeeper/zookeeper-test-log.txt deleted file mode 100644 index 904c420cc8a..00000000000 --- a/ambari-logsearch/docker/test-logs/zookeeper/zookeeper-test-log.txt +++ /dev/null @@ -1,3 +0,0 @@ -2016-07-13 10:45:49,640 - WARN [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:2181:NIOServerCnxn@362] - Exception causing close of session 0x0 due to java.io.IOException: ZooKeeperServer not running -2016-07-13 10:45:49,640 - INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:2181:NIOServerCnxn@1007] - Closed socket connection for client /192.168.64.101:39626 (no session established for client) -2016-07-13 10:45:50,351 - INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:2181:NIOServerCnxnFactory@197] - Accepted socket connection from /192.168.64.101:39632 \ No newline at end of file diff --git a/ambari-logsearch/docker/zookeeper.yml b/ambari-logsearch/docker/zookeeper.yml deleted file mode 100644 index d616c926af4..00000000000 --- a/ambari-logsearch/docker/zookeeper.yml +++ /dev/null @@ -1,52 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License -version: '3.3' -services: - zoo1: - image: zookeeper:3.4.10 - restart: always - hostname: zoo1 - networks: - - logsearch-network - ports: - - 2181:2181 - environment: - ZOO_MY_ID: 1 - ZOO_SERVERS: server.1=zoo1:2888:3888 server.2=zoo2:2888:3888 server.3=zoo3:2888:3888 - zoo2: - image: zookeeper:3.4.10 - restart: always - hostname: zoo2 - networks: - - logsearch-network - ports: - - 2182:2181 - environment: - ZOO_MY_ID: 2 - ZOO_SERVERS: server.1=zoo1:2888:3888 server.2=zoo2:2888:3888 server.3=zoo3:2888:3888 - zoo3: - image: zookeeper:3.4.10 - restart: always - hostname: zoo3 - networks: - - logsearch-network - ports: - - 2183:2181 - environment: - ZOO_MY_ID: 3 - ZOO_SERVERS: server.1=zoo1:2888:3888 server.2=zoo2:2888:3888 server.3=zoo3:2888:3888 -networks: - logsearch-network: - driver: bridge \ No newline at end of file diff --git a/ambari-logsearch/pom.xml b/ambari-logsearch/pom.xml deleted file mode 100644 index 3fc59744252..00000000000 --- a/ambari-logsearch/pom.xml +++ /dev/null @@ -1,361 +0,0 @@ - - - - org.apache.ambari - 4.0.0 - ambari-logsearch - 2.0.0.0-SNAPSHOT - pom - - http://maven.apache.org - - ambari-logsearch-assembly - ambari-logsearch-appender - ambari-logsearch-server - ambari-logsearch-web - ambari-logsearch-logfeeder - ambari-logsearch-config-api - ambari-logsearch-config-json - ambari-logsearch-config-zookeeper - ambari-logsearch-it - ambari-logsearch-logfeeder-plugin-api - ambari-logsearch-logfeeder-container-registry - ambari-logsearch-config-local - ambari-logsearch-config-solr - - - 1.8 - UTF-8 - python >= 2.6 - python (>= 2.6) - amd64 - ${deb.python.ver} - 7.4.0 - 3.0.0 - 2.5 - 3.4.6.2.3.0.0-2557 - 4 - false - -Xmx1024m -Xms512m - false - - - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - Apache Software Foundation - http://www.apache.org - - - jira - https://issues.apache.org/jira/browse/AMBARI - - - - - apache-hadoop - hdp - http://repo.hortonworks.com/content/groups/public/ - - - oss.sonatype.org - OSS Sonatype Staging - https://oss.sonatype.org/content/groups/staging - - - spring-milestones - Spring Milestones - http://repo.spring.io/milestone - - false - - - - ASF Staging - https://repository.apache.org/content/groups/staging/ - - - ASF Snapshots - https://repository.apache.org/content/repositories/snapshots/ - - true - - - false - - - - - - - - org.apache.maven.plugins - maven-enforcer-plugin - 1.4.1 - - - org.codehaus.mojo - properties-maven-plugin - 1.0.0 - - - org.codehaus.mojo - exec-maven-plugin - 1.2.1 - - - org.apache.maven.plugins - maven-failsafe-plugin - 2.20 - - - - - - org.codehaus.mojo - rpm-maven-plugin - 2.1.4 - - - - none - - attached-rpm - - - - - 2012, Apache Software Foundation - Development - Maven Recipe: RPM Package. 
- ${package-release} - ${package-version} - - - - - org.vafer - jdeb - 1.4 - - - - stub-execution - none - - jdeb - - - - - true - false - false - ${project.basedir}/../src/main/package/deb/control - - - - false - maven-assembly-plugin - - - ../ambari-project/src/main/assemblies/empty.xml - - - - - build-tarball - none - - single - - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - ${skipSurefireTests} - - - - maven-compiler-plugin - 3.2 - - ${jdk.version} - ${jdk.version} - - - - org.apache.maven.plugins - maven-clean-plugin - 2.5 - - - - ${basedir} - false - - **/*.pyc - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.8 - - - parse-version - validate - - parse-version - - - - regex-property - - regex-property - - - ambariVersion - ${project.version} - ^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).* - $1.$2.$3.$4 - false - - - - parse-package-version - - regex-property - - - package-version - ${project.version} - ^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).* - $1.$2.$3.$4 - true - - - - parse-package-release - - regex-property - - - package-release - ${project.version} - - ^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-)((([a-zA-Z]+)?([0-9]+))|(SNAPSHOT)).* - - $6 - true - - - - - - org.apache.rat - apache-rat-plugin - 0.12 - - - **/README.md - **/*.json - **/*.log - **/*.txt - **/*.story - **/*.editorconfig - **/*.iml - **/src/vendor/** - **/yarn.lock - **/docker/Profile - **/docker/.env - **/docker/knox/** - **/node_modules/** - **/dist/** - - - - - test - - check - - - - - - - - - - junit - junit - 4.10 - - - commons-fileupload - commons-fileupload - 1.3.3 - - - com.fasterxml.jackson.core - jackson-databind - 2.9.4 - - - com.fasterxml.jackson.core - jackson-annotations - 2.9.4 - - - com.fasterxml.jackson.dataformat - jackson-dataformat-xml - 2.9.4 - - - com.fasterxml.woodstox - woodstox-core - - - - - io.netty - netty - 3.10.5.Final - - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - - diff --git a/pom.xml b/pom.xml index 61300d63a40..e648c571fcd 100644 --- a/pom.xml +++ b/pom.xml @@ -442,8 +442,6 @@ ambari-server ambari-funtest ambari-agent - ambari-logsearch - ambari-infra @@ -476,8 +474,6 @@ ambari-server ambari-funtest ambari-agent - ambari-logsearch - ambari-infra ambari-serviceadvisor @@ -497,8 +493,6 @@ ambari-server ambari-funtest ambari-agent - ambari-logsearch - ambari-infra ambari-serviceadvisor
    ").addClass("cw").text("#"));c.isBefore(f.clone().endOf("w"));)b.append(a("").addClass("dow").text(c.format("dd"))),c.add(1,"d");o.find(".datepicker-days thead").append(b)},N=function(a){return d.disabledDates[a.format("YYYY-MM-DD")]===!0},O=function(a){return d.enabledDates[a.format("YYYY-MM-DD")]===!0},P=function(a){return d.disabledHours[a.format("H")]===!0},Q=function(a){return d.enabledHours[a.format("H")]===!0},R=function(b,c){if(!b.isValid())return!1;if(d.disabledDates&&"d"===c&&N(b))return!1;if(d.enabledDates&&"d"===c&&!O(b))return!1;if(d.minDate&&b.isBefore(d.minDate,c))return!1;if(d.maxDate&&b.isAfter(d.maxDate,c))return!1;if(d.daysOfWeekDisabled&&"d"===c&&d.daysOfWeekDisabled.indexOf(b.day())!==-1)return!1;if(d.disabledHours&&("h"===c||"m"===c||"s"===c)&&P(b))return!1;if(d.enabledHours&&("h"===c||"m"===c||"s"===c)&&!Q(b))return!1;if(d.disabledTimeIntervals&&("h"===c||"m"===c||"s"===c)){var e=!1;if(a.each(d.disabledTimeIntervals,function(){if(b.isBetween(this[0],this[1]))return e=!0,!1}),e)return!1}return!0},S=function(){for(var b=[],c=f.clone().startOf("y").startOf("d");c.isSame(f,"y");)b.push(a("").attr("data-action","selectMonth").addClass("month").text(c.format("MMM"))),c.add(1,"M");o.find(".datepicker-months td").empty().append(b)},T=function(){var b=o.find(".datepicker-months"),c=b.find("th"),g=b.find("tbody").find("span");c.eq(0).find("span").attr("title",d.tooltips.prevYear),c.eq(1).attr("title",d.tooltips.selectYear),c.eq(2).find("span").attr("title",d.tooltips.nextYear),b.find(".disabled").removeClass("disabled"),R(f.clone().subtract(1,"y"),"y")||c.eq(0).addClass("disabled"),c.eq(1).text(f.year()),R(f.clone().add(1,"y"),"y")||c.eq(2).addClass("disabled"),g.removeClass("active"),e.isSame(f,"y")&&!m&&g.eq(e.month()).addClass("active"),g.each(function(b){R(f.clone().month(b),"M")||a(this).addClass("disabled")})},U=function(){var a=o.find(".datepicker-years"),b=a.find("th"),c=f.clone().subtract(5,"y"),g=f.clone().add(6,"y"),h="";for(b.eq(0).find("span").attr("title",d.tooltips.prevDecade),b.eq(1).attr("title",d.tooltips.selectDecade),b.eq(2).find("span").attr("title",d.tooltips.nextDecade),a.find(".disabled").removeClass("disabled"),d.minDate&&d.minDate.isAfter(c,"y")&&b.eq(0).addClass("disabled"),b.eq(1).text(c.year()+"-"+g.year()),d.maxDate&&d.maxDate.isBefore(g,"y")&&b.eq(2).addClass("disabled");!c.isAfter(g,"y");)h+=''+c.year()+"",c.add(1,"y");a.find("td").html(h)},V=function(){var a,c=o.find(".datepicker-decades"),g=c.find("th"),h=b({y:f.year()-f.year()%100-1}),i=h.clone().add(100,"y"),j=h.clone(),k=!1,l=!1,m="";for(g.eq(0).find("span").attr("title",d.tooltips.prevCentury),g.eq(2).find("span").attr("title",d.tooltips.nextCentury),c.find(".disabled").removeClass("disabled"),(h.isSame(b({y:1900}))||d.minDate&&d.minDate.isAfter(h,"y"))&&g.eq(0).addClass("disabled"),g.eq(1).text(h.year()+"-"+i.year()),(h.isSame(b({y:2e3}))||d.maxDate&&d.maxDate.isBefore(i,"y"))&&g.eq(2).addClass("disabled");!h.isAfter(i,"y");)a=h.year()+12,k=d.minDate&&d.minDate.isAfter(h,"y")&&d.minDate.year()<=a,l=d.maxDate&&d.maxDate.isAfter(h,"y")&&d.maxDate.year()<=a,m+=''+(h.year()+1)+" - "+(h.year()+12)+"",h.add(12,"y");m+="",c.find("td").html(m),g.eq(1).text(j.year()+1+"-"+h.year())},W=function(){var 
b,c,g,h=o.find(".datepicker-days"),i=h.find("th"),j=[],k=[];if(B()){for(i.eq(0).find("span").attr("title",d.tooltips.prevMonth),i.eq(1).attr("title",d.tooltips.selectMonth),i.eq(2).find("span").attr("title",d.tooltips.nextMonth),h.find(".disabled").removeClass("disabled"),i.eq(1).text(f.format(d.dayViewHeaderFormat)),R(f.clone().subtract(1,"M"),"M")||i.eq(0).addClass("disabled"),R(f.clone().add(1,"M"),"M")||i.eq(2).addClass("disabled"),b=f.clone().startOf("M").startOf("w").startOf("d"),g=0;g<42;g++)0===b.weekday()&&(c=a("
    '+b.week()+"'+b.date()+"
    '+c.format(h?"HH":"hh")+"
    '+c.format("mm")+"
    '+c.format("ss")+"