Skip to content

Commit

Permalink
Merge pull request #10 from apache/kylin3
Browse files Browse the repository at this point in the history
Kylin3
  • Loading branch information
Sidonet committed Aug 20, 2021
2 parents 8fd393d + fef7803 commit b9b2f4f
Show file tree
Hide file tree
Showing 126 changed files with 2,586 additions and 157 deletions.
48 changes: 32 additions & 16 deletions .travis.yml
Expand Up @@ -15,9 +15,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
language: java
---
language: generic

addons:
apt:
packages:
- openjdk-8-jdk
- unzip
sonarcloud:
organization: "kylin"
token:
Expand All @@ -27,27 +32,37 @@ cache:
directories:
- $HOME/.m2

jdk:
- openjdk8

before_script:
- echo "Downloading Maven 3.5.3"
&& wget https://archive.apache.org/dist/maven/maven-3/3.5.3/binaries/apache-maven-3.5.3-bin.zip
&& unzip -qq apache-maven-3.5.3-bin.zip
&& export M2_HOME=$PWD/apache-maven-3.5.3
&& export PATH=$M2_HOME/bin:$PATH
&& mvn -version
jobs:
include:
- name: Linux ARM64
arch: arm64-graviton2
dist: focal
virt: vm
group: edge
- name: Linux x86
arch: amd64

before_install:
- export JAVA_HOME="/usr/lib/jvm/java-8-openjdk-${TRAVIS_CPU_ARCH}";
- export PATH="$JAVA_HOME/bin:$PATH";
- echo "Downloading Maven 3.8.1"
- wget --quiet https://archive.apache.org/dist/maven/maven-3/3.8.1/binaries/apache-maven-3.8.1-bin.zip
- unzip -qq -o apache-maven-3.8.1-bin.zip
- export M2_HOME=$PWD/apache-maven-3.8.1
- export PATH=$M2_HOME/bin:$PATH
- mvn -version
- echo "MAVEN_OPTS='-Xms1024m -Xmx3072m -XX:MetaspaceSize=128m -XX:MaxMetaspaceSize=384m'" > ~/.mavenrc
- sed -i 's/log4j.logger.org.apache.kylin=INFO/log4j.logger.org.apache.kylin=WARN/g' build/conf/kylin-server-log4j.properties
- sed -i 's/log4j.logger.org.apache.kylin=INFO/log4j.logger.org.apache.kylin=WARN/g' build/conf/kylin-tools-log4j.properties

script:
- mvn clean org.jacoco:jacoco-maven-plugin:prepare-agent test coveralls:report -e
- if [[ -n "${TRAVIS_PULL_REQUEST_SLUG}" && "${TRAVIS_PULL_REQUEST_SLUG}" != "${TRAVIS_REPO_SLUG}" ]]; then
- mvn --no-transfer-progress clean org.jacoco:jacoco-maven-plugin:prepare-agent test coveralls:report -e
- if [[ -n "${TRAVIS_PULL_REQUEST_SLUG}" && "${TRAVIS_PULL_REQUEST_SLUG}" != "${TRAVIS_REPO_SLUG}" ]];
then
echo "The pull request from ${TRAVIS_PULL_REQUEST_SLUG} is an EXTERNAL pull request. Skip sonar analysis.";
else
git fetch --unshallow --quiet;
mvn sonar:sonar -e -Dsonar.host.url=https://sonarcloud.io -Dsonar.login=2ca24e5a04ad1fca4ca956953810421aa8bd1470 -Dsonar.organization=kylin;
echo "Skip sonar temporarily because sonarcloud requires jdk11. To re-enable see Git history!";
fi

notification:
Expand All @@ -58,12 +73,13 @@ notification:

after_success:
- bash <(curl -s https://codecov.io/bash)

before_cache:
# Remove project's local artifacts to force maven reactor resolve
- rm -rf $HOME/.m2/repository/org/apache/kylin

# blocklist
branches:
except:
- document
- document

2 changes: 1 addition & 1 deletion assembly/pom.xml
Expand Up @@ -26,7 +26,7 @@
<parent>
<artifactId>kylin</artifactId>
<groupId>org.apache.kylin</groupId>
<version>3.1.2-SNAPSHOT</version>
<version>3.1.3-SNAPSHOT</version>
</parent>

<properties>
Expand Down
9 changes: 5 additions & 4 deletions build/bin/download-flink.sh
Expand Up @@ -37,9 +37,10 @@ fi

flink_version="1.11.1"
scala_version="2.11"
flink_shaded_hadoop_version="3.1.1.7.1.1.0-565-9.0"
flink_shaded_version="10.0"
hadoop_version="2.7.5"
flink_pkg_md5="3b7aa59b44add1a0625737f6516e0929"
flink_shaded_hadoop_md5="7b78e546dd93f4facd322921f29de1eb"
flink_shaded_hadoop_md5="4287a314bfb09a3dc957cbda3f91d7ca"

if [ ! -f "flink-${flink_version}-bin-scala_${scala_version}.tgz" ]; then
echo "No binary file found, start to download package to ${flink_package_dir}"
Expand All @@ -52,8 +53,8 @@ else
fi
fi

flink_shaded_hadoop_jar="flink-shaded-hadoop-3-uber-${flink_shaded_hadoop_version}.jar"
flink_shaded_hadoop_path="https://repository.cloudera.com/artifactory/libs-release-local/org/apache/flink/flink-shaded-hadoop-3-uber/${flink_shaded_hadoop_version}/${flink_shaded_hadoop_jar}"
flink_shaded_hadoop_jar="flink-shaded-hadoop-2-uber-${hadoop_version}-${flink_shaded_version}.jar"
flink_shaded_hadoop_path="https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/${hadoop_version}-${flink_shaded_version}/${flink_shaded_hadoop_jar}"

if [ ! -f $flink_shaded_hadoop_jar ]; then
echo "Start to download $flink_shaded_hadoop_jar"
Expand Down
6 changes: 3 additions & 3 deletions build/bin/kylin.sh
Expand Up @@ -288,7 +288,7 @@ then

# KYLIN_EXTRA_START_OPTS is for customized settings, checkout bin/setenv.sh
${JAVA_HOME}/bin/java -cp $STREAM_CLASSPATH ${KYLIN_EXTRA_START_OPTS} \
-Dlog4j.configuration=stream-receiver-log4j.properties\
-Dlog4j.configuration=file:${KYLIN_HOME}/conf/stream-receiver-log4j.properties\
-DKYLIN_HOME=${KYLIN_HOME}\
-Dkylin.hbase.dependency=${hbase_dependency} \
org.apache.kylin.stream.server.StreamingReceiver $@ > ${KYLIN_HOME}/logs/streaming_receiver.out 2>&1 & echo $! > ${KYLIN_HOME}/streaming_receiver_pid &
Expand Down Expand Up @@ -318,7 +318,7 @@ then

# KYLIN_EXTRA_START_OPTS is for customized settings, checkout bin/setenv.sh
${JAVA_HOME}/bin/java -cp $STREAM_CLASSPATH ${KYLIN_EXTRA_START_OPTS} \
-Dlog4j.configuration=stream-receiver-log4j.properties\
-Dlog4j.configuration=file:${KYLIN_HOME}/conf/stream-receiver-log4j.properties\
-DKYLIN_HOME=${KYLIN_HOME}\
-Dkylin.hbase.dependency=${hbase_dependency} \
org.apache.kylin.stream.server.StreamingReceiver $@
Expand Down Expand Up @@ -385,7 +385,7 @@ then
shift
# KYLIN_EXTRA_START_OPTS is for customized settings, checkout bin/setenv.sh
${JAVA_HOME}/bin/java -cp $STREAM_CLASSPATH ${KYLIN_EXTRA_START_OPTS} \
-Dlog4j.configuration=stream-receiver-log4j.properties\
-Dlog4j.configuration=file:${KYLIN_HOME}/conf/stream-receiver-log4j.properties\
-DKYLIN_HOME=${KYLIN_HOME}\
-Dkylin.hbase.dependency=${hbase_dependency} \
"$@"
Expand Down
18 changes: 18 additions & 0 deletions build/bin/set-kylin-home.sh
@@ -1,3 +1,21 @@
#!/bin/bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
if [ -z $KYLIN_HOME ];
then
export KYLIN_HOME=`cd -P -- "$(dirname -- "$0")" && dirname $(pwd)`
Expand Down
44 changes: 44 additions & 0 deletions build/conf/stream-receiver-log4j.properties
@@ -0,0 +1,44 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


#define appenders
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.File=${KYLIN_HOME}/logs/kylin_streaming_receiver.log
log4j.appender.file.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}:%L : %m%n
log4j.appender.file.Append=true


log4j.appender.statechange=org.apache.log4j.DailyRollingFileAppender
log4j.appender.statechange.layout=org.apache.log4j.PatternLayout
log4j.appender.statechange.File=${KYLIN_HOME}/logs/kylin_streaming_statechange.log
log4j.appender.statechange.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}:%L : %m%n
log4j.appender.statechange.Append=true

#overall config
log4j.rootLogger=INFO,file
log4j.logger.org.apache.kylin=DEBUG
log4j.logger.io.ebay.rheos.kafka.security.iaf=DEBUG
log4j.logger.org.springframework=WARN
log4j.logger.org.springframework.security=INFO

#statechange config
log4j.logger.org.apache.kylin.stream.server.StreamingServer=DEBUG, statechange
log4j.logger.org.apache.kylin.stream.server.ReplicaSetLeaderSelector=DEBUG, statechange
log4j.logger.org.apache.kylin.stream.server.rest.controller.AdminController=DEBUG, statechange
log4j.logger.org.apache.kylin.stream.core.storage.StreamingSegmentManager=DEBUG, statechange
1 change: 1 addition & 0 deletions build/script/build.sh
Expand Up @@ -31,4 +31,5 @@ npm install -g bower || { exit 1; }
bower --allow-root install || { exit 1; }
npm install || { exit 1; }
npm install -g grunt-cli || { exit 1; }
PHANTOMJS_CDNURL=https://npm.taobao.org/mirrors/phantomjs npm install phantomjs-prebuilt || { exit 1; }
grunt dev --buildEnv=dev --buildNumber=`date "+%Y%m%d%H%M%S"` || { exit 1; }
2 changes: 1 addition & 1 deletion cache/pom.xml
Expand Up @@ -26,7 +26,7 @@
<parent>
<groupId>org.apache.kylin</groupId>
<artifactId>kylin</artifactId>
<version>3.1.2-SNAPSHOT</version>
<version>3.1.3-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down
2 changes: 1 addition & 1 deletion core-common/pom.xml
Expand Up @@ -28,7 +28,7 @@
<parent>
<groupId>org.apache.kylin</groupId>
<artifactId>kylin</artifactId>
<version>3.1.2-SNAPSHOT</version>
<version>3.1.3-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down
Expand Up @@ -1731,6 +1731,14 @@ public boolean isSparCreateHiveTableViaSparkEnable() {
return Boolean.parseBoolean(getOptional("kylin.engine.spark-create-table-enabled", FALSE));
}

/**
 * Whether cube optimize jobs should be executed with the Spark engine.
 * Controlled by {@code kylin.engine.spark-optimize-cube-enabled}; defaults to enabled.
 */
public boolean isSparkOptimizeCubeViaSparkEnable() {
    final String configured = getOptional("kylin.engine.spark-optimize-cube-enabled", TRUE);
    return Boolean.parseBoolean(configured);
}

/**
 * Whether cuboid statistics calculation should run on Spark.
 * Controlled by {@code kylin.engine.spark-calculate-stats-enabled}; defaults to enabled.
 */
public boolean isUseSparkCalculateStatsEnable() {
    final String configured = getOptional("kylin.engine.spark-calculate-stats-enabled", TRUE);
    return Boolean.parseBoolean(configured);
}

public boolean isFlinkSanityCheckEnabled() {
return Boolean.parseBoolean(getOptional("kylin.engine.flink.sanity-check-enabled", FALSE));
}
Expand Down
Expand Up @@ -393,6 +393,10 @@ public void checkCompatibility(String jsonRequest, boolean ifHiveCheck) throws I
checkCompatibility(jsonRequest, baseUrl + "/cubes/checkCompatibility");
}

/**
 * Posts the given JSON request to the stream-table compatibility check endpoint.
 *
 * @param jsonRequest request payload forwarded to the REST endpoint
 * @throws IOException if the HTTP call fails
 */
public void checkStreamTableCompatibility(String jsonRequest) throws IOException {
    final String endpoint = baseUrl + "/cubes/checkStreamTableCompatibility";
    checkCompatibility(jsonRequest, endpoint);
}

private void checkCompatibility(String jsonRequest, String url) throws IOException {
HttpPost post = newPost(url);
try {
Expand Down
Expand Up @@ -166,7 +166,7 @@ private Pair<Integer, String> runNativeCommand(String command, Logger logAppende
public static final String COMMAND_BLOCK_LIST = "[ &`>|{}()$;\\-#~!+*\\\\]+";
public static final String COMMAND_WHITE_LIST = "[^\\w%,@/:=?.\"\\[\\]]";
public static final String HIVE_BLOCK_LIST = "[ <>()$;\\-#!+*\"'/=%@]+";

public static final String HOST_NAME_WHITE_LIST = "[^-.a-zA-Z0-9]";

/**
* <pre>
Expand Down Expand Up @@ -201,6 +201,14 @@ public static String checkHiveProperty(String hiveProperty) {
return checkParameter(hiveProperty, HIVE_BLOCK_LIST);
}

/**
 * Validates that {@code nodeName} contains only characters allowed in a host name
 * (letters, digits, '-' and '.', per {@code HOST_NAME_WHITE_LIST}).
 *
 * @param nodeName host name supplied by a caller, possibly from user input
 * @throws IllegalArgumentException if any disallowed character is present
 */
public static void checkHostName(String nodeName) {
    // Strip every allowed character's complement; any length change means an illegal char was present.
    final String sanitized = nodeName.replaceAll(HOST_NAME_WHITE_LIST, "");
    if (sanitized.length() != nodeName.length()) {
        throw new IllegalArgumentException("Detected illegal character in host name " + nodeName + " by "
                + HOST_NAME_WHITE_LIST + ", operation not allowed.");
    }
}

private static String checkParameter(String commandParameter, String rex) {
String repaired = commandParameter.replaceAll(rex, "");
if (repaired.length() != commandParameter.length()) {
Expand Down
Expand Up @@ -46,8 +46,7 @@ public boolean canServeJobBuild() {
}

/**
 * Whether this node can act as the streaming coordinator.
 * Only the explicit stream-coordinator server mode qualifies; per the diff above,
 * SERVER_MODE_ALL no longer implies coordinator duty — NOTE(review): confirm this
 * narrowing is intentional for deployments running in "all" mode.
 */
public boolean canServeStreamingCoordinator() {
return serverModes.contains(SERVER_MODE_STREAM_COORDINATOR);
}

public boolean canServeAll() {
Expand Down
2 changes: 1 addition & 1 deletion core-cube/pom.xml
Expand Up @@ -28,7 +28,7 @@
<parent>
<groupId>org.apache.kylin</groupId>
<artifactId>kylin</artifactId>
<version>3.1.2-SNAPSHOT</version>
<version>3.1.3-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down
Expand Up @@ -773,4 +773,8 @@ public static CubeSegment findSegmentWithJobId(String jobID, CubeInstance cubeIn
throw new IllegalStateException("No segment's last build job ID equals " + jobID);
}

/**
 * Whether this cube is a streaming "lambda" cube, i.e. its model's root fact
 * table is flagged as a lambda table (presumably a table backed by both a
 * real-time stream and a batch source — confirm against TableDesc.isLambdaTable).
 */
public boolean isStreamLambdaCube() {
return getModel().getRootFactTable().getTableDesc().isLambdaTable();
}

}
Expand Up @@ -18,6 +18,7 @@

package org.apache.kylin.cube.cuboid;

import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import java.util.Set;
Expand All @@ -32,7 +33,7 @@
/**
* Defines a cuboid tree, rooted by the base cuboid. A parent cuboid generates its child cuboids.
*/
abstract public class CuboidScheduler {
abstract public class CuboidScheduler implements Serializable {

public static CuboidScheduler getInstance(CubeDesc cubeDesc) {
String clzName = cubeDesc.getConfig().getCuboidScheduler();
Expand Down
Expand Up @@ -32,6 +32,7 @@

public class CuboidUtil {

// Result[i][j] is the row-key bit index of the j-th '1' bit in cuboidIds[i].
public static Integer[][] getCuboidBitSet(Long[] cuboidIds, int nRowKey) {
Preconditions.checkArgument(nRowKey < Long.SIZE,
"the size of row key could not be large than " + (Long.SIZE - 1));
Expand Down
Expand Up @@ -19,6 +19,7 @@
package org.apache.kylin.cube.cuboid;

import java.io.PrintWriter;
import java.io.Serializable;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
Expand Down Expand Up @@ -71,7 +72,7 @@ public boolean isValid(long requestCuboid) {
return cuboidTree.isValid(requestCuboid);
}

public static class CuboidTree {
public static class CuboidTree implements Serializable {
private int treeLevels;

private TreeNode root;
Expand Down Expand Up @@ -232,7 +233,7 @@ private boolean canDerive(long cuboidId, long parentCuboid) {
}
}

public static class TreeNode {
public static class TreeNode implements Serializable {
@JsonProperty("cuboid_id")
long cuboidId;
@JsonIgnore
Expand Down Expand Up @@ -290,7 +291,7 @@ public boolean equals(Object obj) {
/**
* Compare cuboid according to the cuboid data row count
*/
public static class CuboidCostComparator implements Comparator<Long> {
public static class CuboidCostComparator implements Comparator<Long>, Serializable {
private Map<Long, Long> cuboidStatistics;

public CuboidCostComparator(Map<Long, Long> cuboidStatistics) {
Expand Down
Expand Up @@ -19,6 +19,7 @@
package org.apache.kylin.cube.kv;

import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

Expand All @@ -34,7 +35,7 @@
* @author xjiang
*
*/
public class RowKeyDecoder {
public class RowKeyDecoder implements Serializable {

private final CubeDesc cubeDesc;
private final RowKeyColumnIO colIO;
Expand Down
2 changes: 1 addition & 1 deletion core-dictionary/pom.xml
Expand Up @@ -28,7 +28,7 @@
<parent>
<groupId>org.apache.kylin</groupId>
<artifactId>kylin</artifactId>
<version>3.1.2-SNAPSHOT</version>
<version>3.1.3-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down

0 comments on commit b9b2f4f

Please sign in to comment.