Skip to content
Permalink
Browse files
rebuild ci to be compatible with apache (#184)
* rebuild ci to be compatible with apache
  • Loading branch information
coderzc committed May 19, 2022
1 parent 50b6147 commit 0d2e6dfd4d5138760cc9f77e2683ea93af238a56
Showing 5 changed files with 82 additions and 28 deletions.
@@ -17,7 +17,17 @@ on:
jobs:
computer-ci:
runs-on: ubuntu-20.04
env:
TRAVIS_DIR: computer-dist/src/assembly/travis
KUBERNETES_VERSION: 1.20.1
HUGEGRAPH_SERVER_COMMIT_ID: 848b8fb17804ddc561af832dff0bdc3f6221c904

steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 2

- name: Install JDK 8
uses: actions/setup-java@v2
with:
@@ -28,44 +38,29 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Setup Hdfs
uses: beyondstorage/setup-hdfs@master
with:
hdfs-version: '3.3.1'
- name: Setup HDFS
run: $TRAVIS_DIR/install-hdfs.sh

- name: Setup Minikube-Kubernetes
uses: manusa/actions-setup-minikube@v2.4.2
with:
minikube version: v1.21.0
kubernetes version: v1.20.1
run: $TRAVIS_DIR/install-k8s.sh

- name: Check Component
run: |
sleep 5
curl ${{ env.HDFS_NAMENODE_ADDR }}
curl localhost:9000
kubectl get nodes
- name: Cache Maven packages
uses: actions/cache@v2
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-m2

- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 2

- name: Prepare env and service
env:
TRAVIS_DIR: computer-dist/src/assembly/travis
COMMIT_ID: 848b8fb17804ddc561af832dff0bdc3f6221c904
run: |
$TRAVIS_DIR/install-env.sh
$TRAVIS_DIR/install-hugegraph-from-source.sh $COMMIT_ID | grep -v "Downloading\|Downloaded"
$TRAVIS_DIR/install-hugegraph-from-source.sh $HUGEGRAPH_SERVER_COMMIT_ID | grep -v "Downloading\|Downloaded"
$TRAVIS_DIR/load-data-into-hugegraph.sh
- name: Install JDK 11
uses: actions/setup-java@v2
with:
@@ -0,0 +1,50 @@
#!/bin/bash
# Install and start a pseudo-distributed HDFS (Hadoop 3.3.2) on localhost
# for CI. Side effects: generates SSH keys, unpacks Hadoop to /opt/hadoop,
# appends env exports to ~/.bashrc, formats the namenode, and starts the
# DFS daemons (namenode listens on hdfs://localhost:9000).

set -ev

# Passwordless SSH to localhost: start-dfs.sh launches the HDFS daemons
# over ssh, so the runner must be able to ssh into itself.
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
ssh-keyscan -H localhost >> ~/.ssh/known_hosts
chmod 0600 ~/.ssh/known_hosts
eval `ssh-agent`
ssh-add ~/.ssh/id_rsa

sudo wget http://archive.apache.org/dist/hadoop/common/hadoop-3.3.2/hadoop-3.3.2.tar.gz

tar -zxf hadoop-3.3.2.tar.gz -C /opt
cd /opt
mv hadoop-3.3.2 hadoop
cd hadoop
pwd

# Single quotes defer expansion of $PATH/$HADOOP_HOME to when ~/.bashrc is
# sourced. (With double quotes, HADOOP_HOME — unset in this shell — expanded
# to an empty string at write time, so the Hadoop bin dirs never reached
# PATH in later shells.)
echo 'export HADOOP_HOME=/opt/hadoop' >> ~/.bashrc
echo 'export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin' >> ~/.bashrc

# Export for the remainder of THIS script as well; sourcing ~/.bashrc from a
# non-interactive shell may return early on stock Ubuntu bashrc files.
export HADOOP_HOME=/opt/hadoop
export PATH="$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin"

source ~/.bashrc

# Point the default filesystem at a local namenode on port 9000.
tee etc/hadoop/core-site.xml <<EOF
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
EOF

# Single-node cluster: replication factor 1; secondary namenode on 9100.
tee etc/hadoop/hdfs-site.xml <<EOF
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.secondary.http.address</name>
<value>localhost:9100</value>
</property>
</configuration>
EOF

# One-time namenode format, then start the DFS daemons; jps lists the
# running Java processes so the CI log shows whether they came up.
bin/hdfs namenode -format
sbin/start-dfs.sh
jps
@@ -8,7 +8,7 @@ if [[ $# -ne 1 ]]; then
fi

COMMIT_ID=$1
HUGEGRAPH_GIT_URL="https://github.com/hugegraph/hugegraph.git"
HUGEGRAPH_GIT_URL="https://github.com/apache/hugegraph.git"

git clone --depth 100 ${HUGEGRAPH_GIT_URL}
cd hugegraph
@@ -0,0 +1,9 @@
#!/bin/bash
# Install the latest Minikube release and start a single-node Kubernetes
# cluster for CI. Expects $KUBERNETES_VERSION to be exported by the caller
# (the workflow sets KUBERNETES_VERSION, e.g. 1.20.1).

set -ev

# Download the latest Minikube binary for linux-amd64 and make it executable.
curl -Lo minikube https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 && chmod +x minikube
sudo mkdir -p /usr/local/bin/
sudo install minikube /usr/local/bin/

# Start the cluster with the docker driver at the pinned Kubernetes version;
# fail the whole script if startup fails.
minikube start --vm-driver=docker --kubernetes-version ${KUBERNETES_VERSION} || exit 1
@@ -5,25 +5,25 @@ set -ev
TRAVIS_DIR=`dirname $0`
DATASET_DIR=${TRAVIS_DIR}/../dataset

HUGEGRAPH_LOADER_GIT_URL="https://github.com/hugegraph/hugegraph-loader.git"
HUGEGRAPH_LOADER_GIT_URL="https://github.com/apache/hugegraph-toolchain.git"

git clone --depth 10 ${HUGEGRAPH_LOADER_GIT_URL}

cd hugegraph-loader
cd hugegraph-toolchain/hugegraph-loader
mvn install:install-file -Dfile=assembly/static/lib/ojdbc8-12.2.0.1.jar -DgroupId=com.oracle -DartifactId=ojdbc8 -Dversion=12.2.0.1 -Dpackaging=jar || exit 1
mvn package -DskipTests || exit 1
tar -zxf hugegraph-loader-*.tar.gz || exit 1
cd ../
cd ../../

wget http://files.grouplens.org/datasets/movielens/ml-latest-small.zip
unzip -d ${DATASET_DIR} ml-latest-small.zip

hugegraph-loader/hugegraph-loader-*/bin/hugegraph-loader.sh -g hugegraph -f ${DATASET_DIR}/struct.json -s ${DATASET_DIR}/schema.groovy || exit 1
hugegraph-toolchain/hugegraph-loader/hugegraph-loader-*/bin/hugegraph-loader.sh -g hugegraph -f ${DATASET_DIR}/struct.json -s ${DATASET_DIR}/schema.groovy || exit 1

# load dataset to hdfs
sort -t , -k1n -u "${DATASET_DIR}"/ml-latest-small/ratings.csv | cut -d "," -f 1 > "${DATASET_DIR}"/ml-latest-small/user_id.csv || exit 1
hadoop fs -mkdir -p /dataset/ml-latest-small || exit 1
hadoop fs -put "${DATASET_DIR}"/ml-latest-small/* /dataset/ml-latest-small || exit 1
hadoop fs -ls /dataset/ml-latest-small
/opt/hadoop/bin/hadoop fs -mkdir -p /dataset/ml-latest-small || exit 1
/opt/hadoop/bin/hadoop fs -put "${DATASET_DIR}"/ml-latest-small/* /dataset/ml-latest-small || exit 1
/opt/hadoop/bin/hadoop fs -ls /dataset/ml-latest-small

echo "Load finished, continue to next step"

0 comments on commit 0d2e6df

Please sign in to comment.