upgrade CI scripts, local-cluster mode is enabled again
tribbloid committed Oct 19, 2023
1 parent 1b7e68a commit d8bcba8
Showing 8 changed files with 18 additions and 17 deletions.
14 changes: 7 additions & 7 deletions .circleci/config.yml
@@ -1,14 +1,14 @@
 version: 2.1
 
 executors:
-  openjdk11:
+  main:
     docker:
       - image: cimg/openjdk:11.0
     resource_class: large
 
 jobs:
   build:
-    executor: openjdk11
+    executor: main
     steps:
       - checkout
       - run:
@@ -17,22 +17,22 @@ jobs:
             echo "Starting ..."
       - restore_cache:
           keys:
-            - profile-{{ checksum "./dev/profiles/apache-local/.common.sh" }}
+            - profile-{{ checksum "./dev/profiles/apache-latest/.common.sh" }}
       - run:
           name: Prepare
           command: |
-            ./dev/CI-apache-local.sh prepare
+            ./dev/CI-apache-latest.sh prepare
       - save_cache:
-          key: profile-{{ checksum "./dev/profiles/apache-local/.common.sh" }}
+          key: profile-{{ checksum "./dev/profiles/apache-latest/.common.sh" }}
           paths:
             - ~/.ci
       - run:
           name: Run
           command: |
-            ./dev/CI-apache-local.sh
+            ./dev/CI-apache-latest.sh
 
 workflows:
-  main-local: # This is the name of the workflow, feel free to change it to better match your workflow.
+  main:
     # Inside the workflow, you define the jobs you want to run.
     # For more details on extending your workflow, see the configuration docs: https://circleci.com/docs/2.0/configuration-reference/#workflows
     jobs:
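Note on the cache keys above: CircleCI's {{ checksum }} template embeds a hash of the named file in the key, so any edit to dev/profiles/apache-latest/.common.sh produces a new key and a fresh cache entry. A rough local approximation of that behavior (illustration only, not the exact key format CircleCI uses):

    sha256sum ./dev/profiles/apache-latest/.common.sh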
1 change: 1 addition & 0 deletions .gitattributes
@@ -7,3 +7,4 @@
 *.R text eol=lf
 
 # from apache spark
+*.sh text eol=lf
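With this rule in place, shell scripts are normalized to LF line endings regardless of the checkout platform. A quick way to verify the attribute on a tracked script (path chosen from this repository for illustration):

    git check-attr text eol -- dev/CI/main.sh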
1 change: 0 additions & 1 deletion .scalafix.conf
@@ -15,7 +15,6 @@ RemoveUnused {
   imports = true
   privates = true
   locals = true
-  // the following 2 are not compatible with scala 2.11
   patternvars = true
   params = true
 }
2 changes: 1 addition & 1 deletion dev/CI/main.sh
@@ -9,5 +9,5 @@ if [ "${2}" = "prepare" ]; then
   exit 0
 fi
 
-"$FWDIR"/CI/update-submodules.sh && \
+"$FWDIR"/update-submodules.sh && \
 "$FWDIR"/CI/pipeline.sh "-PnotLocal" "${BUILD_PROFILES[@]}"
5 changes: 3 additions & 2 deletions dev/profiles/apache-latest/.common.sh
@@ -3,7 +3,8 @@
 SPARK_NAME="${SPARK:-spark-3.5.0}"
 SPARK_DIR_ROOT="$HOME/.ci/spark-dist"
 
-SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala2.13
+export SPARK_SCALA_VERSION="2.13"
+SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala"${SPARK_SCALA_VERSION}"
 
 SPARK_URL="http://archive.apache.org/dist/spark/${SPARK_NAME}/${SPARK_DIR_NAME}.tgz"
 
@@ -13,4 +14,4 @@ tar -xzf "$SPARK_DIR_ROOT/$SPARK_DIR_NAME".tgz -C "$SPARK_DIR_ROOT"
 
 export SPARK_HOME="$SPARK_DIR_ROOT/$SPARK_DIR_NAME"
 
-export BUILD_PROFILES=("-PsparkVersion=3.5.0" "-PscalaVersion=2.13.12")
+export BUILD_PROFILES=("-PsparkVersion=3.5.0")
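With the defaults in this script, the variables compose as follows (worked example; the values follow directly from the assignments shown above):

    SPARK_NAME=spark-3.5.0
    SPARK_SCALA_VERSION=2.13
    SPARK_DIR_NAME=spark-3.5.0-bin-hadoop3-scala2.13
    SPARK_URL=http://archive.apache.org/dist/spark/spark-3.5.0/spark-3.5.0-bin-hadoop3-scala2.13.tgz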
7 changes: 3 additions & 4 deletions dev/profiles/apache-local/.common.sh
@@ -3,14 +3,13 @@
 SPARK_NAME="${SPARK:-spark-3.5.0}"
 SPARK_DIR_ROOT="$HOME/.ci/spark-dist"
 
-SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala2.13
+export SPARK_SCALA_VERSION="2.13"
+SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala"${SPARK_SCALA_VERSION}"
 
 SPARK_URL="http://archive.apache.org/dist/spark/${SPARK_NAME}/${SPARK_DIR_NAME}.tgz"
 
 # Download Spark
 wget -N "$SPARK_URL" -P "$SPARK_DIR_ROOT"
 tar -xzf "$SPARK_DIR_ROOT/$SPARK_DIR_NAME".tgz -C "$SPARK_DIR_ROOT"
 
-#export SPARK_HOME="$SPARK_DIR_ROOT/$SPARK_DIR_NAME"
-
-export BUILD_PROFILES=("-PsparkVersion=3.5.0" "-PscalaVersion=2.13.12")
+export BUILD_PROFILES=("-PsparkVersion=3.5.0")
5 changes: 3 additions & 2 deletions dev/profiles/apache-stable/.common.sh
@@ -3,7 +3,8 @@
 SPARK_NAME="${SPARK:-spark-3.5.0}"
 SPARK_DIR_ROOT="$HOME/.ci/spark-dist"
 
-SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala2.13
+export SPARK_SCALA_VERSION="2.13"
+SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala"${SPARK_SCALA_VERSION}"
 
 SPARK_URL="http://archive.apache.org/dist/spark/${SPARK_NAME}/${SPARK_DIR_NAME}.tgz"
 
@@ -13,4 +14,4 @@ tar -xzf "$SPARK_DIR_ROOT/$SPARK_DIR_NAME".tgz -C "$SPARK_DIR_ROOT"
 
 export SPARK_HOME="$SPARK_DIR_ROOT/$SPARK_DIR_NAME"
 
-export BUILD_PROFILES=("-PsparkVersion=3.5.0" "-PscalaVersion=2.13.12")
+export BUILD_PROFILES=("-PsparkVersion=3.5.0")
File renamed without changes.
