diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 1fbec5c1..75170696 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -29,6 +29,11 @@ jobs:
           SPARK_LOCAL_IP: 127.0.0.1
           JAVA_TOOL_OPTIONS: -Dfile.encoding=UTF8
         run: mvn -B -P scala_${{ matrix.scala }} clean install
+        uses: actions/upload-artifact@v3
+        if: matrix.os == 'ubuntu-latest'
+        with:
+          name: metalus-core_${{ matrix.scala }}
+          path: ~/.m2/repository/com/acxiom/metalus-core_${{ matrix.scala }}
   aws:
     needs: core
     name: AWS
@@ -50,6 +55,10 @@ jobs:
           java-version: 8.0.362+8
           cache: 'maven'
       - name: Build and Test
+        uses: actions/download-artifact@v3
+        with:
+          name: metalus-core_${{ matrix.scala }}
+          path: ~/.m2/repository/com/acxiom/metalus-core_${{ matrix.scala }}
         env:
           SPARK_LOCAL_IP: 127.0.0.1
           JAVA_TOOL_OPTIONS: -Dfile.encoding=UTF8
@@ -81,38 +90,42 @@ jobs:
           echo "::set-output name=spark::$spark"
           echo "::set-output name=scala::$scala"
       - name: Build and Test
-        env:
-          SPARK_LOCAL_IP: 127.0.0.1
-          JAVA_TOOL_OPTIONS: -Dfile.encoding=UTF8
-        run: mvn -B -P spark_${{ steps.get-versions.outputs.spark }},scala_${{ steps.get-versions.outputs.scala }} clean install
-  spark-aws:
-    needs: [spark, aws]
-    name: Spark AWS
-    strategy:
-      matrix:
-        os: [ ubuntu-latest, macos-latest ]
-        spark: [ '3.1_2.12', '3.2_2.12', '3.3_2.12', '3.1_2.13', '3.2_2.13', '3.3_2.13' ]
-    runs-on: ${{ matrix.os }}
-    defaults:
-      run:
-        working-directory: ./metalus-aws-spark
-    steps:
-      - name: Source Checkout
-        uses: actions/checkout@v2
-      - name: Set up JDK 1.8
-        uses: actions/setup-java@v3
+        uses: actions/download-artifact@v3
         with:
-          distribution: zulu
-          java-version: 8.0.362+8
-          cache: 'maven'
-      - id: get-versions
-        run: |
-          spark=$(echo ${{matrix.spark}} | cut -d_ -f1)
-          scala=$(echo ${{matrix.spark}} | cut -d_ -f2)
-          echo "::set-output name=spark::$spark"
-          echo "::set-output name=scala::$scala"
-      - name: Build and Test
+          name: metalus-core_${{ steps.get-versions.outputs.scala }}
+          path: ~/.m2/repository/com/acxiom/metalus-core_${{ steps.get-versions.outputs.scala }}
         env:
           SPARK_LOCAL_IP: 127.0.0.1
           JAVA_TOOL_OPTIONS: -Dfile.encoding=UTF8
         run: mvn -B -P spark_${{ steps.get-versions.outputs.spark }},scala_${{ steps.get-versions.outputs.scala }} clean install
+#  spark-aws:
+#    needs: [spark, aws]
+#    name: Spark AWS
+#    strategy:
+#      matrix:
+#        os: [ ubuntu-latest, macos-latest ]
+#        spark: [ '3.1_2.12', '3.2_2.12', '3.3_2.12', '3.1_2.13', '3.2_2.13', '3.3_2.13' ]
+#    runs-on: ${{ matrix.os }}
+#    defaults:
+#      run:
+#        working-directory: ./metalus-aws-spark
+#    steps:
+#      - name: Source Checkout
+#        uses: actions/checkout@v2
+#      - name: Set up JDK 1.8
+#        uses: actions/setup-java@v3
+#        with:
+#          distribution: zulu
+#          java-version: 8.0.362+8
+#          cache: 'maven'
+#      - id: get-versions
+#        run: |
+#          spark=$(echo ${{matrix.spark}} | cut -d_ -f1)
+#          scala=$(echo ${{matrix.spark}} | cut -d_ -f2)
+#          echo "::set-output name=spark::$spark"
+#          echo "::set-output name=scala::$scala"
+#      - name: Build and Test
+#        env:
+#          SPARK_LOCAL_IP: 127.0.0.1
+#          JAVA_TOOL_OPTIONS: -Dfile.encoding=UTF8
+#        run: mvn -B -P spark_${{ steps.get-versions.outputs.spark }},scala_${{ steps.get-versions.outputs.scala }} clean install
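
A minimal, self-contained sketch of the artifact hand-off the diff above introduces, shown outside the diff for clarity: the core job publishes the locally built metalus-core jars from the Maven repository with actions/upload-artifact, and a dependent job restores them with actions/download-artifact before running its own Maven build. The workflow name, trigger, job and step names, and the fixed 2.13 Scala suffix are illustrative assumptions, not values taken from the repository.

name: artifact-handoff-sketch   # hypothetical workflow name
on: push                        # hypothetical trigger

jobs:
  core:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Build core
        run: mvn -B clean install
      # Publish the locally installed metalus-core jars so downstream jobs can reuse them
      - uses: actions/upload-artifact@v3
        with:
          name: metalus-core_2.13
          path: ~/.m2/repository/com/acxiom/metalus-core_2.13
  aws:
    needs: core
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # Restore the core jars into the local Maven repository before building this module
      - uses: actions/download-artifact@v3
        with:
          name: metalus-core_2.13
          path: ~/.m2/repository/com/acxiom/metalus-core_2.13
      - name: Build AWS module
        run: mvn -B clean install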