azure-pipelines.yml: 87 changes (64 additions, 23 deletions)
@@ -6,34 +6,23 @@ trigger:
     - master
 
 jobs:
-- job: Build
-  displayName: 'Build sources and run unit tests'
+- job: Build_2_11
+  displayName: 'Build sources and run unit tests for Scala 2.11'
   pool:
     vmImage: 'ubuntu-latest'
   steps:
-  - script: sbt +clean
-    displayName: 'Running $sbt +clean'
-  - script: sbt +update
-    displayName: 'Running $sbt +update'
-  - script: sbt +compile
-    displayName: 'Running $sbt +compile'
-  - script: sbt +test
-    displayName: 'Running $sbt +test'
-  - task: Bash@3
-    inputs:
-      filePath: 'script/download_spark.sh'
-    displayName: 'Downloading spark'
-  - task: PythonScript@0
-    inputs:
-      scriptSource: 'filePath'
-      scriptPath: 'run-tests.py'
-    displayName: 'Running python tests'
-    env:
-      SPARK_HOME: $(Build.SourcesDirectory)/spark-2.4.2-bin-hadoop2.7
+  - script: sbt ++2.11.12 clean
+    displayName: 'Running $sbt clean'
+  - script: sbt ++2.11.12 update
+    displayName: 'Running $sbt update'
+  - script: sbt ++2.11.12 compile
+    displayName: 'Running $sbt compile'
+  - script: sbt ++2.11.12 test
+    displayName: 'Running $sbt test'
   # If not a pull request, publish artifacts.
   - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
-    - script: sbt +package
-      displayName: 'Running $sbt +package'
+    - script: sbt ++2.11.12 package
+      displayName: 'Running $sbt package'
     - task: CopyFiles@2
       displayName: 'Copy hyperspace-core JAR'
       inputs:
@@ -45,3 +34,55 @@ jobs:
       inputs:
         artifactName: 'hyperspace-core'
         pathtoPublish: '$(Build.ArtifactStagingDirectory)/hyperspace-core/'
+
+- job: Build_2_12
+  displayName: 'Build sources and run unit tests for Scala 2.12'
+  pool:
+    vmImage: 'ubuntu-latest'
+  steps:
+  - script: sbt ++2.12.8 clean
+    displayName: 'Running $sbt clean'
+  - script: sbt ++2.12.8 update
+    displayName: 'Running $sbt update'
+  - script: sbt ++2.12.8 compile
+    displayName: 'Running $sbt compile'
+  - script: sbt ++2.12.8 test
+    displayName: 'Running $sbt test'
+  # If not a pull request, publish artifacts.
+  - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+    - script: sbt ++2.12.8 package
+      displayName: 'Running $sbt package'
+    - task: CopyFiles@2
+      displayName: 'Copy hyperspace-core JAR'
+      inputs:
+        sourceFolder: '$(Build.SourcesDirectory)/target/'
+        contents: '**/*.jar'
+        targetFolder: '$(Build.ArtifactStagingDirectory)/hyperspace-core/'
+    - task: PublishBuildArtifacts@1
+      displayName: 'Publish Hyperspace artifacts'
+      inputs:
+        artifactName: 'hyperspace-core'
+        pathtoPublish: '$(Build.ArtifactStagingDirectory)/hyperspace-core/'
+
+- job: PythonTest
+  displayName: 'Run Python tests'
+  pool:
+    vmImage: 'ubuntu-latest'
+  steps:
+  - script: sbt ++2.11.12 clean
+    displayName: 'Running $sbt clean'
+  - script: sbt ++2.11.12 update
+    displayName: 'Running $sbt update'
+  - script: sbt ++2.11.12 compile
+    displayName: 'Running $sbt compile'
+  - task: Bash@3
+    inputs:
+      filePath: 'script/download_spark.sh'
+    displayName: 'Downloading spark'
+  - task: PythonScript@0
+    inputs:
+      scriptSource: 'filePath'
+      scriptPath: 'run-tests.py'
+    displayName: 'Running python tests'
+    env:
+      SPARK_HOME: $(Build.SourcesDirectory)/spark-2.4.2-bin-hadoop2.7
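
For context on the `++` switches above: `sbt +task` cross-builds against every declared Scala version in one invocation, while `sbt ++2.11.12 task` and `sbt ++2.12.8 task` pin a single Scala version per run, which is what lets this change split the work into separate per-version jobs. A minimal build.sbt sketch of the cross-build declaration these commands assume is shown below; the project name, the Spark dependency, and its scope are illustrative assumptions and are not part of this diff (only the spark-2.4.2 binary used for the Python tests appears above).

// build.sbt (sketch, not part of this change): cross-build settings assumed by the
// `sbt ++2.11.12 ...` and `sbt ++2.12.8 ...` invocations in the jobs above.
name := "hyperspace-core"                       // illustrative; mirrors the artifact name used in CI

scalaVersion := "2.11.12"                       // default Scala version for plain local builds
crossScalaVersions := Seq("2.11.12", "2.12.8")  // versions the CI jobs select with `++<version>`

// Illustrative dependency: Spark artifacts are published per Scala binary version,
// so `%%` resolves the _2.11 or _2.12 artifact matching the selected Scala version.
libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.4.2" % "provided"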