Commit 0f0f14c ("WIP.") by klueska, Aug 22, 2017 (parent f07ef6c)
Showing 5 changed files with 1,581 additions and 0 deletions.

ci/Jenkinsfile-linux-integration (new file, 315 lines)
#!/usr/bin/env groovy

/**
* This Jenkinsfile runs a set of parallel builders for the dcos-cli across
* multiple platforms (linux/mac/windows).
*
* One set of builders builds the CLI into a binary on each platform. The other
* set of builders runs integration tests on each platform. Under the hood, the
* integration test builders use `dcos_launch` to create the DC/OS clusters for
each platform to run their tests against. Unfortunately, `dcos_launch` only
* works reliably on Linux, so we use a single linux instance to create all of
* the clusters and separate linux/mac/windows instances to run the actual
* tests.
*/

/**
* These are the platforms we are building against.
*/
def platforms = ["linux"]

/**
* This generates the `dcos_launch` config for a particular build.
*/
def generateConfig(deploymentName, templateUrl) {
return """
---
launch_config_version: 1
deployment_name: ${deploymentName}
template_url: ${templateUrl}
provider: aws
aws_region: us-west-2
template_parameters:
KeyName: default
AdminLocation: 0.0.0.0/0
PublicSlaveInstanceCount: 1
SlaveInstanceCount: 1
"""
}


/**
* This class abstracts away the functions required to create a test cluster
* for each of our platforms.
*/
class TestCluster implements Serializable {
WorkflowScript script
String platform

TestCluster(WorkflowScript script, String platform) {
this.script = script
this.platform = platform
}

/**
* Creates a new test cluster for the given platform using dcos-launch.
* Waits (blocks) for the cluster to be ready to use.
* Exits if interrupted by user.
* Retries 3 times.
* Destroys cluster on failure.
*/
def create() {
def createAttempts = 0
while (true) {
try {
createAttempts++
script.sh "rm -rf ${platform}_config.yaml"
script.sh "rm -rf ${platform}_cluster_info.json"
script.writeFile([
"file": "${platform}_config.yaml",
"text" : script.generateConfig(
"dcos-cli-${platform}-${script.env.BRANCH_NAME}-${script.env.BUILD_ID}-${createAttempts}",
"${script.env.CF_TEMPLATE_URL}")])

script.sh "./dcos-launch create -c ${platform}_config.yaml -i ${platform}_cluster_info.json"
script.sh "./dcos-launch wait -i ${platform}_cluster_info.json"
break
} catch(InterruptedException | hudson.AbortException e) {
script.echo("Build interrupted. Destroying cluster....")
destroy()
script.echo("Cluster destroyed. Exiting...")
throw e
} catch(Exception e) {
script.echo("Exception:" + e.toString());
script.echo("Cluster creation failed. Destroying cluster....");
destroy()
if (createAttempts >= 3) {
script.echo("Cluster destroyed. Maximum number of creation attempts exceeded. Exiting...")
throw e
}
script.echo("Cluster destroyed. Retrying...")
}
}
}

/**
* Destroys a test cluster previously created using `create()`.
*/
def destroy() {
script.sh "./dcos-launch delete -i ${platform}_cluster_info.json"
}

/**
* Retrieves the URL of a cluster previously created using `create()`.
*/
def getDcosUrl() {
/* In the future, consider doing the following with jq instead of
inline python (however, jq is not installed on our windows machines
at the moment). */
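/* A possible jq equivalent (untested sketch, assuming jq were installed on
every node) would be:

./dcos-launch describe -i ${platform}_cluster_info.json \
| jq -r '.masters[0].public_ip'
*/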
script.sh """
./dcos-launch describe -i ${platform}_cluster_info.json \
| python -c \
'import sys, json; \
contents = json.load(sys.stdin); \
print(contents["masters"][0]["public_ip"], end="")' \
> ${platform}_dcos_url"""

return script.readFile("${platform}_dcos_url")
}

/**
* Retrieves the ACS Token of a cluster previously created using `create()`.
*/
def getAcsToken() {
def dcosUrl = this.getDcosUrl()

/* In the future, consider doing the following with curl / jq instead
of inline python (however, jq is not installed on our windows
machines at the moment). */
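/* A possible curl/jq equivalent (untested sketch, assuming both tools were
available on every node) would be:

curl -skX POST http://<dcos_url>/acs/api/v1/auth/login \
-H 'Content-Type: application/json' \
-d '{"uid": "'"${DCOS_ADMIN_USERNAME}"'", "password": "'"${DCOS_ADMIN_PASSWORD}"'"}' \
| jq -r '.token'
*/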
script.sh """
python -c \
'import requests; \
requests.packages.urllib3.disable_warnings(); \
js={"uid":"${script.env.DCOS_ADMIN_USERNAME}", \
"password": "${script.env.DCOS_ADMIN_PASSWORD}"}; \
r=requests.post("http://${dcosUrl}/acs/api/v1/auth/login", \
json=js, \
verify=False); \
print(r.json()["token"], end="")' \
> ${platform}_acs_token"""

return script.readFile("${platform}_acs_token")
}
}



/**
* This function returns a closure that prepares a test environment for a
* specific platform on a specific node in a specific workspace.
*/
def testBuilder(String platform, String nodeId, String workspace = null) {
return { Closure _body ->
def body = _body

return {
def destroyCluster = true
def cluster = new TestCluster(this, platform)

// stage ("Create ${platform} cluster") {
// cluster.create()
// }

try {
def dcosUrl = cluster.getDcosUrl()
def acsToken = cluster.getAcsToken()

node(nodeId) {
if (!workspace) {
workspace = "${env.WORKSPACE}"
}

ws (workspace) {
stage ('Cleanup workspace') {
deleteDir()
}

stage ("Unstash dcos-cli repository") {
unstash('dcos-cli')
}

withCredentials(
[[$class: 'FileBinding',
credentialsId: '1c206779-acc0-4844-97f6-7b3ed081a456',
variable: 'DCOS_SNAKEOIL_CRT_PATH'],
[$class: 'FileBinding',
credentialsId: '23743034-1ac4-49f7-b2e6-a661aee2d11b',
variable: 'CLI_TEST_SSH_KEY_PATH']]) {

withEnv(["DCOS_URL=${dcosUrl}",
"DCOS_ACS_TOKEN=${acsToken}"]) {
try {
body()
} catch(InterruptedException | hudson.AbortException e) {
echo(
"Build interrupted. The DC/OS cluster at" +
" ${dcosUrl} will be destroyed.")
destroyCluster = true
throw e
} catch (Exception e) {
echo(
"Build failed. The DC/OS cluster at" +
" ${dcosUrl} will remain temporarily" +
" active so you can debug what went" +
" wrong.")
destroyCluster = false
throw e
}
}
}
}
}

} finally {
if (destroyCluster) {
stage ("Destroy ${platform} cluster") {
try { cluster.destroy() }
catch (Exception e) {}
}
}
}
}
}
}

/**
* These are the builds that can be run in parallel.
*/
def builders = [:]


builders['linux-tests'] = testBuilder('linux', 'py35', '/workspace')({
stage ("Run dcos-cli tests") {
sh '''
rm -rf ~/.dcos; \
grep -q "^.* dcos.snakeoil.mesosphere.com$" /etc/hosts && \
sed -iold "s/^.* dcos.snakeoil.mesosphere.com$/${DCOS_URL} dcos.snakeoil.mesosphere.com/" /etc/hosts || \
echo ${DCOS_URL} dcos.snakeoil.mesosphere.com >> /etc/hosts'''

dir('dcos-cli/cli') {
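/* The shell below appends the cluster's ACS token to the bundled test config
(tests/data/dcos.toml) and then unsets DCOS_URL/DCOS_ACS_TOKEN, presumably so
`make test-binary` picks everything up from that config file rather than the
environment. */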
sh '''
export PYTHONIOENCODING=utf-8; \
export DCOS_CONFIG=tests/data/dcos.toml; \
chmod 600 ${DCOS_CONFIG}; \
echo dcos_acs_token = \\\"${DCOS_ACS_TOKEN}\\\" >> ${DCOS_CONFIG}; \
cat ${DCOS_CONFIG}; \
unset DCOS_URL; \
unset DCOS_ACS_TOKEN; \
make test-binary'''
}
}
})
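/**
 * Builders for the other platforms would plug in the same way. This is a
 * sketch only, with hypothetical node labels (this change wires up linux only):
 *
 * builders['mac-tests'] = testBuilder('mac', 'mac-node')({ ... })
 * builders['windows-tests'] = testBuilder('windows', 'windows-node')({ ... })
 */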


/**
* This node bootstraps everything including creating all the test clusters,
* starting the builders, and finally destroying all the clusters once they
* are done.
*/
throttle(['dcos-cli']) {
node('py35') {
stage('Cleanup workspace') {
deleteDir()
}

stage ('Update node') {
sh 'pip install requests'
}

stage ('Download dcos-launch') {
sh 'wget https://downloads.dcos.io/dcos-test-utils/bin/linux/dcos-launch'
sh 'chmod a+x dcos-launch'
}

stage ('Pull dcos-cli repository') {
dir('dcos-cli') {
checkout([
$class: 'GitSCM',
userRemoteConfigs: scm.userRemoteConfigs,
branches: scm.branches,
doGenerateSubmoduleConfigurations: scm.doGenerateSubmoduleConfigurations,
submoduleCfg: scm.submoduleCfg,
extensions: [
[
$class: 'CloneOption',
shallow: true,
depth: 0,
noTags: true,
timeout: 10
]
]
])
}
}

stage ('Stash dcos-cli repository') {
stash(['includes': 'dcos-cli/**', name: 'dcos-cli'])
}

withCredentials(
[[$class: 'AmazonWebServicesCredentialsBinding',
credentialsId: '7155bd15-767d-4ae3-a375-e0d74c90a2c4',
accessKeyVariable: 'AWS_ACCESS_KEY_ID',
secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'],
[$class: 'StringBinding',
credentialsId: '8d7d3d95-fc1f-42f4-941a-ad43487fcff7',
variable: 'CF_TEMPLATE_URL'],
[$class: 'UsernamePasswordMultiBinding',
credentialsId: '323df884-742b-4099-b8b7-d764e5eb9674',
usernameVariable: 'DCOS_ADMIN_USERNAME',
passwordVariable: 'DCOS_ADMIN_PASSWORD']]) {

parallel builders
}
}
}
