Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Acceptance-test fixture: a job whose task cluster sets BOTH
# instance_pool_id and node_type_id. Per the recorded output, validate/summary
# render node_type_id as "" and the jobs/create request omits it entirely,
# keeping only the instance pool — this file exercises that behavior.
resources:
  jobs:
    some_other_job:
      # $UNIQUE_NAME / $TEST_INSTANCE_POOL_ID etc. are substituted by the
      # acceptance-test runner before the bundle is loaded.
      name: "[${bundle.target}] Test Wheel Job $UNIQUE_NAME"
      tasks:
        - task_key: TestTask
          new_cluster:
            num_workers: 1
            spark_version: $DEFAULT_SPARK_VERSION
            # node_type_id is intentionally set alongside instance_pool_id;
            # the test asserts how the CLI reconciles the two.
            node_type_id: $NODE_TYPE_ID
            data_security_mode: USER_ISOLATION
            instance_pool_id: $TEST_INSTANCE_POOL_ID
          python_wheel_task:
            package_name: my_test_code
            entry_point: run
            parameters:
              - "one"
              - "two"
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Run this acceptance test against the local test server only, not a real cloud
# workspace.
Local = true
Cloud = false

# Restrict the deployment-backend matrix to terraform.
# NOTE(review): the sibling test.toml in this PR documents that the
# "direct-exp" backend produces a differing output (extra empty node_type_id).
[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@

>>> [CLI] bundle validate -o json
[
{
"new_cluster": {
"data_security_mode": "USER_ISOLATION",
"instance_pool_id": "$TEST_INSTANCE_POOL_ID",
"node_type_id": "",
"num_workers": 1,
"spark_version": "$DEFAULT_SPARK_VERSION"
},
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code",
"parameters": [
"one",
"two"
]
},
"task_key": "TestTask"
}
]

>>> [CLI] bundle summary -o json
[
{
"new_cluster": {
"data_security_mode": "USER_ISOLATION",
"instance_pool_id": "$TEST_INSTANCE_POOL_ID",
"node_type_id": "",
"num_workers": 1,
"spark_version": "$DEFAULT_SPARK_VERSION"
},
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code",
"parameters": [
"one",
"two"
]
},
"task_key": "TestTask"
}
]

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> jq -s .[] | select(.path=="/api/2.2/jobs/create") | .body.tasks out.requests.txt
[
{
"new_cluster": {
"data_security_mode": "USER_ISOLATION",
"instance_pool_id": "$TEST_INSTANCE_POOL_ID",
"num_workers": 1,
"spark_version": "$DEFAULT_SPARK_VERSION"
},
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code",
"parameters": [
"one",
"two"
]
},
"task_key": "TestTask"
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Render the job's tasks from both `validate` and `summary` to check how the
# instance_pool_id / node_type_id combination appears in the resolved config.
trace $CLI bundle validate -o json | jq .resources.jobs.some_other_job.tasks
trace $CLI bundle summary -o json | jq .resources.jobs.some_other_job.tasks

# Deploy against the test server; with RecordRequests enabled the API calls
# are captured into out.requests.txt.
trace $CLI bundle deploy

# Assert on the actual jobs/create request body: which cluster fields the CLI
# sent to the API (the recorded output shows node_type_id is omitted).
trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt
rm out.requests.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Capture every API request made during the test into out.requests.txt so the
# script can assert on the jobs/create payload.
RecordRequests = true

# Run only with the terraform deployment backend: the direct-exp backend sends
# an extra empty "node_type_id" in new_cluster, diverging from output.txt as
# shown in the recorded failure below.
# Fails on direct with
# --- FAIL: TestAccept/bundle/resources/jobs/instance_pool_and_node_type (0.00s)
# --- FAIL: TestAccept/bundle/resources/jobs/instance_pool_and_node_type/DATABRICKS_CLI_DEPLOYMENT=direct-exp (1.60s)
# acceptance_test.go:1178: Writing updated bundle config to databricks.yml. BundleConfig sections: default_name
# acceptance_test.go:722: Diff:
# --- bundle/resources/jobs/instance_pool_and_node_type/output.txt
# +++ /var/folders/5y/9kkdnjw91p11vsqwk0cvmk200000gp/T/TestAcceptbundleresourcesjobsinstance_pool_and_node_typeDATABRICKS_CLI_DEPLOYMENT=direct-exp3221363519/001/output.txt
# @@ -55,6 +55,7 @@
# "new_cluster": {
# "data_security_mode": "USER_ISOLATION",
# "instance_pool_id": "$TEST_INSTANCE_POOL_ID",
# + "node_type_id": "",
# "num_workers": 1,
# "spark_version": "$DEFAULT_SPARK_VERSION"
# },
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
Loading