diff --git a/acceptance/bundle/artifacts/whl_dynamic/out.plan_create.direct-exp.json b/acceptance/bundle/artifacts/whl_dynamic/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..dba21fa4df --- /dev/null +++ b/acceptance/bundle/artifacts/whl_dynamic/out.plan_create.direct-exp.json @@ -0,0 +1,75 @@ +{ + "plan": { + "resources.jobs.test_job": { + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "environments": [ + { + "environment_key": "test_env", + "spec": { + "client": "1", + "dependencies": [ + "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" + ] + } + } + ], + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "[default] My Wheel Job", + "queue": { + "enabled": true + }, + "tasks": [ + { + "environment_key": "test_env", + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "ServerlessTestTask" + }, + { + "existing_cluster_id": "0717-132531-5opeqon1", + "for_each_task": { + "inputs": "[1]", + "task": { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "SubTask" + } + }, + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" + }, + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } + ] + } + } + } + } +} diff --git a/acceptance/bundle/artifacts/whl_dynamic/out.plan_create.terraform.json b/acceptance/bundle/artifacts/whl_dynamic/out.plan_create.terraform.json new file mode 100644 index 0000000000..20f16691a5 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_dynamic/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.test_job": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/artifacts/whl_dynamic/out.plan_update.direct-exp.json b/acceptance/bundle/artifacts/whl_dynamic/out.plan_update.direct-exp.json new file mode 100644 index 0000000000..2d4d666cfe --- /dev/null +++ b/acceptance/bundle/artifacts/whl_dynamic/out.plan_update.direct-exp.json @@ -0,0 +1,88 @@ +{ + "plan": { + "resources.jobs.test_job": { + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "environments": [ + { + "environment_key": "test_env", + "spec": { + "client": "1", + "dependencies": [ + "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" + ] + } + } + ], + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "[default] My Wheel Job", + "queue": { + "enabled": true + }, + "tasks": [ + { + "environment_key": "test_env", + "python_wheel_task": { + "entry_point": "run", + "package_name": 
"my_test_code" + }, + "task_key": "ServerlessTestTask" + }, + { + "existing_cluster_id": "0717-132531-5opeqon1", + "for_each_task": { + "inputs": "[1]", + "task": { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "SubTask" + } + }, + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" + }, + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } + ] + } + }, + "changes": { + "local": { + "environments[0].spec.dependencies[0]": { + "action": "update" + }, + "tasks[1].for_each_task.task.libraries[0].whl": { + "action": "update" + }, + "tasks[1].libraries[0].whl": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/artifacts/whl_dynamic/out.plan_update.terraform.json b/acceptance/bundle/artifacts/whl_dynamic/out.plan_update.terraform.json new file mode 100644 index 0000000000..cc5e7f65b8 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_dynamic/out.plan_update.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.test_job": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/artifacts/whl_dynamic/output.txt b/acceptance/bundle/artifacts/whl_dynamic/output.txt index ec8da013d3..af0090141a 100644 --- a/acceptance/bundle/artifacts/whl_dynamic/output.txt +++ b/acceptance/bundle/artifacts/whl_dynamic/output.txt @@ -23,6 +23,7 @@ "type": "whl" } } +Building my_test_code... >>> [CLI] bundle deploy Building my_test_code... @@ -103,6 +104,8 @@ my_test_code-0.0.1+[UNIX_TIME_NANOS][1].dist-info/RECORD "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/prebuilt/other_test_code-0.0.1-py3-none-any.whl" === Updating the local wheel and deploying again +Building my_test_code... + >>> [CLI] bundle deploy Building my_test_code... Uploading .databricks/bundle/default/patched_wheels/my_prebuilt_whl_other_test_code/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl... 
diff --git a/acceptance/bundle/artifacts/whl_dynamic/script b/acceptance/bundle/artifacts/whl_dynamic/script index cbc61f2f12..b9319bae50 100644 --- a/acceptance/bundle/artifacts/whl_dynamic/script +++ b/acceptance/bundle/artifacts/whl_dynamic/script @@ -6,6 +6,7 @@ cp -r $TESTDIR/../whl_prebuilt_multiple/dist/lib/other_test_code-0.0.1-py3-none- trace $CLI bundle validate -o json | jq .artifacts +$CLI bundle debug plan > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy title "There are 2 original wheels and 2 patched ones" @@ -22,8 +23,9 @@ trace jq .path < out.requests.txt | grep import | grep whl | sort rm out.requests.txt -title "Updating the local wheel and deploying again" +title "Updating the local wheel and deploying again\n" touch my_test_code/src/new_module.py +$CLI bundle debug plan > out.plan_update.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy title "Verify contents, it should now have new_module.py" diff --git a/acceptance/bundle/bundle_tag/url_ref/out.deploy.direct-exp.txt b/acceptance/bundle/bundle_tag/url_ref/out.deploy.direct-exp.txt index 3b0c852fb9..40a35fe09b 100644 --- a/acceptance/bundle/bundle_tag/url_ref/out.deploy.direct-exp.txt +++ b/acceptance/bundle/bundle_tag/url_ref/out.deploy.direct-exp.txt @@ -1,7 +1,5 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... -Error: cannot resolve "${resources.jobs.foo.url}": schema mismatch: url: field "url" not found in resources.Job; url: field "url" not found in jobs.Job - -Error: cannot plan resources.jobs.bar: dependency failed: resources.jobs.foo +Error: cannot plan resources.jobs.bar: cannot resolve "${resources.jobs.foo.url}": schema mismatch: url: field "url" not found in jobs.JobSettings; url: field "url" not found in jobs.Job Error: planning failed diff --git a/acceptance/bundle/bundle_tag/url_ref/out.plan.direct-exp.txt b/acceptance/bundle/bundle_tag/url_ref/out.plan.direct-exp.txt index 5a5d2e7646..d640b2d1e4 100644 --- a/acceptance/bundle/bundle_tag/url_ref/out.plan.direct-exp.txt +++ b/acceptance/bundle/bundle_tag/url_ref/out.plan.direct-exp.txt @@ -1,6 +1,4 @@ -Error: cannot resolve "${resources.jobs.foo.url}": schema mismatch: url: field "url" not found in resources.Job; url: field "url" not found in jobs.Job - -Error: cannot plan resources.jobs.bar: dependency failed: resources.jobs.foo +Error: cannot plan resources.jobs.bar: cannot resolve "${resources.jobs.foo.url}": schema mismatch: url: field "url" not found in jobs.JobSettings; url: field "url" not found in jobs.Job Error: planning failed diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_description.direct-exp.json b/acceptance/bundle/deploy/models/basic/out.plan_update_description.direct-exp.json new file mode 100644 index 0000000000..c021c3ad56 --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_description.direct-exp.json @@ -0,0 +1,26 @@ +{ + "plan": { + "resources.models.my_model": { + "action": "update", + "new_state": { + "config": { + "description": "new-description-[UNIQUE_NAME]", + "name": "original-name-[UNIQUE_NAME]", + "tags": [ + { + "key": "key1", + "value": "value1" + } + ] + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_description.terraform.json b/acceptance/bundle/deploy/models/basic/out.plan_update_description.terraform.json new file mode 100644 index 0000000000..28e9cc1333 --- /dev/null +++ 
b/acceptance/bundle/deploy/models/basic/out.plan_update_description.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.models.my_model": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_name.direct-exp.json b/acceptance/bundle/deploy/models/basic/out.plan_update_name.direct-exp.json new file mode 100644 index 0000000000..fa79312ba1 --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_name.direct-exp.json @@ -0,0 +1,26 @@ +{ + "plan": { + "resources.models.my_model": { + "action": "recreate", + "new_state": { + "config": { + "description": "new-description-[UNIQUE_NAME]", + "name": "new-name-[UNIQUE_NAME]", + "tags": [ + { + "key": "key1", + "value": "value1" + } + ] + } + }, + "changes": { + "local": { + "name": { + "action": "recreate" + } + } + } + } + } +} diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_name.terraform.json b/acceptance/bundle/deploy/models/basic/out.plan_update_name.terraform.json new file mode 100644 index 0000000000..7e083ec1f3 --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_name.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.models.my_model": { + "action": "recreate" + } + } +} diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_tag.direct-exp.json b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.direct-exp.json new file mode 100644 index 0000000000..cf823a7f96 --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.direct-exp.json @@ -0,0 +1,14 @@ +{ + "plan": { + "resources.models.my_model": { + "action": "skip", + "changes": { + "local": { + "tags": { + "action": "skip" + } + } + } + } + } +} diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_tag.direct-exp.txt b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.direct-exp.txt new file mode 100644 index 0000000000..c54c9d511c --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.direct-exp.txt @@ -0,0 +1 @@ +Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_tag.terraform.json b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.terraform.json new file mode 100644 index 0000000000..28e9cc1333 --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.models.my_model": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/deploy/models/basic/out.plan_update_tag.terraform.txt b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.terraform.txt new file mode 100644 index 0000000000..2949936e68 --- /dev/null +++ b/acceptance/bundle/deploy/models/basic/out.plan_update_tag.terraform.txt @@ -0,0 +1,3 @@ +update models.my_model + +Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged diff --git a/acceptance/bundle/deploy/models/basic/output.txt b/acceptance/bundle/deploy/models/basic/output.txt index 19c0f9d610..3b4b4ba6c1 100644 --- a/acceptance/bundle/deploy/models/basic/output.txt +++ b/acceptance/bundle/deploy/models/basic/output.txt @@ -79,11 +79,8 @@ Deployment complete! 
] } -=== add a new tag, this should be a no-op +=== add a new tag, this should be a no-op; terraform does make an update request, but since the update request has no tags in it, it does nothing; direct skips the request completely >>> [CLI] bundle plan -update models.my_model - -Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/deploy-models-basic-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deploy/models/basic/script b/acceptance/bundle/deploy/models/basic/script index df1aacda57..dc1a8b8561 100644 --- a/acceptance/bundle/deploy/models/basic/script +++ b/acceptance/bundle/deploy/models/basic/script @@ -15,6 +15,7 @@ trace export MODEL_DESCRIPTION=new-description-$UNIQUE_NAME envsubst < templates/one_tag.tmpl > databricks.yml title "update the description, this should update the description remotely as well" trace $CLI bundle plan +$CLI bundle debug plan > out.plan_update_description.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace $CLI model-registry get-model $MODEL_NAME | jq '.registered_model_databricks | {name, description, tags}' @@ -22,11 +23,13 @@ trace export MODEL_NAME=new-name-$UNIQUE_NAME envsubst < templates/one_tag.tmpl > databricks.yml title "update the name, this should recreate the model with the new name" trace $CLI bundle plan +$CLI bundle debug plan > out.plan_update_name.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace $CLI model-registry get-model $MODEL_NAME | jq '.registered_model_databricks | {name, description, tags}' -title "add a new tag, this should be a no-op" +title "add a new tag, this should be a no-op; terraform does make an update request, but since the update request has no tags in it, it does nothing; direct skips the request completely" envsubst < templates/two_tag.tmpl > databricks.yml -trace $CLI bundle plan +trace $CLI bundle plan > out.plan_update_tag.$DATABRICKS_BUNDLE_ENGINE.txt +$CLI bundle debug plan > out.plan_update_tag.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace $CLI model-registry get-model $MODEL_NAME | jq '.registered_model_databricks | {name, description, tags}' diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/databricks.yml b/acceptance/bundle/resource_deps/bad_ref_string_to_int/databricks.yml new file mode 100644 index 0000000000..775466236a --- /dev/null +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/databricks.yml @@ -0,0 +1,10 @@ +resources: + jobs: + bar: + name: job bar + foo: + name: job foo + tasks: + - task_key: job_task + run_job_task: + job_id: ${resources.jobs.bar.name} diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.plan.direct-exp.txt b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.plan.direct-exp.txt new file mode 100644 index 0000000000..676512c4d0 --- /dev/null +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.plan.direct-exp.txt @@ -0,0 +1,8 @@ + +>>> musterr [CLI] bundle plan +Error: cannot plan resources.jobs.foo: cannot update tasks[0].run_job_task.job_id with value of "${resources.jobs.bar.name}": cannot set (*jobs.JobSettings).tasks[0].run_job_task.job_id to string ("job bar"): cannot parse "job bar" as int64: strconv.ParseInt: parsing "job bar": invalid syntax + +Error: planning failed + + +Exit code (musterr): 1 diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.plan.terraform.txt b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.plan.terraform.txt new file mode 100644
index 0000000000..854ed10c58 --- /dev/null +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.plan.terraform.txt @@ -0,0 +1,16 @@ + +>>> musterr [CLI] bundle plan +Error: exit status 1 + +Error: Incorrect attribute value type + + on bundle.tf.json line 43, in resource.databricks_job.foo.task[0].run_job_task: + 43: "job_id": "${databricks_job.bar.name}" + ├──────────────── + │ databricks_job.bar.name is "job bar" + +Inappropriate value for attribute "job_id": a number is required. + + + +Exit code (musterr): 1 diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt new file mode 100644 index 0000000000..a9e8fbec28 --- /dev/null +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt @@ -0,0 +1,12 @@ +{ + "method": "GET", + "path": "/api/2.0/preview/scim/v2/Me" +} +{ + "method": "GET", + "path": "/api/2.0/workspace/get-status" +} +{ + "method": "GET", + "path": "/api/2.0/workspace/get-status" +} diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.test.toml b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.test.toml new file mode 100644 index 0000000000..e092fd5ed6 --- /dev/null +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct-exp"] diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/output.txt b/acceptance/bundle/resource_deps/bad_ref_string_to_int/output.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/script b/acceptance/bundle/resource_deps/bad_ref_string_to_int/script new file mode 100644 index 0000000000..3471c56628 --- /dev/null +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/script @@ -0,0 +1 @@ +trace musterr $CLI bundle plan &> out.plan.$DATABRICKS_BUNDLE_ENGINE.txt diff --git a/acceptance/bundle/resource_deps/id_chain/out.plan_create.direct-exp.json b/acceptance/bundle/resource_deps/id_chain/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..a6ab9e11c4 --- /dev/null +++ b/acceptance/bundle/resource_deps/id_chain/out.plan_create.direct-exp.json @@ -0,0 +1,135 @@ +{ + "plan": { + "resources.jobs.a": { + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "aa_desc", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "aa", + "queue": { + "enabled": true + } + } + } + }, + "resources.jobs.b": { + "depends_on": [ + { + "node": "resources.jobs.a", + "label": "${resources.jobs.a.id}" + } + ], + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "prefix ${resources.jobs.a.id}", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "bb", + "queue": { + "enabled": true + } + }, + "vars": { + "description": "prefix ${resources.jobs.a.id}" + } + } + }, + "resources.jobs.c": { + "depends_on": [ + { + "node": "resources.jobs.b", + "label": "${resources.jobs.b.id}" + } + ], + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "prefix ${resources.jobs.b.id}", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "cc", + "queue": { + "enabled": true + } + }, + "vars": { + "description": "prefix ${resources.jobs.b.id}" + } + } + }, + "resources.jobs.d": { + "depends_on": [ + { + "node": "resources.jobs.c", + "label": "${resources.jobs.c.id}" + } + ], + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "prefix ${resources.jobs.c.id}", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "dd", + "queue": { + "enabled": true + } + }, + "vars": { + "description": "prefix ${resources.jobs.c.id}" + } + } + }, + "resources.jobs.e": { + "depends_on": [ + { + "node": "resources.jobs.d", + "label": "${resources.jobs.d.id}" + } + ], + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "prefix ${resources.jobs.d.id}", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "ee", + "queue": { + "enabled": true + } + }, + "vars": { + "description": "prefix ${resources.jobs.d.id}" + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/id_chain/out.plan_create.terraform.json b/acceptance/bundle/resource_deps/id_chain/out.plan_create.terraform.json new file mode 100644 index 0000000000..49611621e9 --- /dev/null +++ b/acceptance/bundle/resource_deps/id_chain/out.plan_create.terraform.json @@ -0,0 +1,19 @@ +{ + "plan": { + "resources.jobs.a": { + "action": "create" + }, + "resources.jobs.b": { + "action": "create" + }, + "resources.jobs.c": { + "action": "create" + }, + "resources.jobs.d": { + "action": "create" + }, + "resources.jobs.e": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resource_deps/id_chain/out.plan_skip.direct-exp.json b/acceptance/bundle/resource_deps/id_chain/out.plan_skip.direct-exp.json new file mode 100644 index 0000000000..8ecdec43af --- /dev/null +++ b/acceptance/bundle/resource_deps/id_chain/out.plan_skip.direct-exp.json @@ -0,0 +1,43 @@ +{ + "plan": { + "resources.jobs.a": { + "action": "skip" + }, + "resources.jobs.b": { + "depends_on": [ + { + "node": "resources.jobs.a", + "label": "${resources.jobs.a.id}" + } + ], + "action": "skip" + }, + "resources.jobs.c": { + "depends_on": [ + { + "node": "resources.jobs.b", + "label": "${resources.jobs.b.id}" + } + ], + "action": "skip" + }, + "resources.jobs.d": { + "depends_on": [ + { + "node": "resources.jobs.c", + "label": "${resources.jobs.c.id}" + } + ], + "action": "skip" + }, + "resources.jobs.e": { + "depends_on": [ + { + "node": "resources.jobs.d", + "label": "${resources.jobs.d.id}" + } + ], + "action": "skip" + } + } +} diff --git a/acceptance/bundle/resource_deps/id_chain/out.plan_skip.terraform.json b/acceptance/bundle/resource_deps/id_chain/out.plan_skip.terraform.json new file mode 100644 index 0000000000..c0034fb45c --- /dev/null +++ b/acceptance/bundle/resource_deps/id_chain/out.plan_skip.terraform.json @@ -0,0 +1,19 @@ +{ + "plan": { + "resources.jobs.a": { + "action": "skip" + }, + "resources.jobs.b": { + "action": "skip" + }, + "resources.jobs.c": { + "action": "skip" + }, + 
"resources.jobs.d": { + "action": "skip" + }, + "resources.jobs.e": { + "action": "skip" + } + } +} diff --git a/acceptance/bundle/resource_deps/id_chain/out.plan_update.direct-exp.json b/acceptance/bundle/resource_deps/id_chain/out.plan_update.direct-exp.json new file mode 100644 index 0000000000..dd2d9f5ab5 --- /dev/null +++ b/acceptance/bundle/resource_deps/id_chain/out.plan_update.direct-exp.json @@ -0,0 +1,158 @@ +{ + "plan": { + "resources.jobs.a": { + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "aa_new_desc", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "aa", + "queue": { + "enabled": true + } + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + }, + "resources.jobs.b": { + "depends_on": [ + { + "node": "resources.jobs.a", + "label": "${resources.jobs.a.id}" + } + ], + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "new_prefix [NUMID]", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "bb", + "queue": { + "enabled": true + } + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + }, + "resources.jobs.c": { + "depends_on": [ + { + "node": "resources.jobs.b", + "label": "${resources.jobs.b.id}" + } + ], + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "new_prefix [NUMID]", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "cc", + "queue": { + "enabled": true + } + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + }, + "resources.jobs.d": { + "depends_on": [ + { + "node": "resources.jobs.c", + "label": "${resources.jobs.c.id}" + } + ], + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "new_prefix [NUMID]", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "dd", + "queue": { + "enabled": true + } + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + }, + "resources.jobs.e": { + "depends_on": [ + { + "node": "resources.jobs.d", + "label": "${resources.jobs.d.id}" + } + ], + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "new_prefix [NUMID]", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "ee", + "queue": { + "enabled": true + } + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/id_chain/out.plan_update.terraform.json b/acceptance/bundle/resource_deps/id_chain/out.plan_update.terraform.json new file mode 100644 index 0000000000..02d95ce06a --- /dev/null +++ 
b/acceptance/bundle/resource_deps/id_chain/out.plan_update.terraform.json @@ -0,0 +1,19 @@ +{ + "plan": { + "resources.jobs.a": { + "action": "update" + }, + "resources.jobs.b": { + "action": "update" + }, + "resources.jobs.c": { + "action": "update" + }, + "resources.jobs.d": { + "action": "update" + }, + "resources.jobs.e": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/resource_deps/id_chain/output.txt b/acceptance/bundle/resource_deps/id_chain/output.txt index 7e6019b6ea..62dfb09f18 100644 --- a/acceptance/bundle/resource_deps/id_chain/output.txt +++ b/acceptance/bundle/resource_deps/id_chain/output.txt @@ -10,6 +10,8 @@ Plan: 5 to add, 0 to change, 0 to delete, 0 unchanged >>> print_requests +>>> print_requests + >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... @@ -27,6 +29,8 @@ Deployment complete! >>> update_file.py databricks.yml prefix new_prefix +>>> print_requests + >>> [CLI] bundle plan update jobs.a update jobs.b @@ -50,3 +54,5 @@ Deployment complete! "cc" "dd" "ee" + +>>> print_requests diff --git a/acceptance/bundle/resource_deps/id_chain/script b/acceptance/bundle/resource_deps/id_chain/script index 9f49d0554b..cc9e490f5a 100644 --- a/acceptance/bundle/resource_deps/id_chain/script +++ b/acceptance/bundle/resource_deps/id_chain/script @@ -16,14 +16,23 @@ print_requests_short() { trace $CLI bundle plan trace print_requests +$CLI bundle debug plan > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests + trace $CLI bundle deploy trace print_requests_short trace update_file.py databricks.yml aa_desc aa_new_desc trace update_file.py databricks.yml prefix new_prefix +$CLI bundle debug plan > out.plan_update.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests + trace $CLI bundle plan trace print_requests trace $CLI bundle deploy trace print_requests_short + +$CLI bundle debug plan > out.plan_skip.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests diff --git a/acceptance/bundle/resource_deps/missing_ingestion_definition/out.plan.direct-exp.txt b/acceptance/bundle/resource_deps/missing_ingestion_definition/out.plan.direct-exp.txt index ad36d23c6c..1102c97983 100644 --- a/acceptance/bundle/resource_deps/missing_ingestion_definition/out.plan.direct-exp.txt +++ b/acceptance/bundle/resource_deps/missing_ingestion_definition/out.plan.direct-exp.txt @@ -1,6 +1,4 @@ -Error: cannot resolve "${resources.pipelines.foo.ingestion_definition.connection_name}": field not set: ingestion_definition: cannot access nil value - -Error: cannot plan resources.pipelines.bar: dependency failed: resources.pipelines.foo +Error: cannot plan resources.pipelines.bar: cannot resolve "${resources.pipelines.foo.ingestion_definition.connection_name}": field not set: ingestion_definition: cannot access nil value Error: planning failed diff --git a/acceptance/bundle/resource_deps/missing_map_key/out.deploy.direct-exp.txt b/acceptance/bundle/resource_deps/missing_map_key/out.deploy.direct-exp.txt index 1cec359aad..e91b71487d 100644 --- a/acceptance/bundle/resource_deps/missing_map_key/out.deploy.direct-exp.txt +++ b/acceptance/bundle/resource_deps/missing_map_key/out.deploy.direct-exp.txt @@ -1,7 +1,5 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/state/default/files... 
-Error: cannot resolve "${resources.jobs.test.tasks[0].new_cluster.custom_tags.missing_tag}": field not set: tasks[0].new_cluster.custom_tags.missing_tag: key "missing_tag" not found in map - -Error: cannot plan resources.jobs.bar: dependency failed: resources.jobs.test +Error: cannot plan resources.jobs.bar: cannot resolve "${resources.jobs.test.tasks[0].new_cluster.custom_tags.missing_tag}": field not set: tasks[0].new_cluster.custom_tags.missing_tag: key "missing_tag" not found in map Error: planning failed diff --git a/acceptance/bundle/resource_deps/missing_map_key/out.plan.direct-exp.txt b/acceptance/bundle/resource_deps/missing_map_key/out.plan.direct-exp.txt index 93588a9da2..f9455fab35 100644 --- a/acceptance/bundle/resource_deps/missing_map_key/out.plan.direct-exp.txt +++ b/acceptance/bundle/resource_deps/missing_map_key/out.plan.direct-exp.txt @@ -1,6 +1,4 @@ -Error: cannot resolve "${resources.jobs.test.tasks[0].new_cluster.custom_tags.missing_tag}": field not set: tasks[0].new_cluster.custom_tags.missing_tag: key "missing_tag" not found in map - -Error: cannot plan resources.jobs.bar: dependency failed: resources.jobs.test +Error: cannot plan resources.jobs.bar: cannot resolve "${resources.jobs.test.tasks[0].new_cluster.custom_tags.missing_tag}": field not set: tasks[0].new_cluster.custom_tags.missing_tag: key "missing_tag" not found in map Error: planning failed diff --git a/acceptance/bundle/resource_deps/missing_map_key_tffix/out.deploy.direct-exp.txt b/acceptance/bundle/resource_deps/missing_map_key_tffix/out.deploy.direct-exp.txt index fa6972aac6..f6a4eb663c 100644 --- a/acceptance/bundle/resource_deps/missing_map_key_tffix/out.deploy.direct-exp.txt +++ b/acceptance/bundle/resource_deps/missing_map_key_tffix/out.deploy.direct-exp.txt @@ -1,7 +1,5 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/state/default/files... 
-Error: cannot resolve "${resources.jobs.test.task[0].new_cluster[0].custom_tags.missing_tag}": schema mismatch: task: field "task" not found in resources.Job; task: field "task" not found in jobs.Job - -Error: cannot plan resources.jobs.bar: dependency failed: resources.jobs.test +Error: cannot plan resources.jobs.bar: cannot resolve "${resources.jobs.test.task[0].new_cluster[0].custom_tags.missing_tag}": schema mismatch: task: field "task" not found in jobs.JobSettings; task: field "task" not found in jobs.Job Error: planning failed diff --git a/acceptance/bundle/resource_deps/missing_map_key_tffix/out.plan.direct-exp.txt b/acceptance/bundle/resource_deps/missing_map_key_tffix/out.plan.direct-exp.txt index 75fbe63d1e..20c9395e3a 100644 --- a/acceptance/bundle/resource_deps/missing_map_key_tffix/out.plan.direct-exp.txt +++ b/acceptance/bundle/resource_deps/missing_map_key_tffix/out.plan.direct-exp.txt @@ -1,6 +1,4 @@ -Error: cannot resolve "${resources.jobs.test.task[0].new_cluster[0].custom_tags.missing_tag}": schema mismatch: task: field "task" not found in resources.Job; task: field "task" not found in jobs.Job - -Error: cannot plan resources.jobs.bar: dependency failed: resources.jobs.test +Error: cannot plan resources.jobs.bar: cannot resolve "${resources.jobs.test.task[0].new_cluster[0].custom_tags.missing_tag}": schema mismatch: task: field "task" not found in jobs.JobSettings; task: field "task" not found in jobs.Job Error: planning failed diff --git a/acceptance/bundle/resource_deps/missing_string_field/out.deploy-requests.direct-exp.txt b/acceptance/bundle/resource_deps/missing_string_field/out.deploy-requests.direct-exp.txt deleted file mode 100644 index ff9aa8c0de..0000000000 --- a/acceptance/bundle/resource_deps/missing_string_field/out.deploy-requests.direct-exp.txt +++ /dev/null @@ -1,15 +0,0 @@ - ->>> print_requests -{ - "body": { - "channel": "CURRENT", - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edition": "ADVANCED", - "name": "pfoo" - }, - "method": "POST", - "path": "/api/2.0/pipelines" -} diff --git a/acceptance/bundle/resource_deps/missing_string_field/out.deploy-requests.terraform.txt b/acceptance/bundle/resource_deps/missing_string_field/out.deploy-requests.terraform.txt deleted file mode 100644 index 5c591d5d66..0000000000 --- a/acceptance/bundle/resource_deps/missing_string_field/out.deploy-requests.terraform.txt +++ /dev/null @@ -1,28 +0,0 @@ - ->>> print_requests -{ - "body": { - "channel": "CURRENT", - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edition": "ADVANCED", - "name": "pfoo" - }, - "method": "POST", - "path": "/api/2.0/pipelines" -} -{ - "body": { - "channel": "CURRENT", - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edition": "ADVANCED", - "name": "pbar" - }, - "method": "POST", - "path": "/api/2.0/pipelines" -} diff --git a/acceptance/bundle/resource_deps/missing_string_field/out.deploy.direct-exp.txt b/acceptance/bundle/resource_deps/missing_string_field/out.deploy.direct-exp.txt deleted file mode 100644 index 9fcfc91695..0000000000 --- a/acceptance/bundle/resource_deps/missing_string_field/out.deploy.direct-exp.txt +++ /dev/null @@ -1,13 +0,0 @@ -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... 
-Warning: expected a string value, found null - at resources.pipelines.bar.catalog - in databricks.yml:6:16 - -Deploying resources... -Error: failed to resolve reference "${resources.pipelines.foo.catalog}" for resources.pipelines.foo after deployment: field not set in remote state: catalog: field "catalog" not found in pipelines.GetPipelineResponse - -Error: cannot create resources.pipelines.bar: dependency failed: resources.pipelines.foo - -Updating deployment state... - -Exit code: 1 diff --git a/acceptance/bundle/resource_deps/missing_string_field/out.deploy.terraform.txt b/acceptance/bundle/resource_deps/missing_string_field/out.deploy.terraform.txt deleted file mode 100644 index 56e08e87b2..0000000000 --- a/acceptance/bundle/resource_deps/missing_string_field/out.deploy.terraform.txt +++ /dev/null @@ -1,4 +0,0 @@ -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... -Deploying resources... -Updating deployment state... -Deployment complete! diff --git a/acceptance/bundle/resource_deps/missing_string_field/out.plan.direct-exp.txt b/acceptance/bundle/resource_deps/missing_string_field/out.plan.direct-exp.txt deleted file mode 100644 index 35e13365cd..0000000000 --- a/acceptance/bundle/resource_deps/missing_string_field/out.plan.direct-exp.txt +++ /dev/null @@ -1,8 +0,0 @@ -Warning: expected a string value, found null - at resources.pipelines.bar.catalog - in databricks.yml:6:16 - -create pipelines.bar -create pipelines.foo - -Plan: 2 to add, 0 to change, 0 to delete, 0 unchanged diff --git a/acceptance/bundle/resource_deps/missing_string_field/out.plan.terraform.txt b/acceptance/bundle/resource_deps/missing_string_field/out.plan.terraform.txt deleted file mode 100644 index 11226504b4..0000000000 --- a/acceptance/bundle/resource_deps/missing_string_field/out.plan.terraform.txt +++ /dev/null @@ -1,4 +0,0 @@ -create pipelines.bar -create pipelines.foo - -Plan: 2 to add, 0 to change, 0 to delete, 0 unchanged diff --git a/acceptance/bundle/resource_deps/missing_string_field/output.txt b/acceptance/bundle/resource_deps/missing_string_field/output.txt index 90383c7746..94373c8086 100644 --- a/acceptance/bundle/resource_deps/missing_string_field/output.txt +++ b/acceptance/bundle/resource_deps/missing_string_field/output.txt @@ -25,5 +25,41 @@ } } } +create pipelines.bar +create pipelines.foo + +Plan: 2 to add, 0 to change, 0 to delete, 0 unchanged + +>>> print_requests +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
>>> print_requests +{ + "body": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "pfoo" + }, + "method": "POST", + "path": "/api/2.0/pipelines" +} +{ + "body": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "pbar" + }, + "method": "POST", + "path": "/api/2.0/pipelines" +} diff --git a/acceptance/bundle/resource_deps/missing_string_field/script b/acceptance/bundle/resource_deps/missing_string_field/script index 2af812fac5..2d6dc4fc1b 100644 --- a/acceptance/bundle/resource_deps/missing_string_field/script +++ b/acceptance/bundle/resource_deps/missing_string_field/script @@ -4,8 +4,8 @@ print_requests() { } trace $CLI bundle validate -o json | jq .resources -errcode $CLI bundle plan &> out.plan.$DATABRICKS_BUNDLE_ENGINE.txt +errcode $CLI bundle plan trace print_requests -errcode $CLI bundle deploy &> out.deploy.$DATABRICKS_BUNDLE_ENGINE.txt -trace print_requests &> out.deploy-requests.$DATABRICKS_BUNDLE_ENGINE.txt +errcode $CLI bundle deploy +trace print_requests diff --git a/acceptance/bundle/resource_deps/non_existent_field/out.deploy.direct-exp.txt b/acceptance/bundle/resource_deps/non_existent_field/out.deploy.direct-exp.txt index 1a1ebeadb1..bc046d557c 100644 --- a/acceptance/bundle/resource_deps/non_existent_field/out.deploy.direct-exp.txt +++ b/acceptance/bundle/resource_deps/non_existent_field/out.deploy.direct-exp.txt @@ -1,7 +1,5 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... -Error: cannot resolve "${resources.volumes.bar.non_existent}": schema mismatch: non_existent: field "non_existent" not found in resources.Volume; non_existent: field "non_existent" not found in catalog.VolumeInfo - -Error: cannot plan resources.volumes.foo: dependency failed: resources.volumes.bar +Error: cannot plan resources.volumes.foo: cannot resolve "${resources.volumes.bar.non_existent}": schema mismatch: non_existent: field "non_existent" not found in catalog.CreateVolumeRequestContent; non_existent: field "non_existent" not found in catalog.VolumeInfo Error: planning failed diff --git a/acceptance/bundle/resource_deps/non_existent_field/out.plan.direct-exp.txt b/acceptance/bundle/resource_deps/non_existent_field/out.plan.direct-exp.txt index 090ff5ddd2..3dd0eeb24d 100644 --- a/acceptance/bundle/resource_deps/non_existent_field/out.plan.direct-exp.txt +++ b/acceptance/bundle/resource_deps/non_existent_field/out.plan.direct-exp.txt @@ -1,6 +1,4 @@ -Error: cannot resolve "${resources.volumes.bar.non_existent}": schema mismatch: non_existent: field "non_existent" not found in resources.Volume; non_existent: field "non_existent" not found in catalog.VolumeInfo - -Error: cannot plan resources.volumes.foo: dependency failed: resources.volumes.bar +Error: cannot plan resources.volumes.foo: cannot resolve "${resources.volumes.bar.non_existent}": schema mismatch: non_existent: field "non_existent" not found in catalog.CreateVolumeRequestContent; non_existent: field "non_existent" not found in catalog.VolumeInfo Error: planning failed diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_create.direct-exp.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_create.direct-exp.json new file mode 100644 index 
0000000000..e257d6f180 --- /dev/null +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_create.direct-exp.json @@ -0,0 +1,47 @@ +{ + "plan": { + "resources.jobs.bar": { + "depends_on": [ + { + "node": "resources.pipelines.foo", + "label": "${resources.pipelines.foo.id}" + } + ], + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "depends on foo id ${resources.pipelines.foo.id}", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "job bar", + "queue": { + "enabled": true + } + }, + "vars": { + "description": "depends on foo id ${resources.pipelines.foo.id}" + } + } + }, + "resources.pipelines.foo": { + "action": "create", + "new_state": { + "config": { + "catalog": "mycatalog", + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "pipeline foo" + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_create.terraform.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_create.terraform.json new file mode 100644 index 0000000000..3b2a84ef16 --- /dev/null +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_create.terraform.json @@ -0,0 +1,10 @@ +{ + "plan": { + "resources.jobs.bar": { + "action": "create" + }, + "resources.pipelines.foo": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_noop.direct-exp.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_noop.direct-exp.json new file mode 100644 index 0000000000..70dc564ef6 --- /dev/null +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_noop.direct-exp.json @@ -0,0 +1,16 @@ +{ + "plan": { + "resources.jobs.bar": { + "depends_on": [ + { + "node": "resources.pipelines.foo", + "label": "${resources.pipelines.foo.id}" + } + ], + "action": "skip" + }, + "resources.pipelines.foo": { + "action": "skip" + } + } +} diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_noop.terraform.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_noop.terraform.json new file mode 100644 index 0000000000..413c03650d --- /dev/null +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_noop.terraform.json @@ -0,0 +1,10 @@ +{ + "plan": { + "resources.jobs.bar": { + "action": "skip" + }, + "resources.pipelines.foo": { + "action": "skip" + } + } +} diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct-exp.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct-exp.json new file mode 100644 index 0000000000..56b621de68 --- /dev/null +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct-exp.json @@ -0,0 +1,61 @@ +{ + "plan": { + "resources.jobs.bar": { + "depends_on": [ + { + "node": "resources.pipelines.foo", + "label": "${resources.pipelines.foo.id}" + } + ], + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "description": "depends on foo id ${resources.pipelines.foo.id}", + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "job bar", + "queue": 
{ + "enabled": true + } + }, + "vars": { + "description": "depends on foo id ${resources.pipelines.foo.id}" + } + }, + "changes": { + "local": { + "description": { + "action": "update" + } + } + } + }, + "resources.pipelines.foo": { + "action": "recreate", + "new_state": { + "config": { + "catalog": "mynewcatalog", + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "pipeline foo" + } + }, + "changes": { + "local": { + "catalog": { + "action": "recreate" + } + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.terraform.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.terraform.json new file mode 100644 index 0000000000..67167aa76d --- /dev/null +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.terraform.json @@ -0,0 +1,10 @@ +{ + "plan": { + "resources.jobs.bar": { + "action": "update" + }, + "resources.pipelines.foo": { + "action": "recreate" + } + } +} diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/script b/acceptance/bundle/resource_deps/pipelines_recreate/script index 1a22960b91..c3e9e209c2 100644 --- a/acceptance/bundle/resource_deps/pipelines_recreate/script +++ b/acceptance/bundle/resource_deps/pipelines_recreate/script @@ -10,6 +10,7 @@ print_sorted_requests() { echo "*" > .gitignore trace $CLI bundle plan +$CLI bundle debug plan > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace print_requests @@ -24,6 +25,7 @@ trace $CLI bundle plan # empty title "Update catalog, triggering recreate for pipeline; this means updating downstream deps" trace update_file.py databricks.yml mycatalog mynewcatalog trace $CLI bundle plan +$CLI bundle debug plan > out.plan_update.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy --auto-approve trace print_requests @@ -39,6 +41,7 @@ rm out.requests.txt title "Follow up plan & deploy do nothing" trace $CLI bundle plan +$CLI bundle debug plan > out.plan_noop.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace print_requests diff --git a/acceptance/bundle/resource_deps/remote_pipeline/out.plan_create.direct-exp.json b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..f74727981e --- /dev/null +++ b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_create.direct-exp.json @@ -0,0 +1,87 @@ +{ + "plan": { + "resources.pipelines.foo1": { + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo1 name" + } + } + }, + "resources.pipelines.foo2": { + "depends_on": [ + { + "node": "resources.pipelines.foo1", + "label": "${resources.pipelines.foo1.creator_user_name}" + } + ], + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo2 foo1.creator_user_name=${resources.pipelines.foo1.creator_user_name}" + }, + "vars": { + "name": "foo2 foo1.creator_user_name=${resources.pipelines.foo1.creator_user_name}" + } + } + }, + "resources.pipelines.foo3": { + "depends_on": [ + { + "node": 
"resources.pipelines.foo2", + "label": "${resources.pipelines.foo2.state}" + } + ], + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo3 foo2.state=${resources.pipelines.foo2.state}" + }, + "vars": { + "name": "foo3 foo2.state=${resources.pipelines.foo2.state}" + } + } + }, + "resources.pipelines.foo4": { + "depends_on": [ + { + "node": "resources.pipelines.foo3", + "label": "${resources.pipelines.foo3.last_modified}" + } + ], + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo4 foo3.last_modified=${resources.pipelines.foo3.last_modified}" + }, + "vars": { + "name": "foo4 foo3.last_modified=${resources.pipelines.foo3.last_modified}" + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/remote_pipeline/out.plan_create.terraform.json b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_create.terraform.json new file mode 100644 index 0000000000..ce01217a2e --- /dev/null +++ b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_create.terraform.json @@ -0,0 +1,16 @@ +{ + "plan": { + "resources.pipelines.foo1": { + "action": "create" + }, + "resources.pipelines.foo2": { + "action": "create" + }, + "resources.pipelines.foo3": { + "action": "create" + }, + "resources.pipelines.foo4": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resource_deps/remote_pipeline/out.plan_skip.direct-exp.json b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_skip.direct-exp.json new file mode 100644 index 0000000000..2808200ab3 --- /dev/null +++ b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_skip.direct-exp.json @@ -0,0 +1,97 @@ +{ + "plan": { + "resources.pipelines.foo1": { + "action": "skip" + }, + "resources.pipelines.foo2": { + "depends_on": [ + { + "node": "resources.pipelines.foo1", + "label": "${resources.pipelines.foo1.creator_user_name}" + } + ], + "action": "update", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo2 foo1.creator_user_name=${resources.pipelines.foo1.creator_user_name}" + }, + "vars": { + "name": "foo2 foo1.creator_user_name=${resources.pipelines.foo1.creator_user_name}" + } + }, + "changes": { + "local": { + "name": { + "action": "update" + } + } + } + }, + "resources.pipelines.foo3": { + "depends_on": [ + { + "node": "resources.pipelines.foo2", + "label": "${resources.pipelines.foo2.state}" + } + ], + "action": "update", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo3 foo2.state=${resources.pipelines.foo2.state}" + }, + "vars": { + "name": "foo3 foo2.state=${resources.pipelines.foo2.state}" + } + }, + "changes": { + "local": { + "name": { + "action": "update" + } + } + } + }, + "resources.pipelines.foo4": { + "depends_on": [ + { + "node": "resources.pipelines.foo3", + "label": "${resources.pipelines.foo3.last_modified}" + } + ], + "action": 
"update", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "foo4 foo3.last_modified=${resources.pipelines.foo3.last_modified}" + }, + "vars": { + "name": "foo4 foo3.last_modified=${resources.pipelines.foo3.last_modified}" + } + }, + "changes": { + "local": { + "name": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/remote_pipeline/out.plan_skip.terraform.json b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_skip.terraform.json new file mode 100644 index 0000000000..bc326c9cd2 --- /dev/null +++ b/acceptance/bundle/resource_deps/remote_pipeline/out.plan_skip.terraform.json @@ -0,0 +1,16 @@ +{ + "plan": { + "resources.pipelines.foo1": { + "action": "skip" + }, + "resources.pipelines.foo2": { + "action": "skip" + }, + "resources.pipelines.foo3": { + "action": "skip" + }, + "resources.pipelines.foo4": { + "action": "skip" + } + } +} diff --git a/acceptance/bundle/resource_deps/remote_pipeline/output.txt b/acceptance/bundle/resource_deps/remote_pipeline/output.txt index b61c3bb16e..1f86775154 100644 --- a/acceptance/bundle/resource_deps/remote_pipeline/output.txt +++ b/acceptance/bundle/resource_deps/remote_pipeline/output.txt @@ -10,3 +10,5 @@ Deployment complete! "foo2 foo1.creator_user_name=[USERNAME]" "foo3 foo2.state=IDLE" "foo4 foo3.last_modified=[UNIX_TIME_MILLIS]" + +>>> print_requests diff --git a/acceptance/bundle/resource_deps/remote_pipeline/script b/acceptance/bundle/resource_deps/remote_pipeline/script index 6d33abe099..0449736e2a 100644 --- a/acceptance/bundle/resource_deps/remote_pipeline/script +++ b/acceptance/bundle/resource_deps/remote_pipeline/script @@ -3,5 +3,8 @@ print_requests() { rm out.requests.txt } +$CLI bundle debug plan > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace print_requests +$CLI bundle debug plan > out.plan_skip.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests diff --git a/acceptance/bundle/resource_deps/resources_var/out.plan.direct-exp.json b/acceptance/bundle/resource_deps/resources_var/out.plan.direct-exp.json new file mode 100644 index 0000000000..12eb8c32ff --- /dev/null +++ b/acceptance/bundle/resource_deps/resources_var/out.plan.direct-exp.json @@ -0,0 +1,68 @@ +{ + "plan": { + "resources.pipelines.mypipeline": { + "depends_on": [ + { + "node": "resources.volumes.foo", + "label": "${resources.volumes.foo.catalog_name}" + }, + { + "node": "resources.volumes.foo", + "label": "${resources.volumes.foo.name}" + }, + { + "node": "resources.volumes.foo", + "label": "${resources.volumes.foo.schema_name}" + } + ], + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/dev/state/metadata.json" + }, + "development": true, + "edition": "ADVANCED", + "name": "[dev [USERNAME]] pipeline for mycatalog.myschema.myname", + "tags": { + "dev": "[USERNAME]" + } + } + } + }, + "resources.volumes.bar": { + "action": "create", + "new_state": { + "config": { + "catalog_name": "mycatalog", + "name": "barname", + "schema_name": "myschema", + "volume_type": "MANAGED" + } + } + }, + "resources.volumes.foo": { + "depends_on": [ + { + "node": "resources.volumes.bar", + "label": "${resources.volumes.bar.catalog_name}" + }, + { + "node": "resources.volumes.bar", + "label": 
"${resources.volumes.bar.schema_name}" + } + ], + "action": "create", + "new_state": { + "config": { + "catalog_name": "mycatalog", + "name": "myname", + "schema_name": "myschema", + "volume_type": "MANAGED" + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/resources_var/out.plan.terraform.json b/acceptance/bundle/resource_deps/resources_var/out.plan.terraform.json new file mode 100644 index 0000000000..2231126738 --- /dev/null +++ b/acceptance/bundle/resource_deps/resources_var/out.plan.terraform.json @@ -0,0 +1,13 @@ +{ + "plan": { + "resources.pipelines.mypipeline": { + "action": "create" + }, + "resources.volumes.bar": { + "action": "create" + }, + "resources.volumes.foo": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resource_deps/resources_var/script b/acceptance/bundle/resource_deps/resources_var/script index 5991f0d68a..b1b7eefc62 100644 --- a/acceptance/bundle/resource_deps/resources_var/script +++ b/acceptance/bundle/resource_deps/resources_var/script @@ -1,4 +1,5 @@ trace $CLI bundle validate -t dev -o json | jq .resources +$CLI bundle debug plan > out.plan.$DATABRICKS_BUNDLE_ENGINE.json trace errcode $CLI bundle deploy -t dev &> out.deploy.txt trace jq -s '.[] | select(.path=="/api/2.0/pipelines") | .body.name' out.requests.txt trace print_telemetry_bool_values diff --git a/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/out.plan.direct-exp.json b/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/out.plan.direct-exp.json new file mode 100644 index 0000000000..65df21a075 --- /dev/null +++ b/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/out.plan.direct-exp.json @@ -0,0 +1,66 @@ +{ + "plan": { + "resources.pipelines.mypipeline": { + "depends_on": [ + { + "node": "resources.volumes.foo", + "label": "${resources.volumes.foo.catalog_name}" + }, + { + "node": "resources.volumes.foo", + "label": "${resources.volumes.foo.name}" + }, + { + "node": "resources.volumes.foo", + "label": "${resources.volumes.foo.schema_name}" + } + ], + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/dev/state/metadata.json" + }, + "development": true, + "edition": "ADVANCED", + "name": "[dev [USERNAME]] pipeline for mycatalog.dev_[USERNAME]_myschema.myname", + "tags": { + "dev": "[USERNAME]" + } + } + } + }, + "resources.schemas.bar": { + "action": "create", + "new_state": { + "config": { + "catalog_name": "mycatalog", + "name": "dev_[USERNAME]_myschema" + } + } + }, + "resources.volumes.foo": { + "depends_on": [ + { + "node": "resources.schemas.bar", + "label": "${resources.schemas.bar.catalog_name}" + }, + { + "node": "resources.schemas.bar", + "label": "${resources.schemas.bar.name}" + } + ], + "action": "create", + "new_state": { + "config": { + "catalog_name": "mycatalog", + "name": "myname", + "schema_name": "dev_[USERNAME]_myschema", + "volume_type": "MANAGED" + } + } + } + } +} diff --git a/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/out.plan.terraform.json b/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/out.plan.terraform.json new file mode 100644 index 0000000000..164e053958 --- /dev/null +++ b/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/out.plan.terraform.json @@ -0,0 +1,13 @@ +{ + "plan": { + "resources.pipelines.mypipeline": { + "action": "create" + }, + "resources.schemas.bar": { + "action": 
"create" + }, + "resources.volumes.foo": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/script b/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/script index 8d92562801..b82ba5e649 100644 --- a/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/script +++ b/acceptance/bundle/resource_deps/resources_var_presets_implicit_deps/script @@ -1,4 +1,5 @@ trace $CLI bundle validate -t dev -o json | jq .resources +$CLI bundle debug plan -t dev > out.plan.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy -t dev trace jq -s '.[] | select(.path=="/api/2.0/pipelines") | .body.name' out.requests.txt rm out.requests.txt diff --git a/acceptance/bundle/resources/jobs/delete_task/databricks.yml b/acceptance/bundle/resources/jobs/delete_task/databricks.yml new file mode 100644 index 0000000000..6b01047f7a --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/databricks.yml @@ -0,0 +1,25 @@ +bundle: + name: test-bundle + +resources: + jobs: + job: + tasks: + - task_key: TestTask1 # TO_DELETE + existing_cluster_id: "0717-132531-5opeqon1" # TO_DELETE + python_wheel_task: # TO_DELETE + package_name: "whl" # TO_DELETE + entry_point: "run" # TO_DELETE + libraries: # TO_DELETE + - whl: /Workspace/Users/foo@bar.com/mywheel.whl # TO_DELETE + - task_key: TestTask2 + for_each_task: + inputs: "[1]" + task: + task_key: TestTask2 + existing_cluster_id: "0717-132531-5opeqon1" + python_wheel_task: + package_name: "whl" + entry_point: "run" + libraries: + - whl: /Workspace/Users/foo@bar.com/mywheel.whl diff --git a/acceptance/bundle/resources/jobs/delete_task/out.plan_create.direct-exp.json b/acceptance/bundle/resources/jobs/delete_task/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..8d9c1211a2 --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/out.plan_create.direct-exp.json @@ -0,0 +1,56 @@ +{ + "plan": { + "resources.jobs.job": { + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "Untitled", + "queue": { + "enabled": true + }, + "tasks": [ + { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/foo@bar.com/mywheel.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "whl" + }, + "task_key": "TestTask1" + }, + { + "for_each_task": { + "inputs": "[1]", + "task": { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/foo@bar.com/mywheel.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "whl" + }, + "task_key": "TestTask2" + } + }, + "task_key": "TestTask2" + } + ] + } + } + } + } +} diff --git a/acceptance/bundle/resources/jobs/delete_task/out.plan_create.terraform.json b/acceptance/bundle/resources/jobs/delete_task/out.plan_create.terraform.json new file mode 100644 index 0000000000..a44c5d27e1 --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.job": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resources/jobs/delete_task/out.plan_update.direct-exp.json b/acceptance/bundle/resources/jobs/delete_task/out.plan_update.direct-exp.json new file mode 100644 index 
0000000000..af38ceb449 --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/out.plan_update.direct-exp.json @@ -0,0 +1,50 @@ +{ + "plan": { + "resources.jobs.job": { + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "Untitled", + "queue": { + "enabled": true + }, + "tasks": [ + { + "for_each_task": { + "inputs": "[1]", + "task": { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/foo@bar.com/mywheel.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "whl" + }, + "task_key": "TestTask2" + } + }, + "task_key": "TestTask2" + } + ] + } + }, + "changes": { + "local": { + "tasks": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/jobs/delete_task/out.plan_update.terraform.json b/acceptance/bundle/resources/jobs/delete_task/out.plan_update.terraform.json new file mode 100644 index 0000000000..50ebd484c2 --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/out.plan_update.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.job": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/resources/jobs/delete_task/out.test.toml b/acceptance/bundle/resources/jobs/delete_task/out.test.toml new file mode 100644 index 0000000000..e092fd5ed6 --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct-exp"] diff --git a/acceptance/bundle/resources/jobs/delete_task/output.txt b/acceptance/bundle/resources/jobs/delete_task/output.txt new file mode 100644 index 0000000000..70723abc4e --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/output.txt @@ -0,0 +1,32 @@ + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> cat databricks.yml +bundle: + name: test-bundle + +resources: + jobs: + job: + tasks: + - task_key: TestTask2 + for_each_task: + inputs: "[1]" + task: + task_key: TestTask2 + existing_cluster_id: "0717-132531-5opeqon1" + python_wheel_task: + package_name: "whl" + entry_point: "run" + libraries: + - whl: /Workspace/Users/foo@bar.com/mywheel.whl + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
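The delete_task fixtures above show how the direct engine reports a removed task: the plan entry keeps action "update" and records a single local trigger on "tasks". A minimal Go sketch of consuming such a plan file, assuming only the JSON shape shown in these fixtures (the file name is illustrative):

// Minimal sketch: read a plan file like the ones above and print each
// resource's action. Only the fields shown in these fixtures are modeled.
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

type planFile struct {
	Plan map[string]struct {
		Action string `json:"action"`
	} `json:"plan"`
}

func main() {
	data, err := os.ReadFile("out.plan_update.direct-exp.json") // hypothetical path
	if err != nil {
		panic(err)
	}
	var p planFile
	if err := json.Unmarshal(data, &p); err != nil {
		panic(err)
	}
	for key, entry := range p.Plan {
		fmt.Printf("%s -> %s\n", key, entry.Action) // e.g. resources.jobs.job -> update
	}
}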
diff --git a/acceptance/bundle/resources/jobs/delete_task/script b/acceptance/bundle/resources/jobs/delete_task/script new file mode 100644 index 0000000000..a59fb3177a --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/script @@ -0,0 +1,6 @@ +$CLI bundle debug plan > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json +trace $CLI bundle deploy +grep -v TO_DELETE databricks.yml > tmp.yml && mv tmp.yml databricks.yml +trace cat databricks.yml +$CLI bundle debug plan > out.plan_update.$DATABRICKS_BUNDLE_ENGINE.json +trace $CLI bundle deploy diff --git a/acceptance/bundle/resources/jobs/delete_task/test.toml b/acceptance/bundle/resources/jobs/delete_task/test.toml new file mode 100644 index 0000000000..a030353d57 --- /dev/null +++ b/acceptance/bundle/resources/jobs/delete_task/test.toml @@ -0,0 +1 @@ +RecordRequests = false diff --git a/acceptance/bundle/resources/jobs/update/out.plan_create.direct-exp.json b/acceptance/bundle/resources/jobs/update/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..2e84ac1f15 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update/out.plan_create.direct-exp.json @@ -0,0 +1,38 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "job_clusters": [ + { + "job_cluster_key": "key", + "new_cluster": { + "num_workers": 0, + "spark_version": "13.3.x-scala2.12" + } + } + ], + "max_concurrent_runs": 1, + "name": "foo", + "queue": { + "enabled": true + }, + "trigger": { + "pause_status": "UNPAUSED", + "periodic": { + "interval": 1, + "unit": "DAYS" + } + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/jobs/update/out.plan_create.terraform.json b/acceptance/bundle/resources/jobs/update/out.plan_create.terraform.json new file mode 100644 index 0000000000..0d9258d845 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resources/jobs/update/out.plan_update.direct-exp.json b/acceptance/bundle/resources/jobs/update/out.plan_update.direct-exp.json new file mode 100644 index 0000000000..0c96ed19e0 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update/out.plan_update.direct-exp.json @@ -0,0 +1,45 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "job_clusters": [ + { + "job_cluster_key": "key", + "new_cluster": { + "num_workers": 0, + "spark_version": "13.3.x-scala2.12" + } + } + ], + "max_concurrent_runs": 1, + "name": "foo", + "queue": { + "enabled": true + }, + "trigger": { + "pause_status": "UNPAUSED", + "periodic": { + "interval": 1, + "unit": "HOURS" + } + } + } + }, + "changes": { + "local": { + "trigger.periodic.unit": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/jobs/update/out.plan_update.terraform.json b/acceptance/bundle/resources/jobs/update/out.plan_update.terraform.json new file mode 100644 index 0000000000..970b7d5539 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update/out.plan_update.terraform.json @@ -0,0 +1,7 @@ +{ + 
"plan": { + "resources.jobs.foo": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/resources/jobs/update/output.txt b/acceptance/bundle/resources/jobs/update/output.txt index 4ecfe5ef17..ee05b5f8b2 100644 --- a/acceptance/bundle/resources/jobs/update/output.txt +++ b/acceptance/bundle/resources/jobs/update/output.txt @@ -4,15 +4,6 @@ create jobs.foo Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.jobs.foo": { - "action": "create" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... @@ -75,15 +66,6 @@ update jobs.foo Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.jobs.foo": { - "action": "update" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... diff --git a/acceptance/bundle/resources/jobs/update/script b/acceptance/bundle/resources/jobs/update/script index 97f7c54b4a..3997fc6995 100644 --- a/acceptance/bundle/resources/jobs/update/script +++ b/acceptance/bundle/resources/jobs/update/script @@ -1,6 +1,6 @@ echo "*" > .gitignore trace $CLI bundle plan -trace $CLI bundle debug plan +$CLI bundle debug plan &> out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace $CLI bundle plan trace $CLI bundle debug plan @@ -16,7 +16,7 @@ trace print_requests title "Update trigger.periodic.unit and re-deploy" trace update_file.py databricks.yml DAYS HOURS trace $CLI bundle plan -trace $CLI bundle debug plan +$CLI bundle debug plan &> out.plan_update.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace $CLI bundle plan trace $CLI bundle debug plan diff --git a/acceptance/bundle/resources/jobs/update_single_node/out.plan_create.direct-exp.json b/acceptance/bundle/resources/jobs/update_single_node/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..13ddbfdd15 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update_single_node/out.plan_create.direct-exp.json @@ -0,0 +1,45 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "create", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "job_clusters": [ + { + "job_cluster_key": "key", + "new_cluster": { + "custom_tags": { + "ResourceClass": "SingleNode" + }, + "num_workers": 0, + "spark_conf": { + "spark.databricks.cluster.profile": "singleNode", + "spark.master": "local[*]" + }, + "spark_version": "13.3.x-scala2.12" + } + } + ], + "max_concurrent_runs": 1, + "name": "foo", + "queue": { + "enabled": true + }, + "trigger": { + "pause_status": "UNPAUSED", + "periodic": { + "interval": 1, + "unit": "DAYS" + } + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/jobs/update_single_node/out.plan_create.terraform.json b/acceptance/bundle/resources/jobs/update_single_node/out.plan_create.terraform.json new file mode 100644 index 0000000000..0d9258d845 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update_single_node/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resources/jobs/update_single_node/out.plan_update.direct-exp.json 
b/acceptance/bundle/resources/jobs/update_single_node/out.plan_update.direct-exp.json new file mode 100644 index 0000000000..5ffbc7adee --- /dev/null +++ b/acceptance/bundle/resources/jobs/update_single_node/out.plan_update.direct-exp.json @@ -0,0 +1,52 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "update", + "new_state": { + "config": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "job_clusters": [ + { + "job_cluster_key": "key", + "new_cluster": { + "custom_tags": { + "ResourceClass": "SingleNode" + }, + "num_workers": 0, + "spark_conf": { + "spark.databricks.cluster.profile": "singleNode", + "spark.master": "local[*]" + }, + "spark_version": "13.3.x-scala2.12" + } + } + ], + "max_concurrent_runs": 1, + "name": "foo", + "queue": { + "enabled": true + }, + "trigger": { + "pause_status": "UNPAUSED", + "periodic": { + "interval": 1, + "unit": "HOURS" + } + } + } + }, + "changes": { + "local": { + "trigger.periodic.unit": { + "action": "update" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/jobs/update_single_node/out.plan_update.terraform.json b/acceptance/bundle/resources/jobs/update_single_node/out.plan_update.terraform.json new file mode 100644 index 0000000000..970b7d5539 --- /dev/null +++ b/acceptance/bundle/resources/jobs/update_single_node/out.plan_update.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.jobs.foo": { + "action": "update" + } + } +} diff --git a/acceptance/bundle/resources/jobs/update_single_node/output.txt b/acceptance/bundle/resources/jobs/update_single_node/output.txt index 29a7f3785f..6e6b0a8e25 100644 --- a/acceptance/bundle/resources/jobs/update_single_node/output.txt +++ b/acceptance/bundle/resources/jobs/update_single_node/output.txt @@ -4,15 +4,6 @@ create jobs.foo Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.jobs.foo": { - "action": "create" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... @@ -79,15 +70,6 @@ update jobs.foo Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.jobs.foo": { - "action": "update" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... 
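The update and update_single_node fixtures above show a field-level trigger: changing trigger.periodic.unit from DAYS to HOURS yields changes.local["trigger.periodic.unit"] with action "update". This maps onto the Changes and Trigger types added to bundle/deployplan later in this diff; a small standalone sketch reproducing that JSON (local copies of the types, not the real package):

// Standalone copies of the Trigger/Changes shapes used by the plan output.
package main

import (
	"encoding/json"
	"fmt"
)

type Trigger struct {
	Action string `json:"action"`
	Reason string `json:"reason,omitempty"`
}

type Changes struct {
	Local  map[string]Trigger `json:"local,omitempty"`
	Remote map[string]Trigger `json:"remote,omitempty"`
}

func main() {
	c := Changes{Local: map[string]Trigger{
		"trigger.periodic.unit": {Action: "update"},
	}}
	out, _ := json.MarshalIndent(c, "", "  ")
	fmt.Println(string(out)) // prints the value of the "changes" key in the fixture above
}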
diff --git a/acceptance/bundle/resources/jobs/update_single_node/script b/acceptance/bundle/resources/jobs/update_single_node/script index fcf5ee3785..37fb768994 100644 --- a/acceptance/bundle/resources/jobs/update_single_node/script +++ b/acceptance/bundle/resources/jobs/update_single_node/script @@ -1,6 +1,6 @@ echo "*" > .gitignore trace $CLI bundle plan -trace $CLI bundle debug plan +$CLI bundle debug plan &> out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace $CLI bundle debug plan @@ -15,7 +15,7 @@ trace print_requests title "Update trigger.periodic.unit and re-deploy" trace update_file.py databricks.yml DAYS HOURS trace $CLI bundle plan -trace $CLI bundle debug plan +$CLI bundle debug plan &> out.plan_update.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace print_requests diff --git a/acceptance/bundle/resources/pipelines/recreate/_script b/acceptance/bundle/resources/pipelines/recreate/_script index e083bf6d22..6b71668be5 100644 --- a/acceptance/bundle/resources/pipelines/recreate/_script +++ b/acceptance/bundle/resources/pipelines/recreate/_script @@ -2,7 +2,7 @@ trace cat databricks.yml touch foo.py touch bar.py trace $CLI bundle plan # should show 'create' -trace $CLI bundle debug plan +$CLI bundle debug plan > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy ppid1=`read_id.py pipelines my` @@ -17,7 +17,7 @@ trace print_requests trace update_file.py databricks.yml $CONFIG_UPDATE trace $CLI bundle plan # should show 'recreate' -trace $CLI bundle debug plan +$CLI bundle debug plan > out.plan_recreate.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy --auto-approve trace print_requests diff --git a/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_create.direct-exp.json b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..8d62311a09 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_create.direct-exp.json @@ -0,0 +1,26 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "create", + "new_state": { + "config": { + "catalog": "mycatalog1", + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/state/metadata.json" + }, + "edition": "ADVANCED", + "libraries": [ + { + "file": { + "path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files/foo.py" + } + } + ], + "name": "test-pipeline-[UNIQUE_NAME]" + } + } + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_create.terraform.json b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_create.terraform.json new file mode 100644 index 0000000000..416f50ddf5 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_recreate.direct-exp.json b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_recreate.direct-exp.json new file mode 100644 index 0000000000..d7621209ce --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_recreate.direct-exp.json @@ -0,0 +1,33 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "recreate", + "new_state": { + "config": { + "catalog": 
"mycatalog2", + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/state/metadata.json" + }, + "edition": "ADVANCED", + "libraries": [ + { + "file": { + "path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files/foo.py" + } + } + ], + "name": "test-pipeline-[UNIQUE_NAME]" + } + }, + "changes": { + "local": { + "catalog": { + "action": "recreate" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_recreate.terraform.json b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_recreate.terraform.json new file mode 100644 index 0000000000..c183c81dd2 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-catalog/out.plan_recreate.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "recreate" + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-catalog/output.txt b/acceptance/bundle/resources/pipelines/recreate/change-catalog/output.txt index 490c109b5c..4df66efa43 100644 --- a/acceptance/bundle/resources/pipelines/recreate/change-catalog/output.txt +++ b/acceptance/bundle/resources/pipelines/recreate/change-catalog/output.txt @@ -19,15 +19,6 @@ create pipelines.my Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.pipelines.my": { - "action": "create" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files... Deploying resources... @@ -64,15 +55,6 @@ recreate pipelines.my Plan: 1 to add, 0 to change, 1 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.pipelines.my": { - "action": "recreate" - } - } -} - >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files... 
diff --git a/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_create.direct-exp.json b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..7cf160da74 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_create.direct-exp.json @@ -0,0 +1,31 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/state/metadata.json" + }, + "edition": "ADVANCED", + "ingestion_definition": { + "connection_name": "my_connection", + "objects": [ + {} + ] + }, + "libraries": [ + { + "file": { + "path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files/foo.py" + } + } + ], + "name": "test-pipeline-[UNIQUE_NAME]" + } + } + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_create.terraform.json b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_create.terraform.json new file mode 100644 index 0000000000..416f50ddf5 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_recreate.direct-exp.json b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_recreate.direct-exp.json new file mode 100644 index 0000000000..922e3b551e --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_recreate.direct-exp.json @@ -0,0 +1,38 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "recreate", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/state/metadata.json" + }, + "edition": "ADVANCED", + "ingestion_definition": { + "connection_name": "my_new_connection", + "objects": [ + {} + ] + }, + "libraries": [ + { + "file": { + "path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files/foo.py" + } + } + ], + "name": "test-pipeline-[UNIQUE_NAME]" + } + }, + "changes": { + "local": { + "ingestion_definition.connection_name": { + "action": "recreate" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_recreate.terraform.json b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_recreate.terraform.json new file mode 100644 index 0000000000..c183c81dd2 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/out.plan_recreate.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "recreate" + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/output.txt b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/output.txt index ad3937f208..8b925fb054 100644 --- a/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/output.txt +++ b/acceptance/bundle/resources/pipelines/recreate/change-ingestion-definition/output.txt @@ -19,15 
+19,6 @@ create pipelines.my Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.pipelines.my": { - "action": "create" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files... Deploying resources... @@ -69,15 +60,6 @@ recreate pipelines.my Plan: 1 to add, 0 to change, 1 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.pipelines.my": { - "action": "recreate" - } - } -} - >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_create.direct-exp.json b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_create.direct-exp.json new file mode 100644 index 0000000000..808eb2cc90 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_create.direct-exp.json @@ -0,0 +1,26 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "create", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/state/metadata.json" + }, + "edition": "ADVANCED", + "libraries": [ + { + "file": { + "path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files/foo.py" + } + } + ], + "name": "test-pipeline-[UNIQUE_NAME]", + "storage": "dbfs:/pipelines/custom" + } + } + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_create.terraform.json b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_create.terraform.json new file mode 100644 index 0000000000..416f50ddf5 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_create.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "create" + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_recreate.direct-exp.json b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_recreate.direct-exp.json new file mode 100644 index 0000000000..aed63519a3 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_recreate.direct-exp.json @@ -0,0 +1,33 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "recreate", + "new_state": { + "config": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/state/metadata.json" + }, + "edition": "ADVANCED", + "libraries": [ + { + "file": { + "path": "/Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files/foo.py" + } + } + ], + "name": "test-pipeline-[UNIQUE_NAME]", + "storage": "dbfs:/pipelines/newcustom" + } + }, + "changes": { + "local": { + "storage": { + "action": "recreate" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_recreate.terraform.json b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_recreate.terraform.json new file mode 100644 index 0000000000..c183c81dd2 --- /dev/null +++ b/acceptance/bundle/resources/pipelines/recreate/change-storage/out.plan_recreate.terraform.json @@ -0,0 +1,7 @@ +{ + "plan": { + "resources.pipelines.my": { + "action": "recreate" + } + } +} diff --git 
a/acceptance/bundle/resources/pipelines/recreate/change-storage/output.txt b/acceptance/bundle/resources/pipelines/recreate/change-storage/output.txt index dbdb39a493..47ee162d8c 100644 --- a/acceptance/bundle/resources/pipelines/recreate/change-storage/output.txt +++ b/acceptance/bundle/resources/pipelines/recreate/change-storage/output.txt @@ -19,15 +19,6 @@ create pipelines.my Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.pipelines.my": { - "action": "create" - } - } -} - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files... Deploying resources... @@ -64,15 +55,6 @@ recreate pipelines.my Plan: 1 to add, 0 to change, 1 to delete, 0 unchanged ->>> [CLI] bundle debug plan -{ - "plan": { - "resources.pipelines.my": { - "action": "recreate" - } - } -} - >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/resources/volumes/change-name/out.plan.direct-exp.json b/acceptance/bundle/resources/volumes/change-name/out.plan.direct-exp.json new file mode 100644 index 0000000000..4ce7dbaa24 --- /dev/null +++ b/acceptance/bundle/resources/volumes/change-name/out.plan.direct-exp.json @@ -0,0 +1,23 @@ +{ + "plan": { + "resources.volumes.volume1": { + "action": "update_id", + "new_state": { + "config": { + "catalog_name": "main", + "comment": "COMMENT1", + "name": "mynewvolume", + "schema_name": "myschema", + "volume_type": "MANAGED" + } + }, + "changes": { + "local": { + "name": { + "action": "update_id" + } + } + } + } + } +} diff --git a/acceptance/bundle/resources/volumes/change-name/out.plan.direct-exp.txt b/acceptance/bundle/resources/volumes/change-name/out.plan.direct-exp.txt deleted file mode 100644 index 7e955f5250..0000000000 --- a/acceptance/bundle/resources/volumes/change-name/out.plan.direct-exp.txt +++ /dev/null @@ -1,7 +0,0 @@ -{ - "plan": { - "resources.volumes.volume1": { - "action": "update_id" - } - } -} diff --git a/acceptance/bundle/resources/volumes/change-name/out.plan.terraform.txt b/acceptance/bundle/resources/volumes/change-name/out.plan.terraform.json similarity index 100% rename from acceptance/bundle/resources/volumes/change-name/out.plan.terraform.txt rename to acceptance/bundle/resources/volumes/change-name/out.plan.terraform.json diff --git a/acceptance/bundle/resources/volumes/change-name/output.txt b/acceptance/bundle/resources/volumes/change-name/output.txt index d90b76866a..c8112b3293 100644 --- a/acceptance/bundle/resources/volumes/change-name/output.txt +++ b/acceptance/bundle/resources/volumes/change-name/output.txt @@ -41,8 +41,6 @@ update volumes.volume1 Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged ->>> [CLI] bundle debug plan - >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... 
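The change-name fixture above is planned as "update_id": the volume survives, but its ID changes because volumes are addressed by name. This is why plan-time resolution of ${...id} references is conditional. A hedged sketch of the rule, assuming KeepsID is false for create/update_id/recreate/delete as the (truncated) switch in action.go suggests; resolveID is a hypothetical helper and errDelayed mirrors the sentinel added in bundle_plan.go below:

// Hypothetical helper showing when an "id" reference can be resolved from
// state at plan time; the action grouping is an assumption based on KeepsID.
package main

import (
	"errors"
	"fmt"
)

var errDelayed = errors.New("must be resolved after apply")

func resolveID(action, stateID string) (string, error) {
	switch action {
	case "skip", "update", "resize": // ID stays stable, safe to use now
		return stateID, nil
	default: // create, update_id, recreate: ID is only known after apply
		return "", errDelayed
	}
}

func main() {
	id, err := resolveID("update_id", "old-volume-id")
	fmt.Println(id, err) // "" must be resolved after apply
}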
diff --git a/acceptance/bundle/resources/volumes/change-name/script b/acceptance/bundle/resources/volumes/change-name/script index 4e5de9cf88..ba2b7eb5d5 100644 --- a/acceptance/bundle/resources/volumes/change-name/script +++ b/acceptance/bundle/resources/volumes/change-name/script @@ -15,7 +15,7 @@ trace update_file.py databricks.yml myvolume mynewvolume trace $CLI bundle plan # terraform marks this as "update", direct marks this as "update_with_id" -trace $CLI bundle debug plan > out.plan.$DATABRICKS_BUNDLE_ENGINE.txt +$CLI bundle debug plan > out.plan.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy trace print_requests diff --git a/bundle/deploy/terraform/showplanfile.go b/bundle/deploy/terraform/showplanfile.go index 4893d5156d..a3f6bd8b0b 100644 --- a/bundle/deploy/terraform/showplanfile.go +++ b/bundle/deploy/terraform/showplanfile.go @@ -47,7 +47,7 @@ func populatePlan(ctx context.Context, plan *deployplan.Plan, changes []*tfjson. } key := "resources." + group + "." + rc.Name - plan.Plan[key] = deployplan.PlanEntry{Action: actionType.String()} + plan.Plan[key] = &deployplan.PlanEntry{Action: actionType.String()} } } @@ -59,10 +59,7 @@ func ShowPlanFile(ctx context.Context, tf *tfexec.Terraform, planPath string) (* return nil, err } - plan := &deployplan.Plan{ - Plan: make(map[string]deployplan.PlanEntry), - } - + plan := deployplan.NewPlan() populatePlan(ctx, plan, tfPlan.ResourceChanges) return plan, nil diff --git a/bundle/deploy/terraform/showplanfile_test.go b/bundle/deploy/terraform/showplanfile_test.go index 1f5292d2ef..0ce7d049ef 100644 --- a/bundle/deploy/terraform/showplanfile_test.go +++ b/bundle/deploy/terraform/showplanfile_test.go @@ -42,10 +42,7 @@ func TestPopulatePlan(t *testing.T) { }, } - plan := &deployplan.Plan{ - Plan: make(map[string]deployplan.PlanEntry), - } - + plan := deployplan.NewPlan() populatePlan(ctx, plan, changes) actions := plan.GetActions() diff --git a/bundle/deployplan/action.go b/bundle/deployplan/action.go index 0b20c3c3ae..8f6c71868c 100644 --- a/bundle/deployplan/action.go +++ b/bundle/deployplan/action.go @@ -40,8 +40,10 @@ const ( ActionTypeDelete ) +const ActionTypeSkipString = "skip" + var actionName = map[ActionType]string{ - ActionTypeSkip: "skip", + ActionTypeSkip: ActionTypeSkipString, ActionTypeResize: "resize", ActionTypeUpdate: "update", ActionTypeUpdateWithID: "update_id", @@ -61,10 +63,6 @@ func init() { } } -func (a ActionType) IsNoop() bool { - return a == ActionTypeSkip -} - func (a ActionType) KeepsID() bool { switch a { case ActionTypeCreate, ActionTypeUpdateWithID, ActionTypeRecreate, ActionTypeDelete: diff --git a/bundle/deployplan/plan.go b/bundle/deployplan/plan.go index 4d54bc4668..4f07ade6cb 100644 --- a/bundle/deployplan/plan.go +++ b/bundle/deployplan/plan.go @@ -2,8 +2,12 @@ package deployplan import ( "cmp" + "fmt" "slices" "strings" + "sync" + + "github.com/databricks/cli/libs/structs/structvar" ) type Plan struct { @@ -12,14 +16,26 @@ type Plan struct { // TODO: // - CliVersion string `json:"cli_version"` // - Copy Serial / Lineage from the state file - Plan map[string]PlanEntry `json:"plan,omitzero"` + // - Store a path to state file + Plan map[string]*PlanEntry `json:"plan,omitzero"` + + mutex sync.Mutex `json:"-"` + locks map[string]bool `json:"-"` +} + +func NewPlan() *Plan { + return &Plan{ + Plan: make(map[string]*PlanEntry), + locks: make(map[string]bool), + } } type PlanEntry struct { - ID string `json:"id,omitempty"` - DependsOn []DependsOnEntry `json:"depends_on,omitempty"` - Action string 
`json:"action"` - Fields []Field `json:"fields,omitempty"` + ID string `json:"id,omitempty"` + DependsOn []DependsOnEntry `json:"depends_on,omitempty"` + Action string `json:"action,omitempty"` + NewState *structvar.StructVar `json:"new_state,omitempty"` + Changes *Changes `json:"changes,omitempty"` } type DependsOnEntry struct { @@ -27,15 +43,17 @@ type DependsOnEntry struct { Label string `json:"label,omitempty"` } -type Field struct { - Path string `json:"path"` - State any `json:"state,omitempty"` - Config any `json:"config"` - Remote any `json:"remote,omitempty"` +type Changes struct { + Local map[string]Trigger `json:"local,omitempty"` + Remote map[string]Trigger `json:"remote,omitempty"` +} + +type Trigger struct { Action string `json:"action"` + Reason string `json:"reason,omitempty"` } -func (p Plan) GetActions() []Action { +func (p *Plan) GetActions() []Action { actions := make([]Action, 0, len(p.Plan)) for key, entry := range p.Plan { at := ActionTypeFromString(entry.Action) @@ -55,3 +73,26 @@ func (p Plan) GetActions() []Action { return actions } + +// LockEntry returns *PlanEntry; subsequent calls before UnlockEntry() with the same resourceKey will panic. +func (p *Plan) LockEntry(resourceKey string) *PlanEntry { + p.mutex.Lock() + defer p.mutex.Unlock() + + entry, ok := p.Plan[resourceKey] + if ok { + if p.locks[resourceKey] { + panic(fmt.Sprintf("internal DAG error, concurrent access to %q", resourceKey)) + } + p.locks[resourceKey] = true + return entry + } + + return nil +} + +func (p *Plan) UnlockEntry(resourceKey string) { + p.mutex.Lock() + defer p.mutex.Unlock() + p.locks[resourceKey] = false +} diff --git a/bundle/direct/apply.go b/bundle/direct/apply.go index 154e299d88..f0dc0ad4c5 100644 --- a/bundle/direct/apply.go +++ b/bundle/direct/apply.go @@ -32,17 +32,7 @@ func (d *DeploymentUnit) Destroy(ctx context.Context, db *dstate.DeploymentState return nil } -func (d *DeploymentUnit) Deploy(ctx context.Context, db *dstate.DeploymentState, inputConfig any, actionType deployplan.ActionType) error { - if actionType == deployplan.ActionTypeSkip { - return nil - } - - // Note, newState may be different between plan and deploy due to resolved $resource references - newState, err := d.Adapter.PrepareState(inputConfig) - if err != nil { - return fmt.Errorf("reading config: %w", err) - } - +func (d *DeploymentUnit) Deploy(ctx context.Context, db *dstate.DeploymentState, newState any, actionType deployplan.ActionType) error { if actionType == deployplan.ActionTypeCreate { return d.Create(ctx, db, newState) } @@ -211,3 +201,14 @@ func typeConvert(destType reflect.Type, src any) (any, error) { return reflect.ValueOf(destPtr).Elem().Interface(), nil } + +func (d *DeploymentUnit) refreshRemoteState(ctx context.Context, id string) error { + if d.RemoteState != nil { + return nil + } + remoteState, err := d.Adapter.DoRefresh(ctx, id) + if err != nil { + return fmt.Errorf("failed to refresh remote state id=%s: %w", id, err) + } + return d.SetRemoteState(remoteState) +} diff --git a/bundle/direct/bundle_apply.go b/bundle/direct/bundle_apply.go index 01b43d5fb9..0d9c369a64 100644 --- a/bundle/direct/bundle_apply.go +++ b/bundle/direct/bundle_apply.go @@ -2,16 +2,20 @@ package direct import ( "context" + "encoding/json" + "errors" "fmt" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/logdiag" + "github.com/databricks/cli/libs/structs/structaccess" + "github.com/databricks/cli/libs/structs/structpath" 
"github.com/databricks/databricks-sdk-go" ) func (b *DeploymentBundle) Apply(ctx context.Context, client *databricks.WorkspaceClient, configRoot *config.Root, plan *deployplan.Plan) { - if b.Graph == nil { + if plan == nil { panic("Planning is not done") } @@ -22,38 +26,51 @@ func (b *DeploymentBundle) Apply(ctx context.Context, client *databricks.Workspa b.StateDB.AssertOpened() - b.Graph.Run(defaultParallelism, func(resourceKey string, failedDependency *string) bool { - entry, ok := plan.Plan[resourceKey] - if !ok { - // Nothing to do for this node - return true - } + g, err := makeGraph(plan) + if err != nil { + logdiag.LogError(ctx, err) + return + } + + g.Run(defaultParallelism, func(resourceKey string, failedDependency *string) bool { + entry := plan.LockEntry(resourceKey) + defer plan.UnlockEntry(resourceKey) - group := config.GetResourceTypeFromKey(resourceKey) - if group == "" { - logdiag.LogError(ctx, fmt.Errorf("internal error: bad node: %s", resourceKey)) + if entry == nil { + logdiag.LogError(ctx, fmt.Errorf("internal error: node not in graph: %q", resourceKey)) return false } - at := deployplan.ActionTypeFromString(entry.Action) + action := entry.Action + errorPrefix := fmt.Sprintf("cannot %s %s", action, resourceKey) + + at := deployplan.ActionTypeFromString(action) if at == deployplan.ActionTypeUnset { - logdiag.LogError(ctx, fmt.Errorf("unknown action %q for %s", entry.Action, resourceKey)) + logdiag.LogError(ctx, fmt.Errorf("cannot deploy %s: unknown action %q", resourceKey, action)) return false } - d := &DeploymentUnit{ - ResourceKey: resourceKey, - Adapter: b.Adapters[group], - } - errorPrefix := fmt.Sprintf("cannot %s %s", entry.Action, resourceKey) // If a dependency failed, report and skip execution for this node by returning false if failedDependency != nil { - logdiag.LogError(ctx, fmt.Errorf("%s: dependency failed: %s", errorPrefix, *failedDependency)) + if at != deployplan.ActionTypeSkip { + logdiag.LogError(ctx, fmt.Errorf("%s: dependency failed: %s", errorPrefix, *failedDependency)) + } + return false + } + + adapter, err := b.getAdapterForKey(resourceKey) + if adapter == nil { + logdiag.LogError(ctx, fmt.Errorf("%s: internal error: cannot get adapter: %w", errorPrefix, err)) return false } + d := &DeploymentUnit{ + ResourceKey: resourceKey, + Adapter: adapter, + } + if at == deployplan.ActionTypeDelete { - err := d.Destroy(ctx, &b.StateDB) + err = d.Destroy(ctx, &b.StateDB) if err != nil { logdiag.LogError(ctx, fmt.Errorf("%s: %w", errorPrefix, err)) return false @@ -61,63 +78,93 @@ func (b *DeploymentBundle) Apply(ctx context.Context, client *databricks.Workspa return true } - // Fetch the references to ensure all are resolved - myReferences, err := extractReferences(configRoot.Value(), resourceKey) - if err != nil { - logdiag.LogError(ctx, fmt.Errorf("%s: reading references from config: %w", errorPrefix, err)) - return false - } + // We don't keep NewState around for 'skip' nodes - // At this point it's an error to have unresolved deps - if len(myReferences) > 0 { - // TODO: include the deps themselves in the message - logdiag.LogError(ctx, fmt.Errorf("%s: unresolved deps", errorPrefix)) - return false - } + if at != deployplan.ActionTypeSkip { + if !b.resolveReferences(ctx, entry, errorPrefix, false) { + return false + } - config, ok := configRoot.GetResourceConfig(resourceKey) - if !ok { - logdiag.LogError(ctx, fmt.Errorf("%s: internal error when reading config", errorPrefix)) - return false - } + if len(entry.NewState.Refs) > 0 { + logdiag.LogError(ctx, 
fmt.Errorf("%s: unresolved references: %s", errorPrefix, jsonDump(entry.NewState.Refs))) + return false + } - // TODO: redo calcDiff to downgrade planned action if possible (?) + // TODO: redo calcDiff to downgrade planned action if possible (?) - err = d.Deploy(ctx, &b.StateDB, config, at) - if err != nil { - logdiag.LogError(ctx, fmt.Errorf("%s: %w", errorPrefix, err)) - return false + err = d.Deploy(ctx, &b.StateDB, entry.NewState.Config, at) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: %w", errorPrefix, err)) + return false + } } - // We now process references of the form "resources..." and refers - // for the resource that was just deployed. We first look up those references (ResolveReferenceRemote) - // and the replace them across the whole bundle (replaceReferenceWithValue). - // Note, we've already replaced what we could in plan phase: - // - "id" for cases where id cannot change; - // - "field" for cases where field is part of the config. - // Now we're focussing on the remaining cases: - // - "id" for cases where id could have changed; - // - "field" for cases where field is part of the remote state. - for _, reference := range b.Graph.OutgoingLabels(resourceKey) { - value, err := d.ResolveReferenceRemote(ctx, &b.StateDB, reference) - if err != nil { - logdiag.LogError(ctx, fmt.Errorf("failed to resolve reference %q for %s after deployment: %w", reference, resourceKey, err)) + // TODO: Note, we only really need remote state if there are remote references. + // The graph includes edges for both local and remote references. The local references are + // already resolved and should not play a role here. + needRemoteState := len(g.Adj[resourceKey]) > 0 + if needRemoteState { + entry, _ := b.StateDB.GetResourceEntry(d.ResourceKey) + if entry.ID == "" { + logdiag.LogError(ctx, fmt.Errorf("%s: internal error: missing entry in state after deploy", errorPrefix)) return false } - err = replaceReferenceWithValue(ctx, configRoot, reference, value) + err = d.refreshRemoteState(ctx, entry.ID) if err != nil { - logdiag.LogError(ctx, fmt.Errorf("failed to replace reference %q with value %v for %s: %w", reference, value, resourceKey, err)) + logdiag.LogError(ctx, fmt.Errorf("%s: failed to read remote state: %w", errorPrefix, err)) return false } + b.RemoteStateCache.Store(resourceKey, d.RemoteState) } return true }) // This must run even if deploy failed: - err := b.StateDB.Finalize() + err = b.StateDB.Finalize() if err != nil { logdiag.LogError(ctx, err) } } + +func (b *DeploymentBundle) LookupReferenceRemote(ctx context.Context, path *structpath.PathNode) (any, error) { + targetResourceKey := path.Prefix(3).String() + fieldPath := path.SkipPrefix(3) + fieldPathS := fieldPath.String() + + targetEntry := b.Plan.LockEntry(targetResourceKey) + defer b.Plan.UnlockEntry(targetResourceKey) + + if targetEntry == nil { + return nil, fmt.Errorf("internal error: %s: missing entry in the plan", targetResourceKey) + } + + targetAction := deployplan.ActionTypeFromString(targetEntry.Action) + if targetAction == deployplan.ActionTypeUnset { + return nil, fmt.Errorf("internal error: %s: missing action in the plan", targetResourceKey) + } + + if fieldPathS == "id" { + dbentry, hasEntry := b.StateDB.GetResourceEntry(targetResourceKey) + if !hasEntry || dbentry.ID == "" { + return nil, errors.New("internal error: no db entry") + } + return dbentry.ID, nil + } + + remoteState, ok := b.RemoteStateCache.Load(targetResourceKey) + if !ok { + return nil, fmt.Errorf("internal error: %s: missing remote 
state", targetResourceKey) + } + + return structaccess.Get(remoteState, fieldPath) +} + +func jsonDump(obj map[string]string) string { + bytes, err := json.MarshalIndent(obj, "", " ") + if err != nil { + return err.Error() + } + return string(bytes) +} diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index 598f069928..e518ba1f2f 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -4,18 +4,27 @@ import ( "context" "errors" "fmt" + "reflect" + "slices" "strings" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/bundle/direct/dresources" - "github.com/databricks/cli/libs/dagrun" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/logdiag" + "github.com/databricks/cli/libs/structs/structaccess" + "github.com/databricks/cli/libs/structs/structdiff" + "github.com/databricks/cli/libs/structs/structpath" + "github.com/databricks/cli/libs/structs/structvar" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" ) +var errDelayed = errors.New("must be resolved after apply") + func (b *DeploymentBundle) OpenStateFile(statePath string) error { err := b.StateDB.Open(statePath) if err != nil { @@ -40,98 +49,97 @@ func (b *DeploymentBundle) CalculatePlanForDeploy(ctx context.Context, client *d return nil, err } - // TODO: make this --local option - localOnly := false - - // TODO: get this from --refresh / --norefresh option (default refresh for prod, norefresh for dev mode) - refresh := true - - b.Graph, err = makeResourceGraph(ctx, configRoot.Value()) + plan, err := b.makePlan(ctx, configRoot) if err != nil { return nil, fmt.Errorf("reading config: %w", err) } - err = b.Graph.DetectCycle() + b.Plan = plan + + g, err := makeGraph(plan) if err != nil { return nil, err } - plan := deployplan.Plan{ - Plan: make(map[string]deployplan.PlanEntry), + err = g.DetectCycle() + if err != nil { + return nil, err } - // We're processing resources in DAG order, because we're trying to get rid of all references like $resources.jobs.foo.id - // if jobs.foo is not going to be (re)created. This means by the time we get to resource depending on $resources.jobs.foo.id - // we might have already got rid of this reference, thus potentially downgrading actionType - // - // parallelism is set to 1, so there is no multi-threaded access there. TODO: increase parallism - b.Graph.Run(1, func(resourceKey string, failedDependency *string) bool { - group := config.GetResourceTypeFromKey(resourceKey) - if group == "" { - logdiag.LogError(ctx, fmt.Errorf("internal error: bad node: %s", resourceKey)) + // We're processing resources in DAG order because we're resolving refernces (that can be resolved at plan stage). 
+ g.Run(1, func(resourceKey string, failedDependency *string) bool { + errorPrefix := "cannot plan " + resourceKey + + entry := plan.LockEntry(resourceKey) + defer plan.UnlockEntry(resourceKey) + + if entry == nil { + logdiag.LogError(ctx, fmt.Errorf("%s: internal error: node not in graph", resourceKey)) return false } - errorPrefix := "cannot plan " + resourceKey if failedDependency != nil { logdiag.LogError(ctx, fmt.Errorf("%s: dependency failed: %s", errorPrefix, *failedDependency)) return false } - adapter, ok := b.Adapters[group] - if !ok { - logdiag.LogError(ctx, fmt.Errorf("%s: resource type not supported on direct backend, available: %s", errorPrefix, strings.Join(utils.SortedKeys(b.Adapters), ", "))) + adapter, err := b.getAdapterForKey(resourceKey) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: %w", errorPrefix, err)) return false } - config, ok := configRoot.GetResourceConfig(resourceKey) - if !ok { - logdiag.LogError(ctx, fmt.Errorf("%s: internal error: cannot read config", errorPrefix)) + // Process all references in the resource using Refs map + // Refs maps path inside resource to references e.g. "${resources.jobs.foo.id} ${resources.jobs.foo.name}" + if !b.resolveReferences(ctx, entry, errorPrefix, true) { return false } - d := &DeploymentUnit{ - ResourceKey: resourceKey, - Adapter: adapter, + dbentry, hasEntry := b.StateDB.GetResourceEntry(resourceKey) + if !hasEntry { + entry.Action = deployplan.ActionTypeCreate.String() + return true } - // This currently does not do API calls, so we can run this sequentially. Once we have remote diffs, we need to run in threadpool. - actionType, err := d.Plan(ctx, client, &b.StateDB, config, localOnly, refresh) - if err != nil { - logdiag.LogError(ctx, err) + if dbentry.ID == "" { + logdiag.LogError(ctx, fmt.Errorf("%s: invalid state empty id", errorPrefix)) return false } - hasDelayedResolutions := false - - for _, reference := range b.Graph.OutgoingLabels(resourceKey) { - value, err := d.ResolveReferenceLocalOrRemote(ctx, &b.StateDB, reference, actionType, config) - if errors.Is(err, ErrDelayed) { - hasDelayedResolutions = true - continue - } - if err != nil { - logdiag.LogError(ctx, fmt.Errorf("cannot resolve %q: %w", reference, err)) - return false - } - err = replaceReferenceWithValue(ctx, configRoot, reference, value) - if err != nil { - logdiag.LogError(ctx, fmt.Errorf("failed to replace %q with %v: %w", reference, value, err)) - return false - } + savedState, err := typeConvert(adapter.StateType(), dbentry.State) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: interpreting state: %w", errorPrefix, err)) + return false } - plan.Plan[resourceKey] = deployplan.PlanEntry{ - Action: actionType.String(), + // Note, currently we're diffing static structs, not dynamic values. + // This means for fields that contain references like ${resources.group.foo.id} we do one of the following: + // for strings: comparing unresolved string like "${resources.group.foo.id}" with actual object id. As long as IDs do not have ${...} format we're good. + // for integers: compare 0 with actual object ID. As long as real object IDs are never 0 we're good. + // Once we add non-id fields or add per-field details to "bundle plan", we must read dynamic data and deal with references as first-class citizens. + // This means distinguishing between 0 that are actually object ids and 0 that are there because typed struct integer cannot contain ${...} string.
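The caveat in the comment above can be seen with two tiny structs: an unresolved "${...}" placeholder (or a zero-valued int) never equals a real object ID, so the diff conservatively flags the field as changed. Hypothetical types, not the real structdiff API:

// The saved state holds real values; the planned config may still hold an
// unresolved placeholder, which can never equal a real ID.
package main

import "fmt"

type state struct {
	WarehouseID string
	NumWorkers  int
}

func main() {
	saved := state{WarehouseID: "w-123", NumWorkers: 2}
	planned := state{WarehouseID: "${resources.warehouses.w.id}", NumWorkers: 2}

	if saved.WarehouseID != planned.WarehouseID {
		fmt.Println("warehouse_id changed -> triggers update") // placeholder != real ID
	}
	if saved.NumWorkers != planned.NumWorkers {
		fmt.Println("num_workers changed") // not printed: both are 2
	}
}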
+ localDiff, err := structdiff.GetStructDiff(savedState, entry.NewState.Config) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: diffing state: %w", errorPrefix, err)) + return false } - if actionType == deployplan.ActionTypeSkip { - if hasDelayedResolutions { - logdiag.LogError(ctx, fmt.Errorf("%s: internal error, action noop must not have delayed resolutions", errorPrefix)) - return false + localAction := deployplan.ActionTypeSkip + + for _, ch := range localDiff { + fieldAction := adapter.ClassifyByTriggers(ch) + if fieldAction > localAction { + localAction = fieldAction + } + if entry.Changes == nil { + entry.Changes = &deployplan.Changes{ + Local: make(map[string]deployplan.Trigger), + } } + entry.Changes.Local[ch.Path.String()] = deployplan.Trigger{Action: fieldAction.String()} } + entry.Action = localAction.String() return true }) @@ -139,6 +147,8 @@ func (b *DeploymentBundle) CalculatePlanForDeploy(ctx context.Context, client *d return nil, errors.New("planning failed") } + // Note, we cannot simply remove 'skip' entries here as then we'd need to ensure there are no edges to them + state := b.StateDB.ExportState(ctx) // Remained in state are resources that no longer present in the config @@ -151,15 +161,21 @@ func (b *DeploymentBundle) CalculatePlanForDeploy(ctx context.Context, client *d } for _, key := range utils.SortedKeys(state[group]) { n := "resources." + group + "." + key - if b.Graph.HasNode(n) { + _, exists := plan.Plan[n] + if exists { continue } - b.Graph.AddNode(n) - plan.Plan[n] = deployplan.PlanEntry{Action: deployplan.ActionTypeDelete.String()} + plan.Plan[n] = &deployplan.PlanEntry{Action: deployplan.ActionTypeDelete.String()} } } - return &plan, nil + for _, entry := range plan.Plan { + if entry.Action == deployplan.ActionTypeSkipString { + entry.NewState = nil + } + } + + return plan, nil } func (b *DeploymentBundle) CalculatePlanForDestroy(ctx context.Context, client *databricks.WorkspaceClient) (*deployplan.Plan, error) { @@ -170,8 +186,7 @@ func (b *DeploymentBundle) CalculatePlanForDestroy(ctx context.Context, client * return nil, err } - b.Graph = dagrun.NewGraph() - plan := deployplan.Plan{Plan: make(map[string]deployplan.PlanEntry)} + plan := deployplan.NewPlan() state := b.StateDB.ExportState(ctx) for group, groupData := range state { @@ -182,10 +197,297 @@ func (b *DeploymentBundle) CalculatePlanForDestroy(ctx context.Context, client * } for key := range groupData { n := "resources." + group + "." 
+ key - b.Graph.AddNode(n) - plan.Plan[n] = deployplan.PlanEntry{Action: deployplan.ActionTypeDelete.String()} + plan.Plan[n] = &deployplan.PlanEntry{Action: deployplan.ActionTypeDelete.String()} + } + } + + return plan, nil +} + +func (b *DeploymentBundle) LookupReferenceLocal(ctx context.Context, path *structpath.PathNode) (any, error) { + targetResourceKey := path.Prefix(3).String() + fieldPath := path.SkipPrefix(3) + fieldPathS := fieldPath.String() + + targetEntry := b.Plan.LockEntry(targetResourceKey) + defer b.Plan.UnlockEntry(targetResourceKey) + + if targetEntry == nil { + return nil, fmt.Errorf("internal error: %s: missing entry in the plan", targetResourceKey) + } + + targetAction := deployplan.ActionTypeFromString(targetEntry.Action) + if targetAction == deployplan.ActionTypeUnset { + return nil, fmt.Errorf("internal error: %s: missing action in the plan", targetResourceKey) + } + + if fieldPathS == "id" { + if targetAction.KeepsID() { + dbentry, hasEntry := b.StateDB.GetResourceEntry(targetResourceKey) + idValue := dbentry.ID + if !hasEntry || idValue == "" { + return nil, errors.New("internal error: no db entry") + } + return idValue, nil } + // id may change after deployment, this needs to be done later + return nil, errDelayed + } + + if targetEntry.NewState == nil { + return nil, fmt.Errorf("internal error: %s: action is %q missing new_state", targetResourceKey, targetEntry.Action) + } + + _, isUnresolved := targetEntry.NewState.Refs[fieldPathS] + if isUnresolved { + // The value that is requested is itself a reference; this means it will be resolved after apply + return nil, errDelayed + } + + localConfig := targetEntry.NewState.Config + + targetGroup := config.GetResourceTypeFromKey(targetResourceKey) + adapter := b.Adapters[targetGroup] + if adapter == nil { + return nil, fmt.Errorf("internal error: %s: unknown resource type %q", targetResourceKey, targetGroup) + } + + configValidErr := structaccess.Validate(reflect.TypeOf(localConfig), fieldPath) + remoteValidErr := structaccess.Validate(adapter.RemoteType(), fieldPath) + // Note: using adapter.RemoteType() over reflect.TypeOf(remoteState) because remoteState might be untyped nil + + if configValidErr != nil && remoteValidErr != nil { + return nil, fmt.Errorf("schema mismatch: %w; %w", configValidErr, remoteValidErr) + } + + if configValidErr == nil && remoteValidErr != nil { + // The field is only present in local schema; it must be resolved here. + value, err := structaccess.Get(localConfig, fieldPath) + if err != nil { + return nil, fmt.Errorf("field not set: %w", err) + } + + return value, nil + } + + if configValidErr != nil && remoteValidErr == nil { + // The field is only present in remote state schema. + // TODO: If resource is unchanged in this plan, we can proceed with resolution. + // If resource is going to change, we need to postpone this until deploy. + return nil, errDelayed + } + + // Field is present in both: try local, fallback to delayed. + + value, err := structaccess.Get(localConfig, fieldPath) + + if err == nil && value != nil { + return value, nil + } + + return nil, errDelayed +} + +// resolveReferences processes all references in entry.NewState.Refs. +// If isLocal is true, uses LookupReferenceLocal (for planning phase). +// If isLocal is false, uses LookupReferenceRemote (for apply phase). 
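The doc comment above describes resolveReferences, defined next. Both lookup paths first split a reference into a resource key (the first three segments) and a field path, as the Prefix(3)/SkipPrefix(3) calls above do. A sketch of that split, assuming plain dot-separated references only (the real structpath parser also handles indexes and quoting):

// Split a reference path into resource key and field path.
package main

import (
	"fmt"
	"strings"
)

func splitRef(ref string) (resourceKey, fieldPath string) {
	parts := strings.Split(ref, ".")
	if len(parts) <= 3 {
		return strings.Join(parts, "."), ""
	}
	return strings.Join(parts[:3], "."), strings.Join(parts[3:], ".")
}

func main() {
	key, field := splitRef("resources.volumes.foo.catalog_name")
	fmt.Println(key)   // resources.volumes.foo
	fmt.Println(field) // catalog_name
}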
+func (b *DeploymentBundle) resolveReferences(ctx context.Context, entry *deployplan.PlanEntry, errorPrefix string, isLocal bool) bool { + for fieldPathStr, refString := range entry.NewState.Refs { + refs, ok := dynvar.NewRef(dyn.V(refString)) + if !ok { + if !isLocal { + logdiag.LogError(ctx, fmt.Errorf("%s: cannot parse %q", errorPrefix, refString)) + return false + } + continue + } + + for _, pathString := range refs.References() { + ref := "${" + pathString + "}" + targetPath, err := structpath.Parse(pathString) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: cannot parse reference %q: %w", errorPrefix, ref, err)) + return false + } + + var value any + if isLocal { + value, err = b.LookupReferenceLocal(ctx, targetPath) + if err != nil { + if errors.Is(err, errDelayed) { + continue + } + logdiag.LogError(ctx, fmt.Errorf("%s: cannot resolve %q: %w", errorPrefix, ref, err)) + return false + } + } else { + value, err = b.LookupReferenceRemote(ctx, targetPath) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: cannot resolve %q: %w", errorPrefix, ref, err)) + return false + } + } + + err = entry.NewState.ResolveRef(ref, value) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: cannot update %s with value of %q: %w", errorPrefix, fieldPathStr, ref, err)) + return false + } + } + } + return true +} + +func (b *DeploymentBundle) makePlan(ctx context.Context, configRoot *config.Root) (*deployplan.Plan, error) { + p := deployplan.NewPlan() + + // Collect and sort nodes first, because MapByPattern gives them in randomized order + var nodes []string + + // Walk? + _, err := dyn.MapByPattern( + configRoot.Value(), + dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()), + func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + group := p[1].Key() + + _, ok := dresources.SupportedResources[group] + if !ok { + return v, fmt.Errorf("unsupported resource: %s", group) + } + + nodes = append(nodes, p.String()) + return dyn.InvalidValue, nil + }, + ) + if err != nil { + return nil, fmt.Errorf("reading config: %w", err) + } + + slices.Sort(nodes) + + for _, node := range nodes { + prefix := "cannot plan " + node + inputConfig, ok := configRoot.GetResourceConfig(node) + if !ok { + return nil, fmt.Errorf("%s: failed to read config", prefix) + } + + adapter, err := b.getAdapterForKey(node) + if err != nil { + return nil, fmt.Errorf("%s: %w", prefix, err) + } + + newStateConfig, err := adapter.PrepareState(inputConfig) + if err != nil { + return nil, fmt.Errorf("%s: %w", prefix, err) + } + + // Note, we're extracting references in input config but resolving them in newStateConfig + // This means input and state must be compatible: input can have more fields, but existing fields should not be moved + // This means one cannot refer to fields not present in state (e.g. 
${resources.jobs.foo.permissions}) + + refs, err := extractReferences(configRoot.Value(), node) + if err != nil { + return nil, fmt.Errorf("failed to read references from config for %s: %w", node, err) + } + + // Extract dependencies from references and populate depends_on + var dependsOn []deployplan.DependsOnEntry + for _, reference := range refs { + // Use NewRef to extract all references from the string + ref, ok := dynvar.NewRef(dyn.V(reference)) + if !ok { + continue + } + + // Process each reference in the string + for _, refStr := range ref.References() { + path, err := structpath.Parse(refStr) + if err != nil { + return nil, fmt.Errorf("failed to parse reference %q: %w", refStr, err) + } + + targetNode := path.Prefix(3).String() + fullRef := "${" + refStr + "}" + + // Add to depends_on if not already present + found := false + for _, dep := range dependsOn { + if dep.Node == targetNode && dep.Label == fullRef { + found = true + break + } + } + if !found { + dependsOn = append(dependsOn, deployplan.DependsOnEntry{ + Node: targetNode, + Label: fullRef, + }) + } + } + } + + // Sort dependsOn for consistent ordering + slices.SortFunc(dependsOn, func(a, b deployplan.DependsOnEntry) int { + if a.Node != b.Node { + return strings.Compare(a.Node, b.Node) + } + return strings.Compare(a.Label, b.Label) + }) + + e := deployplan.PlanEntry{ + DependsOn: dependsOn, + NewState: &structvar.StructVar{ + Config: newStateConfig, + Refs: refs, + }, + } + + p.Plan[node] = &e + } + + return p, nil +} + +func extractReferences(root dyn.Value, node string) (map[string]string, error) { + refs := make(map[string]string) + + path, err := dyn.NewPathFromString(node) + if err != nil { + return nil, fmt.Errorf("internal error: bad node key: %q: %w", node, err) + } + + val, err := dyn.GetByPath(root, path) + if err != nil { + return nil, err + } + + err = dyn.WalkReadOnly(val, func(p dyn.Path, v dyn.Value) error { + ref, ok := dynvar.NewRef(v) + if !ok { + return nil + } + // Store the original string that contains references, not individual references + refs[p.String()] = ref.Str + return nil + }) + if err != nil { + return nil, fmt.Errorf("parsing refs: %w", err) + } + return refs, nil +} + +func (b *DeploymentBundle) getAdapterForKey(resourceKey string) (*dresources.Adapter, error) { + group := config.GetResourceTypeFromKey(resourceKey) + if group == "" { + return nil, fmt.Errorf("internal error: bad node: %s", resourceKey) + } + + adapter, ok := b.Adapters[group] + if !ok { + return nil, fmt.Errorf("resource type %q not supported, available: %s", group, strings.Join(utils.SortedKeys(b.Adapters), ", ")) } - return &plan, nil + return adapter, nil } diff --git a/bundle/direct/dresources/adapter.go b/bundle/direct/dresources/adapter.go index c729e128ca..de340682de 100644 --- a/bundle/direct/dresources/adapter.go +++ b/bundle/direct/dresources/adapter.go @@ -447,26 +447,14 @@ func (a *Adapter) DoUpdateWithID(ctx context.Context, oldID string, newState any } } -// ClassifyByTriggers classifies a set of changes using FieldTriggers. -// Unspecified changed fields default to ActionTypeUpdate. Final action is the -// maximum by precedence. No changes yield ActionTypeSkip. -func (a *Adapter) ClassifyByTriggers(changes []structdiff.Change) deployplan.ActionType { - if len(changes) == 0 { - return deployplan.ActionTypeSkip - } - - // Default when there are changes but no explicit trigger is update. 
- result := deployplan.ActionTypeUpdate - for _, change := range changes { - action, ok := a.fieldTriggers[change.Path.String()] - if !ok { - action = deployplan.ActionTypeUpdate - } - if action > result { - result = action - } - } - return result +// ClassifyByTriggers classifies a single change using FieldTriggers. +// Defaults to ActionTypeUpdate. +func (a *Adapter) ClassifyByTriggers(change structdiff.Change) deployplan.ActionType { + action, ok := a.fieldTriggers[change.Path.String()] + if ok { + return action + } + return deployplan.ActionTypeUpdate } // WaitAfterCreate waits for the resource to become ready after creation. diff --git a/bundle/direct/graph.go b/bundle/direct/graph.go index ecb03768eb..fc24f70388 100644 --- a/bundle/direct/graph.go +++ b/bundle/direct/graph.go @@ -1,207 +1,36 @@ package direct import ( - "context" - "errors" "fmt" - "reflect" - "slices" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/direct/dresources" + "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/dagrun" - "github.com/databricks/cli/libs/dyn" - "github.com/databricks/cli/libs/dyn/convert" - "github.com/databricks/cli/libs/dyn/dynvar" - "github.com/databricks/cli/libs/log" - "github.com/databricks/cli/libs/logdiag" + "github.com/databricks/cli/libs/utils" ) -type fieldRef struct { - Node string - Reference string // refrence in question e.g. ${resources.jobs.foo.id} -} - -// makeResourceGraph creates node graph based on ${resources.group.name.id} references. -func makeResourceGraph(ctx context.Context, configRoot dyn.Value) (*dagrun.Graph, error) { +func makeGraph(plan *deployplan.Plan) (*dagrun.Graph, error) { g := dagrun.NewGraph() - // Collect and sort nodes first, because MapByPattern gives them in randomized order - var nodes []string - - _, err := dyn.MapByPattern( - configRoot, - dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()), - func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - group := p[1].Key() - name := p[2].Key() - - _, ok := dresources.SupportedResources[group] - if !ok { - return v, fmt.Errorf("unsupported resource: %s", group) - } - - nodes = append(nodes, "resources."+group+"."+name) - return dyn.InvalidValue, nil - }, - ) - if err != nil { - return nil, fmt.Errorf("reading config: %w", err) - } - - slices.Sort(nodes) - - for _, node := range nodes { - g.AddNode(node) - - fieldRefs, err := extractReferences(configRoot, node) - if err != nil { - return nil, fmt.Errorf("failed to read references from config for %s: %w", node, err) - } - - for _, fieldRef := range fieldRefs { - log.Debugf(ctx, "Adding resource edge: %s -> %s via %s", fieldRef.Node, node, fieldRef.Reference) - // TODO: this may add duplicate edges. Investigate if we need to prevent that - g.AddDirectedEdge( - fieldRef.Node, - node, - fieldRef.Reference, - ) - } - } - - return g, nil -} - -func extractReferences(root dyn.Value, node string) ([]fieldRef, error) { - var result []fieldRef - - path, err := dyn.NewPathFromString(node) - if err != nil { - return nil, fmt.Errorf("internal error: bad node key: %q: %w", node, err) - } - - val, err := dyn.GetByPath(root, path) - if err != nil { - return nil, err - } - - err = dyn.WalkReadOnly(val, func(p dyn.Path, v dyn.Value) error { - ref, ok := dynvar.NewRef(v) - if !ok { - return nil - } - for _, r := range ref.References() { - // validateRef will check resource exists in the config; this will reject references to deleted resources, no need to handle that case separately. 
- item, err := validateRef(root, r) - if err != nil { - return fmt.Errorf("cannot process reference %s: %w", r, err) - } - result = append(result, item) - } - - return nil - }) - if err != nil { - return nil, fmt.Errorf("parsing refs: %w", err) - } - return result, nil -} - -func validateRef(root dyn.Value, ref string) (fieldRef, error) { - path, err := dyn.NewPathFromString(ref) - if err != nil { - return fieldRef{}, err - } - if len(path) < 3 { // expecting "resources.jobs.foo.*" - return fieldRef{}, errors.New("reference too short") - } - if path[0].Key() != "resources" { - return fieldRef{}, errors.New("reference does not start with 'resources'") - } - _, err = dyn.GetByPath(root, path[0:3]) - if err != nil { - return fieldRef{}, err - } - return fieldRef{ - Node: "resources." + path[1].Key() + "." + path[2].Key(), - Reference: "${" + ref + "}", - }, nil -} - -// adaptValue converts arbitrary values to types that dyn library can handle. -// The dyn library supports basic Go types (string, bool, int, float) but not typedefs. -// This function normalizes SDK typedefs to their underlying representation. -func adaptValue(value any) (any, error) { - if value == nil { - return nil, nil + // Add all nodes first + for _, resourceKey := range utils.SortedKeys(plan.Plan) { + g.AddNode(resourceKey) } - rv := reflect.ValueOf(value) - - switch rv.Kind() { - case reflect.String: - return rv.String(), nil - case reflect.Bool: - return rv.Bool(), nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return rv.Int(), nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return int64(rv.Uint()), nil - case reflect.Float32, reflect.Float64: - return rv.Float(), nil - case reflect.Ptr: - if rv.IsNil() { - return nil, nil - } - return adaptValue(rv.Elem().Interface()) - case reflect.Interface: - if rv.IsNil() { - return nil, nil + // Add edges based on depends_on field exclusively + for resourceKey, entry := range plan.Plan { + if entry.DependsOn == nil { + continue } - return adaptValue(rv.Elem().Interface()) - default: - return nil, fmt.Errorf("unsupported type %T (kind %v)", value, rv.Kind()) - } -} - -func replaceReferenceWithValue(ctx context.Context, bundleConfig *config.Root, reference string, value any) error { - targetPath, ok := dynvar.PureReferenceToPath(reference) - if !ok { - return fmt.Errorf("internal error: bad reference: %q", reference) - } - // dyn modules does not work with typedefs, only original types; SDK have many typedefs, so we simplify type here - // adaptValue should also return for non-scalar types like structs and maps and slices - normValue, err := adaptValue(value) - if err != nil { - return fmt.Errorf("cannot resolve value of type %T: %w", value, err) - } - - err = bundleConfig.Mutate(func(root dyn.Value) (dyn.Value, error) { - root, err := dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) { - if slices.Equal(path, targetPath) { - return dyn.V(normValue), nil + for _, dep := range entry.DependsOn { + // Only add edge if target node exists in the plan + if _, exists := plan.Plan[dep.Node]; exists { + g.AddDirectedEdge(dep.Node, resourceKey, dep.Label) + } else { + return nil, fmt.Errorf("invalid dependency %q, no such node %q", dep.Label, dep.Node) } - return dyn.InvalidValue, dynvar.ErrSkipResolution - }) - if err != nil { - return root, err - } - // Following resolve_variable_references.go, normalize after variable substitution. 
- root, diags := convert.Normalize(bundleConfig, root) - for _, d := range diags { - logdiag.LogDiag(ctx, d) } - return root, nil - }) - if err != nil { - logdiag.LogError(ctx, err) } - if logdiag.HasError(ctx) { - return errors.New("failed to update bundle config") - } - - return nil + return g, nil } diff --git a/bundle/direct/pkg.go b/bundle/direct/pkg.go index ec460df689..907c2f8d32 100644 --- a/bundle/direct/pkg.go +++ b/bundle/direct/pkg.go @@ -3,11 +3,11 @@ package direct import ( "fmt" "reflect" + "sync" "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/bundle/direct/dresources" "github.com/databricks/cli/bundle/direct/dstate" - "github.com/databricks/cli/libs/dagrun" ) // How many parallel operations (API calls) are allowed @@ -33,9 +33,10 @@ type DeploymentUnit struct { // DeploymentBundle holds everything needed to deploy a bundle type DeploymentBundle struct { - StateDB dstate.DeploymentState - Graph *dagrun.Graph - Adapters map[string]*dresources.Adapter + StateDB dstate.DeploymentState + Adapters map[string]*dresources.Adapter + Plan *deployplan.Plan + RemoteStateCache sync.Map } // SetRemoteState updates the remote state with type validation and marks as fresh. diff --git a/bundle/direct/plan.go b/bundle/direct/plan.go deleted file mode 100644 index e4591c3443..0000000000 --- a/bundle/direct/plan.go +++ /dev/null @@ -1,210 +0,0 @@ -package direct - -import ( - "context" - "errors" - "fmt" - "reflect" - - "github.com/databricks/cli/bundle/deployplan" - "github.com/databricks/cli/bundle/direct/dresources" - "github.com/databricks/cli/bundle/direct/dstate" - "github.com/databricks/cli/libs/structs/structdiff" - "github.com/databricks/databricks-sdk-go" - - "github.com/databricks/cli/libs/log" - "github.com/databricks/cli/libs/structs/structaccess" - "github.com/databricks/cli/libs/structs/structpath" -) - -func (d *DeploymentUnit) Plan(ctx context.Context, client *databricks.WorkspaceClient, db *dstate.DeploymentState, inputConfig any, localOnly, refresh bool) (deployplan.ActionType, error) { - result, err := d.plan(ctx, client, db, inputConfig, localOnly, refresh) - if err != nil { - return deployplan.ActionTypeSkip, fmt.Errorf("planning: %s: %w", d.ResourceKey, err) - } - return result, err -} - -func (d *DeploymentUnit) plan(ctx context.Context, client *databricks.WorkspaceClient, db *dstate.DeploymentState, inputConfig any, localOnly, refresh bool) (deployplan.ActionType, error) { - entry, hasEntry := db.GetResourceEntry(d.ResourceKey) - if !hasEntry { - return deployplan.ActionTypeCreate, nil - } - if entry.ID == "" { - return deployplan.ActionTypeUnset, errors.New("invalid state: empty id") - } - - newState, err := d.Adapter.PrepareState(inputConfig) - if err != nil { - return deployplan.ActionTypeUnset, fmt.Errorf("reading config: %w", err) - } - - savedState, err := typeConvert(d.Adapter.StateType(), entry.State) - if err != nil { - return deployplan.ActionTypeUnset, fmt.Errorf("interpreting state: %w", err) - } - - // Note, currently we're diffing static structs, not dynamic value. - // This means for fields that contain references like ${resources.group.foo.id} we do one of the following: - // for strings: comparing unresolved string like "${resoures.group.foo.id}" with actual object id. As long as IDs do not have ${...} format we're good. - // for integers: compare 0 with actual object ID. As long as real object IDs are never 0 we're good. 
- // Once we add non-id fields or add per-field details to "bundle plan", we must read dynamic data and deal with references as first class citizen. - // This means distinguishing between 0 that are actually object ids and 0 that are there because typed struct integer cannot contain ${...} string. - return calcDiff(d.Adapter, savedState, newState) -} - -func (d *DeploymentUnit) refreshRemoteState(ctx context.Context, id string) error { - if d.RemoteState != nil { - return nil - } - remoteState, err := d.Adapter.DoRefresh(ctx, id) - if err != nil { - return fmt.Errorf("failed to refresh remote state id=%s: %w", id, err) - } - return d.SetRemoteState(remoteState) -} - -var ErrDelayed = errors.New("must be resolved after apply") - -func (d *DeploymentUnit) ResolveReferenceLocalOrRemote(ctx context.Context, db *dstate.DeploymentState, reference string, actionType deployplan.ActionType, config any) (any, error) { - path, ok := structpath.PureReferenceToPath(reference) - if !ok || path.Len() <= 3 || path.Prefix(3).String() != d.ResourceKey { - return nil, fmt.Errorf("internal error: expected reference to %q, got %q", d.ResourceKey, reference) - } - - fieldPath := path.SkipPrefix(3) - - if fieldPath.String() == "id" { - if actionType.KeepsID() { - entry, hasEntry := db.GetResourceEntry(d.ResourceKey) - idValue := entry.ID - if !hasEntry || idValue == "" { - return nil, errors.New("internal error: no db entry") - } - return idValue, nil - } - // id may change after deployment, this needs to be done later - return nil, ErrDelayed - } - - configValidErr := structaccess.Validate(reflect.TypeOf(config), fieldPath) - remoteValidErr := structaccess.Validate(d.Adapter.RemoteType(), fieldPath) - - if configValidErr != nil && remoteValidErr != nil { - return nil, fmt.Errorf("schema mismatch: %w; %w", configValidErr, remoteValidErr) - } - - if configValidErr == nil && remoteValidErr != nil { - // The field is only present in local schema; it must be resolved here. - value, err := structaccess.Get(config, fieldPath) - if err != nil { - return nil, fmt.Errorf("field not set: %w", err) - } - - return value, nil - } - - if configValidErr != nil && remoteValidErr == nil { - // The field is only present in remote state schema. - // If resource is unchanged in this plan, we can proceed with resolution. - // If resource is going to change, we need to postpone this until deploy. - if !actionType.IsNoop() { - return nil, ErrDelayed - } - - return d.ReadRemoteStateField(ctx, db, fieldPath) - - } - - // Field is present in both: try local, fallback to remote. 
- - value, err := structaccess.Get(config, fieldPath) - - if err == nil && value != nil { - return value, nil - } - - if !actionType.IsNoop() { - log.Infof(ctx, "Reference %q not found in config, delaying resolution after apply: %s", reference, err) - // Can only proceed with remote resolution if resource is not going to be deployed - return nil, ErrDelayed - } - - log.Infof(ctx, "Reference %q not found in config, trying remote state: %s", reference, err) - return d.ReadRemoteStateField(ctx, db, fieldPath) -} - -func (d *DeploymentUnit) ResolveReferenceRemote(ctx context.Context, db *dstate.DeploymentState, reference string) (any, error) { - path, ok := structpath.PureReferenceToPath(reference) - if !ok || path.Len() <= 3 || path.Prefix(3).String() != d.ResourceKey { - return nil, fmt.Errorf("internal error: expected reference to %q, got %q", d.ResourceKey, reference) - } - - fieldPath := path.SkipPrefix(3) - - // Handle "id" field separately - read from state, not remote state - if fieldPath.String() == "id" { - entry, hasEntry := db.GetResourceEntry(d.ResourceKey) - if !hasEntry || entry.ID == "" { - return nil, fmt.Errorf("internal error: no state entry or empty ID for %s", d.ResourceKey) - } - return entry.ID, nil - } - - // Read other fields from remote state - return d.ReadRemoteStateField(ctx, db, fieldPath) -} - -// ReadRemoteStateField reads a field from remote state with refresh if needed. -func (d *DeploymentUnit) ReadRemoteStateField(ctx context.Context, db *dstate.DeploymentState, fieldPath *structpath.PathNode) (any, error) { - // We have options: - // 1) Rely on the cached value; refresh if not cached. - // 2) Always refresh, read the value. - // 3) Consider this "unknown/variable" that always triggers a diff. - // Long term we'll do (1), for now going with (2). - // Not considering (3) because it would result in bad plans. - - entry, _ := db.GetResourceEntry(d.ResourceKey) - if entry.ID == "" { - return nil, errors.New("internal error: Missing state entry") - } - - err := d.refreshRemoteState(ctx, entry.ID) - if err != nil { - return nil, err - } - - // Use remote state tracked by deployer - remoteState := d.RemoteState - if remoteState == nil { - return nil, errors.New("no remote state available") - } - // remoteState cannot be nil there; but if it is, structaccess.Get will return an appropriate error - - value, errRemote := structaccess.Get(remoteState, fieldPath) - if errRemote != nil { - return nil, fmt.Errorf("field not set in remote state: %w", errRemote) - } - - return value, nil -} - -// TODO: return struct that has action but also individual differences and their effect (e.g. 
recreate) -func calcDiff(adapter *dresources.Adapter, savedState, config any) (deployplan.ActionType, error) { - localDiff, err := structdiff.GetStructDiff(savedState, config) - if err != nil { - return deployplan.ActionTypeUnset, err - } - - if len(localDiff) == 0 { - return deployplan.ActionTypeSkip, nil - } - - result := adapter.ClassifyByTriggers(localDiff) - - if result == deployplan.ActionTypeUpdateWithID && !adapter.HasDoUpdateWithID() { - return deployplan.ActionTypeUnset, errors.New("internal error: unexpected plan='update_with_id'") - } - - return result, nil -} diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go index 915f9effdf..65decce058 100644 --- a/cmd/bundle/utils/utils.go +++ b/cmd/bundle/utils/utils.go @@ -101,7 +101,7 @@ func GetPlan(ctx context.Context, b *bundle.Bundle) (*deployplan.Plan, error) { for rKey := range group.Resources { resourceKey := "resources." + group.Description.PluralName + "." + rKey if _, ok := plan.Plan[resourceKey]; !ok { - plan.Plan[resourceKey] = deployplan.PlanEntry{ + plan.Plan[resourceKey] = &deployplan.PlanEntry{ Action: deployplan.ActionTypeSkip.String(), } } diff --git a/libs/dagrun/dagrun.go b/libs/dagrun/dagrun.go index 5e78298cdb..273c913ef3 100644 --- a/libs/dagrun/dagrun.go +++ b/libs/dagrun/dagrun.go @@ -7,58 +7,36 @@ import ( ) type adjEdge struct { - to string - label string + To string + Label string } type Graph struct { - adj map[string][]adjEdge - nodes []string // maintains insertion order of added nodes + Adj map[string][]adjEdge + Nodes []string // maintains insertion order of added nodes } func NewGraph() *Graph { - return &Graph{adj: make(map[string][]adjEdge)} + return &Graph{ + Adj: make(map[string][]adjEdge), + } } -func (g *Graph) Size() int { return len(g.nodes) } +func (g *Graph) Size() int { return len(g.Nodes) } func (g *Graph) AddNode(n string) { - if _, ok := g.adj[n]; !ok { - g.adj[n] = nil - g.nodes = append(g.nodes, n) + if _, ok := g.Adj[n]; !ok { + g.Adj[n] = nil + g.Nodes = append(g.Nodes, n) } } -func (g *Graph) HasNode(n string) bool { _, ok := g.adj[n]; return ok } - -// HasOutgoingEdges reports whether this node has at least one outgoing edge. -// In this graph, an outgoing edge from X->Y means Y references X. -func (g *Graph) HasOutgoingEdges(n string) bool { return len(g.adj[n]) > 0 } +func (g *Graph) HasNode(n string) bool { _, ok := g.Adj[n]; return ok } func (g *Graph) AddDirectedEdge(from, to, label string) { g.AddNode(from) g.AddNode(to) - g.adj[from] = append(g.adj[from], adjEdge{to: to, label: label}) -} - -// OutgoingLabels returns labels of all outgoing edges from the given node -// in the order the edges were added. If the node has no outgoing edges or is -// unknown to the graph, an empty slice is returned. 
-func (g *Graph) OutgoingLabels(node string) []string { - outs := g.adj[node] - if len(outs) == 0 { - return []string{} - } - labels := make([]string, 0, len(outs)) - seen := make(map[string]struct{}, len(outs)) - for _, e := range outs { - if _, ok := seen[e.label]; ok { - continue - } - seen[e.label] = struct{}{} - labels = append(labels, e.label) - } - return labels + g.Adj[from] = append(g.Adj[from], adjEdge{To: to, Label: label}) } type CycleError struct { @@ -90,13 +68,13 @@ func (e *CycleError) Error() string { } func (g *Graph) indegrees() map[string]int { - in := make(map[string]int, len(g.adj)) - for v := range g.adj { + in := make(map[string]int, len(g.Adj)) + for v := range g.Adj { in[v] = 0 } - for _, outs := range g.adj { + for _, outs := range g.Adj { for _, e := range outs { - in[e.to]++ + in[e.To]++ } } return in @@ -104,14 +82,14 @@ func (g *Graph) indegrees() map[string]int { func (g *Graph) DetectCycle() error { // Build list of roots in insertion order - roots := g.nodes + roots := g.Nodes const ( white = 0 grey = 1 black = 2 ) - color := make(map[string]int, len(g.adj)) + color := make(map[string]int, len(g.Adj)) type frame struct { node string @@ -129,17 +107,17 @@ func (g *Graph) DetectCycle() error { for st.len() > 0 { f := st.peek() - outs := g.adj[f.node] + outs := g.Adj[f.node] if f.next < len(outs) { edge := outs[f.next] st.peek().next++ - switch color[edge.to] { + switch color[edge.To] { case white: - color[edge.to] = grey - st.push(frame{node: edge.to, inLbl: edge.label}) + color[edge.To] = grey + st.push(frame{node: edge.To, inLbl: edge.Label}) case grey: - closeLbl := edge.label + closeLbl := edge.Label var nodes []string var edges []string for i := st.len() - 1; i >= 0; i-- { @@ -147,7 +125,7 @@ func (g *Graph) DetectCycle() error { if lbl := st.data[i].inLbl; lbl != "" { edges = append(edges, lbl) } - if st.data[i].node == edge.to { + if st.data[i].node == edge.To { break } } @@ -175,15 +153,15 @@ func (g *Graph) DetectCycle() error { // The callback should return true on success or false on failure. Nodes are not // skipped when dependencies fail; instead, they are executed with failedDependency set. 
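// Illustrative sketch (hypothetical callback; deploy is a stand-in, not part
// of this change):
//
//	g.Run(4, func(node string, failedDependency *string) bool {
//		if failedDependency != nil {
//			return false // a dependency failed; report failure without doing work
//		}
//		return deploy(node)
//	})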
func (g *Graph) Run(pool int, runUnit func(node string, failedDependency *string) bool) { - if pool <= 0 || pool > len(g.adj) { - pool = len(g.adj) + if pool <= 0 || pool > len(g.Adj) { + pool = len(g.Adj) } in := g.indegrees() // Prepare initial ready nodes in stable insertion order var initial []string - for _, n := range g.nodes { + for _, n := range g.Nodes { if in[n] == 0 { initial = append(initial, n) } @@ -223,17 +201,17 @@ func (g *Graph) Run(pool int, runUnit func(node string, failedDependency *string cause = &parent } // Record a failed dependency cause for children, if not set yet - for _, e := range g.adj[res.n] { - if _, exists := failedCause[e.to]; !exists { - failedCause[e.to] = cause + for _, e := range g.Adj[res.n] { + if _, exists := failedCause[e.To]; !exists { + failedCause[e.To] = cause } } } // Decrement indegrees and enqueue children that become ready - for _, e := range g.adj[res.n] { - if in[e.to]--; in[e.to] == 0 { - ready <- task{n: e.to, failedFrom: failedCause[e.to]} + for _, e := range g.Adj[res.n] { + if in[e.To]--; in[e.To] == 0 { + ready <- task{n: e.To, failedFrom: failedCause[e.To]} } } } diff --git a/libs/dagrun/dagrun_test.go b/libs/dagrun/dagrun_test.go index 94c847c269..0c6488bec2 100644 --- a/libs/dagrun/dagrun_test.go +++ b/libs/dagrun/dagrun_test.go @@ -213,54 +213,3 @@ func runTestCase(t *testing.T, tc testCase, g *Graph, p int) { } } } - -func TestOutgoingLabels_OrderAndEmpty(t *testing.T) { - g := NewGraph() - a := "A" - b := "B" - c := "C" - - // no edges yet - require.Empty(t, g.OutgoingLabels(a)) - - // add edges from A - g.AddDirectedEdge(a, b, "A->B #1") - g.AddDirectedEdge(a, c, "A->C #1") - g.AddDirectedEdge(a, b, "A->B #1") - - // order preserved as added - got := g.OutgoingLabels(a) - require.Equal(t, []string{"A->B #1", "A->C #1"}, got) - - // B still has no outgoing edges - require.Empty(t, g.OutgoingLabels(b)) -} - -func TestOutgoingLabels_DuplicateEdgesDifferentLabels(t *testing.T) { - g := NewGraph() - x := "X" - y := "Y" - - // same (from,to) pair with different labels - g.AddDirectedEdge(x, y, "e1") - g.AddDirectedEdge(x, y, "e2") - g.AddDirectedEdge(x, y, "e3") - - labels := g.OutgoingLabels(x) - assert.Equal(t, []string{"e1", "e2", "e3"}, labels) -} - -func TestOutgoingLabels_SameLabelDifferentTargets(t *testing.T) { - g := NewGraph() - a := "A" - b := "B" - c := "C" - - // same label to different targets should be returned once - g.AddDirectedEdge(a, b, "dup") - g.AddDirectedEdge(a, c, "dup") - g.AddDirectedEdge(a, b, "other") - - labels := g.OutgoingLabels(a) - assert.Equal(t, []string{"dup", "other"}, labels) -} diff --git a/libs/structs/structaccess/convert.go b/libs/structs/structaccess/convert.go new file mode 100644 index 0000000000..bb730becae --- /dev/null +++ b/libs/structs/structaccess/convert.go @@ -0,0 +1,30 @@ +package structaccess + +import ( + "fmt" + "reflect" +) + +func ConvertToString(value any) (string, error) { + originalValue := value + + // Handle pointers by dereferencing them first + rv := reflect.ValueOf(value) + if rv.Kind() == reflect.Ptr { + if rv.IsNil() { + return "", nil + } + value = rv.Elem().Interface() + } + + // Use the same conversion logic as convertValue for consistency + valueVal := reflect.ValueOf(value) + stringType := reflect.TypeOf("") + + convertedValue, err := convertValue(valueVal, stringType) + if err != nil { + return "", fmt.Errorf("unsupported type for string conversion: %T", originalValue) + } + + return convertedValue.String(), nil +} diff --git 
a/libs/structs/structaccess/convert_test.go b/libs/structs/structaccess/convert_test.go new file mode 100644 index 0000000000..02fcfc700a --- /dev/null +++ b/libs/structs/structaccess/convert_test.go @@ -0,0 +1,164 @@ +package structaccess + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type ( + CustomString string + CustomInt int +) + +func TestConvertToString(t *testing.T) { + tests := []struct { + name string + value any + expected string + errorMsg string + }{ + // Basic scalar types + { + name: "string", + value: "hello", + expected: "hello", + }, + { + name: "int", + value: 42, + expected: "42", + }, + { + name: "float64", + value: 3.14, + expected: "3.14", + }, + { + name: "bool true", + value: true, + expected: "true", + }, + { + name: "bool false", + value: false, + expected: "false", + }, + + // Custom types (type aliases) + { + name: "custom string", + value: CustomString("world"), + expected: "world", + }, + { + name: "custom int", + value: CustomInt(100), + expected: "100", + }, + + // Pointers + { + name: "string pointer", + value: stringPtr("test"), + expected: "test", + }, + { + name: "int pointer", + value: intPtr(123), + expected: "123", + }, + { + name: "float64 pointer", + value: float64Ptr(2.5), + expected: "2.5", + }, + { + name: "bool pointer", + value: boolPtr(true), + expected: "true", + }, + { + name: "nil string pointer", + value: (*string)(nil), + expected: "", + }, + { + name: "nil int pointer", + value: (*int)(nil), + expected: "", + }, + { + name: "nil float64 pointer", + value: (*float64)(nil), + expected: "", + }, + { + name: "nil bool pointer", + value: (*bool)(nil), + expected: "", + }, + { + name: "nil", + value: nil, + }, + + // Unsupported types - should return errors + { + name: "slice", + value: []int{1, 2, 3}, + errorMsg: "unsupported type for string conversion: []int", + }, + { + name: "map", + value: map[string]int{"a": 1}, + errorMsg: "unsupported type for string conversion: map[string]int", + }, + { + name: "struct", + value: struct{ X int }{X: 1}, + errorMsg: "unsupported type for string conversion: struct { X int }", + }, + { + name: "pointer to struct", + value: &struct{ X int }{X: 1}, + errorMsg: "unsupported type for string conversion: *struct { X int }", + }, + { + name: "channel", + value: make(chan int), + errorMsg: "unsupported type for string conversion: chan int", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := ConvertToString(tt.value) + if tt.errorMsg != "" { + require.Error(t, err) + assert.Equal(t, tt.errorMsg, err.Error()) + assert.Equal(t, "", result) + } else { + require.NoError(t, err) + assert.Equal(t, tt.expected, result) + } + }) + } +} + +func stringPtr(s string) *string { + return &s +} + +func intPtr(i int) *int { + return &i +} + +func float64Ptr(f float64) *float64 { + return &f +} + +func boolPtr(b bool) *bool { + return &b +} diff --git a/libs/structs/structaccess/get.go b/libs/structs/structaccess/get.go index cc74d1e2dd..aeef9f3c6b 100644 --- a/libs/structs/structaccess/get.go +++ b/libs/structs/structaccess/get.go @@ -24,11 +24,11 @@ func GetByString(v any, path string) (any, error) { return Get(v, pathNode) } -// Get returns the value at the given path inside v. -// Wildcards ("*" or "[*]") are not supported and return an error. -func Get(v any, path *structpath.PathNode) (any, error) { +// getValue returns the reflect.Value at the given path inside v. +// This is the internal function that Get() wraps. 
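+// Illustrative sketch (assumed struct shape, not part of this change): for
+//
+//	v := struct {
+//		Items []string `json:"items"`
+//	}{Items: []string{"a", "b"}}
+//
+// getValue(v, path) with path "items[1]" dereferences pointers as needed,
+// resolves the "items" key, indexes the slice, and returns the reflect.Value
+// holding "b"; Get then unwraps it into an any.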
+func getValue(v any, path *structpath.PathNode) (reflect.Value, error) { if path.IsRoot() { - return v, nil + return reflect.ValueOf(v), nil } // Convert path to slice for easier iteration @@ -37,23 +37,23 @@ func Get(v any, path *structpath.PathNode) (any, error) { cur := reflect.ValueOf(v) for _, node := range pathSegments { if node.DotStar() || node.BracketStar() { - return nil, fmt.Errorf("wildcards not supported: %s", path.String()) + return reflect.Value{}, fmt.Errorf("wildcards not supported: %s", path.String()) } var ok bool cur, ok = deref(cur) if !ok { // cannot proceed further due to nil encountered at current location - return nil, fmt.Errorf("%s: cannot access nil value", node.Parent().String()) + return reflect.Value{}, fmt.Errorf("%s: cannot access nil value", node.Parent().String()) } if idx, isIndex := node.Index(); isIndex { kind := cur.Kind() if kind != reflect.Slice && kind != reflect.Array { - return nil, fmt.Errorf("%s: cannot index %s", node.String(), kind) + return reflect.Value{}, fmt.Errorf("%s: cannot index %s", node.String(), kind) } if idx < 0 || idx >= cur.Len() { - return nil, fmt.Errorf("%s: index out of range, length is %d", node.String(), cur.Len()) + return reflect.Value{}, fmt.Errorf("%s: index out of range, length is %d", node.String(), cur.Len()) } cur = cur.Index(idx) continue @@ -61,16 +61,27 @@ func Get(v any, path *structpath.PathNode) (any, error) { key, ok := node.StringKey() if !ok { - return nil, errors.New("unsupported path node type") + return reflect.Value{}, errors.New("unsupported path node type") } nv, err := accessKey(cur, key, node) if err != nil { - return nil, err + return reflect.Value{}, err } cur = nv } + return cur, nil +} + +// Get returns the value at the given path inside v. +// Wildcards ("*" or "[*]") are not supported and return an error. +func Get(v any, path *structpath.PathNode) (any, error) { + cur, err := getValue(v, path) + if err != nil { + return nil, err + } + // If the current value is invalid (e.g., omitted due to omitempty), return nil. if !cur.IsValid() { return nil, nil diff --git a/libs/structs/structaccess/set.go b/libs/structs/structaccess/set.go new file mode 100644 index 0000000000..104e8e4dbc --- /dev/null +++ b/libs/structs/structaccess/set.go @@ -0,0 +1,414 @@ +package structaccess + +import ( + "errors" + "fmt" + "reflect" + "slices" + "strconv" + + "github.com/databricks/cli/libs/structs/structpath" + "github.com/databricks/cli/libs/structs/structtag" +) + +// Set sets the value at the given path inside the target object. +// The target must be a pointer to the object to modify. 
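+// Illustrative usage (sketch; cfg and the "info.version" path are assumed):
+//
+//	p, _ := structpath.Parse("info.version")
+//	err := Set(&cfg, p, "v2")
+//
+// For multi-level paths, Set resolves the parent ("info") with getValue and
+// assigns the final segment; convertValue also permits scalar coercions, so
+// setting the string "42" on an int field stores the integer 42.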
+func Set(target any, path *structpath.PathNode, value any) error { + if path.IsRoot() { + return errors.New("cannot set root value") + } + + pathLen := path.Len() + if pathLen == 0 { + return errors.New("empty path") + } + + // Validate that target is a pointer + targetVal := reflect.ValueOf(target) + if targetVal.Kind() != reflect.Pointer { + return errors.New("target must be a pointer") + } + + // For single-level paths, get the target directly + if pathLen == 1 { + return setValueAtNode(targetVal.Elem(), path, value) + } + + // For multi-level paths, get the parent container + parent := path.Parent() + if parent == nil { + return errors.New("failed to get parent path") + } + + // Get the parent container using getValue, passing the original target + parentVal, err := getValue(target, parent) + if err != nil { + return fmt.Errorf("failed to navigate to parent %s: %w", parent.String(), err) + } + + // Set the value at the final node + return setValueAtNode(parentVal, path, value) +} + +// SetByString sets the value at the given path string inside the target object. +// This is a convenience function that parses the path string and calls Set. +func SetByString(target any, path string, value any) error { + if path == "" { + return errors.New("cannot set empty path") + } + + pathNode, err := structpath.Parse(path) + if err != nil { + return err + } + + return Set(target, pathNode, value) +} + +// setValueAtNode sets the value at the specific node in the parent object +func setValueAtNode(parentVal reflect.Value, node *structpath.PathNode, value any) error { + // Dereference parent if it's a pointer + for parentVal.Kind() == reflect.Pointer { + if parentVal.IsNil() { + return errors.New("parent is nil pointer") + } + parentVal = parentVal.Elem() + } + + valueVal := reflect.ValueOf(value) + + if idx, isIndex := node.Index(); isIndex { + return setArrayElement(parentVal, idx, valueVal) + } + + if node.DotStar() || node.BracketStar() { + return errors.New("wildcards not supported") + } + + if key, hasKey := node.StringKey(); hasKey { + return setFieldOrMapValue(parentVal, key, valueVal) + } + + return errors.New("unsupported path node type") +} + +// setArrayElement sets an element in an array or slice +func setArrayElement(parentVal reflect.Value, index int, valueVal reflect.Value) error { + kind := parentVal.Kind() + if kind != reflect.Slice && kind != reflect.Array { + return fmt.Errorf("cannot index %s", kind) + } + + if index < 0 || index >= parentVal.Len() { + return fmt.Errorf("index %d out of range, length is %d", index, parentVal.Len()) + } + + elemVal := parentVal.Index(index) + return assignValue(elemVal, valueVal) +} + +// setFieldOrMapValue sets a field in a struct or a value in a map, allowing flexible syntax +func setFieldOrMapValue(parentVal reflect.Value, key string, valueVal reflect.Value) error { + switch parentVal.Kind() { + case reflect.Struct: + return setStructField(parentVal, key, valueVal) + case reflect.Map: + return setMapValue(parentVal, key, valueVal) + default: + return fmt.Errorf("cannot set key %q on %s", key, parentVal.Kind()) + } +} + +// setStructField sets a field in a struct and handles ForceSendFields +func setStructField(parentVal reflect.Value, fieldName string, valueVal reflect.Value) error { + fv, sf, embeddedIndex, ok := findStructFieldByKey(parentVal, fieldName) + if !ok { + return fmt.Errorf("field %q not found in %s", fieldName, parentVal.Type()) + } + + // Check if field is settable + if !fv.CanSet() { + return fmt.Errorf("field %q cannot be set", 
sf.Name) + } + + // Handle ForceSendFields: remove if setting nil, add if setting empty value + err := updateForceSendFields(parentVal, sf.Name, embeddedIndex, valueVal, sf) + if err != nil { + return err + } + + return assignValue(fv, valueVal) +} + +// setMapValue sets a value in a map +func setMapValue(parentVal reflect.Value, key string, valueVal reflect.Value) error { + kt := parentVal.Type().Key() + if kt.Kind() != reflect.String { + return fmt.Errorf("map key must be string, got %s", kt) + } + + mk := reflect.ValueOf(key) + if kt != mk.Type() { + mk = mk.Convert(kt) + } + + // For maps, we need to handle the value type + vt := parentVal.Type().Elem() + convertedValue, err := convertValue(valueVal, vt) + if err != nil { + return fmt.Errorf("cannot convert value for map key %q: %w", key, err) + } + + parentVal.SetMapIndex(mk, convertedValue) + return nil +} + +// assignValue assigns valueVal to targetVal with type compatibility checking +func assignValue(targetVal, valueVal reflect.Value) error { + if !targetVal.CanSet() { + return errors.New("target cannot be set") + } + + convertedValue, err := convertValue(valueVal, targetVal.Type()) + if err != nil { + return err + } + + targetVal.Set(convertedValue) + return nil +} + +// convertValue converts valueVal to targetType with compatibility checking +func convertValue(valueVal reflect.Value, targetType reflect.Type) (reflect.Value, error) { + if !valueVal.IsValid() { + // Handle nil values - return zero value for the target type + return reflect.Zero(targetType), nil + } + + valueType := valueVal.Type() + + // Handle scalar-to-string conversions first (before Go's built-in convertibility). + // This is critical because Go's built-in ConvertibleTo/Convert has different behavior: + // - Integers (int, uint8, etc.) 
convert to string as character codes: 42 → "*", 200 → "È" + // - Floats and bools are not convertible to string at all and would error + // We want semantic string representations instead: 42 → "42", true → "true", 3.14 → "3.14" + if targetType.Kind() == reflect.String { + switch valueType.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + str := strconv.FormatInt(valueVal.Int(), 10) + return reflect.ValueOf(str), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + str := strconv.FormatUint(valueVal.Uint(), 10) + return reflect.ValueOf(str), nil + case reflect.Float32, reflect.Float64: + str := strconv.FormatFloat(valueVal.Float(), 'g', -1, 64) + return reflect.ValueOf(str), nil + case reflect.Bool: + str := strconv.FormatBool(valueVal.Bool()) + return reflect.ValueOf(str), nil + default: + // handled below + } + } + + // Handle string-to-scalar conversions + if valueType.Kind() == reflect.String { + str := valueVal.String() + switch targetType.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + val, err := strconv.ParseInt(str, 10, 64) + if err != nil { + return reflect.Value{}, fmt.Errorf("cannot parse %q as %s: %w", str, targetType.Kind(), err) + } + // Check for overflow after parsing + converted := reflect.ValueOf(val).Convert(targetType) + if converted.Int() != val { + return reflect.Value{}, fmt.Errorf("value %d overflows %s", val, targetType.Kind()) + } + return converted, nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + val, err := strconv.ParseUint(str, 10, 64) + if err != nil { + return reflect.Value{}, fmt.Errorf("cannot parse %q as %s: %w", str, targetType.Kind(), err) + } + // Check for overflow after parsing + converted := reflect.ValueOf(val).Convert(targetType) + if converted.Uint() != val { + return reflect.Value{}, fmt.Errorf("value %d overflows %s", val, targetType.Kind()) + } + return converted, nil + case reflect.Float32, reflect.Float64: + val, err := strconv.ParseFloat(str, 64) + if err != nil { + return reflect.Value{}, fmt.Errorf("cannot parse %q as %s: %w", str, targetType.Kind(), err) + } + converted := reflect.ValueOf(val).Convert(targetType) + return converted, nil + case reflect.Bool: + val, err := strconv.ParseBool(str) + if err != nil { + return reflect.Value{}, fmt.Errorf("cannot parse %q as bool: %w", str, err) + } + return reflect.ValueOf(val), nil + default: + // handled below + } + } + + // Direct assignability check + if valueType.AssignableTo(targetType) { + return valueVal, nil + } + + // Convertibility check (handles typedefed types) + if valueType.ConvertibleTo(targetType) { + return valueVal.Convert(targetType), nil + } + + // Handle pointer types + if targetType.Kind() == reflect.Pointer { + elemType := targetType.Elem() + if valueType.AssignableTo(elemType) { + // Create a new pointer and set the value + ptr := reflect.New(elemType) + ptr.Elem().Set(valueVal) + return ptr, nil + } + if valueType.ConvertibleTo(elemType) { + ptr := reflect.New(elemType) + ptr.Elem().Set(valueVal.Convert(elemType)) + return ptr, nil + } + } + + // Handle case where value is a pointer but target is not + if valueType.Kind() == reflect.Pointer && !valueVal.IsNil() { + elemVal := valueVal.Elem() + return convertValue(elemVal, targetType) + } + + return reflect.Value{}, fmt.Errorf("cannot convert %s to %s", valueType, targetType) +} + +// updateForceSendFields handles ForceSendFields when setting values: +// - If 
setting nil: remove field from ForceSendFields +// - If setting empty value: add field to ForceSendFields (if not already present) +// Only applies to fields with omitempty tag +func updateForceSendFields(parentVal reflect.Value, fieldName string, embeddedIndex int, valueVal reflect.Value, structField reflect.StructField) error { + isSettingNil := !valueVal.IsValid() + isSettingEmptyValue := valueVal.IsValid() && isEmptyForOmitEmpty(valueVal) + + // Early return if we don't need to modify ForceSendFields + if !isSettingNil && !isSettingEmptyValue { + return nil + } + + // Check if field has omitempty tag - only omitempty fields need ForceSendFields management + jsonTag := structtag.JSONTag(structField.Tag.Get("json")) + if !jsonTag.OmitEmpty() { + // Non-omitempty fields don't need ForceSendFields management + return nil + } + + // Find the appropriate ForceSendFields slice to modify + forceSendFieldsSlice := findForceSendFieldsForSetting(parentVal, embeddedIndex) + if !forceSendFieldsSlice.IsValid() { + // No ForceSendFields to update + return nil + } + + if isSettingNil { + // Remove from ForceSendFields + removeFromForceSendFields(forceSendFieldsSlice, fieldName) + } else if isSettingEmptyValue { + // Add to ForceSendFields if not already present + addToForceSendFields(forceSendFieldsSlice, fieldName) + } + + return nil +} + +// findForceSendFieldsForSetting finds the correct ForceSendFields slice to modify +// This should match the logic in get.go's getForceSendFieldsForFromTyped +// Only the struct that contains the ForceSendFields can manage its own fields +// embeddedIndex: -1 for direct fields, or the index of the embedded struct +func findForceSendFieldsForSetting(parentVal reflect.Value, embeddedIndex int) reflect.Value { + if embeddedIndex == -1 { + // Direct field - check if parent struct has its own ForceSendFields + // We need to check the struct type directly, not through field promotion + parentType := parentVal.Type() + for i := range parentType.NumField() { + field := parentType.Field(i) + if field.Name == "ForceSendFields" && !field.Anonymous { + // Parent has direct ForceSendFields + return parentVal.Field(i) + } + } + // Parent struct has no direct ForceSendFields, so no management possible + return reflect.Value{} + } else { + // Embedded field - look for ForceSendFields in the embedded struct + embeddedField := parentVal.Field(embeddedIndex) + embeddedStruct := getEmbeddedStructForSetting(embeddedField) + if !embeddedStruct.IsValid() { + return reflect.Value{} + } + fsf := embeddedStruct.FieldByName("ForceSendFields") + if fsf.IsValid() { + return fsf + } + // Embedded struct has no ForceSendFields, so no management possible + return reflect.Value{} + } +} + +// getEmbeddedStructForSetting gets the embedded struct for setting operations +// Creates nil pointers if needed +func getEmbeddedStructForSetting(fieldValue reflect.Value) reflect.Value { + if fieldValue.Kind() == reflect.Pointer { + if fieldValue.IsNil() { + // Create new instance if needed + if fieldValue.CanSet() { + fieldValue.Set(reflect.New(fieldValue.Type().Elem())) + } else { + return reflect.Value{} + } + } + fieldValue = fieldValue.Elem() + } + if fieldValue.Kind() == reflect.Struct { + return fieldValue + } + return reflect.Value{} +} + +// removeFromForceSendFields removes fieldName from the ForceSendFields slice +func removeFromForceSendFields(forceSendFieldsSlice reflect.Value, fieldName string) { + // Get the original []string slice + fields := forceSendFieldsSlice.Interface().([]string) + + // 
Find the index of the field to remove + index := slices.Index(fields, fieldName) + if index == -1 { + return // Field not found, nothing to remove + } + + // Remove the field using slices.Delete + newFields := slices.Delete(fields, index, index+1) + forceSendFieldsSlice.Set(reflect.ValueOf(newFields)) +} + +// addToForceSendFields adds fieldName to the ForceSendFields slice if not already present +func addToForceSendFields(forceSendFieldsSlice reflect.Value, fieldName string) { + // Get the original []string slice + fields := forceSendFieldsSlice.Interface().([]string) + + // Check if already present + if slices.Contains(fields, fieldName) { + return // Already present + } + + // Add the new field + newFields := append(fields, fieldName) + forceSendFieldsSlice.Set(reflect.ValueOf(newFields)) +} diff --git a/libs/structs/structaccess/set_test.go b/libs/structs/structaccess/set_test.go new file mode 100644 index 0000000000..f2348618c6 --- /dev/null +++ b/libs/structs/structaccess/set_test.go @@ -0,0 +1,735 @@ +package structaccess_test + +import ( + "testing" + + "github.com/databricks/cli/libs/structs/structaccess" + "github.com/databricks/cli/libs/structs/structdiff" + "github.com/databricks/cli/libs/structs/structpath" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type ( + CustomString string + CustomInt int +) + +type NestedInfo struct { + Version string `json:"version"` + Build int `json:"build"` +} + +type TestStruct struct { + Name string `json:"name"` + Age int `json:"age"` + Score float64 `json:"score"` + Active bool `json:"active"` + Priority uint8 `json:"priority"` + Tags map[string]string `json:"tags"` + Items []string `json:"items"` + Count *int `json:"count,omitempty"` + Custom CustomString `json:"custom"` + Info NestedInfo `json:"info"` + Internal string `json:"-"` +} + +// mustParsePath is a helper to parse path strings in tests +func mustParsePath(path string) *structpath.PathNode { + p, err := structpath.Parse(path) + if err != nil { + panic(err) + } + return p +} + +// newTestStruct creates a fresh TestStruct instance for testing +func newTestStruct() *TestStruct { + return &TestStruct{ + Name: "OldName", + Age: 25, + Score: 85.5, + Active: true, + Priority: 10, + Tags: map[string]string{ + "env": "old_env", + }, + Items: []string{"old_a", "old_b", "old_c"}, + Count: nil, + Custom: CustomString("old custom"), + Info: NestedInfo{ + Version: "old_version", + Build: 100, + }, + } +} + +func TestSet(t *testing.T) { + tests := []struct { + name string + path string + value any + expectedChanges []structdiff.Change + errorMsg string // if set, test expects an error containing this message + }{ + { + name: "set struct field by dot notation", + path: "name", + value: "NewName", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "NewName", + }, + }, + }, + { + name: "set struct field by bracket notation", + path: "['name']", + value: "BracketName", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "BracketName", + }, + }, + }, + { + name: "set top-level int field", + path: "age", + value: 30, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("age"), + Old: 25, + New: 30, + }, + }, + }, + { + name: "set nested struct field", + path: "info.version", + value: "new_version", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("info.version"), + Old: "old_version", 
+ New: "new_version", + }, + }, + }, + { + name: "set nested struct field with bracket notation", + path: "info['build']", + value: 200, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("info.build"), + Old: 100, + New: 200, + }, + }, + }, + { + name: "set map value by bracket notation", + path: "tags['version']", + value: "new_map_value", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("tags['version']"), + Old: nil, // new key + New: "new_map_value", + }, + }, + }, + { + name: "set map value by dot notation", + path: "tags.version", + value: "dot_map_value", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("tags['version']"), + Old: nil, // new key + New: "dot_map_value", + }, + }, + }, + { + name: "set array element", + path: "items[1]", + value: "new_item", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("items[1]"), + Old: "old_b", + New: "new_item", + }, + }, + }, + { + name: "set pointer field", + path: "count", + value: 42, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("count"), + Old: nil, // structdiff reports this as interface{}(nil) + New: intPtr(42), + }, + }, + }, + { + name: "set typedefed string with string", + path: "custom", + value: "new custom", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("custom"), + Old: CustomString("old custom"), + New: CustomString("new custom"), + }, + }, + }, + { + name: "set typedefed string with typedefed string", + path: "custom", + value: CustomString("typed custom"), + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("custom"), + Old: CustomString("old custom"), + New: CustomString("typed custom"), + }, + }, + }, + { + name: "error on non-existent field", + path: "nonexistent", + value: "value", + errorMsg: "field \"nonexistent\" not found in structaccess_test.TestStruct", + }, + { + name: "error on array index out of bounds", + path: "items[5]", + value: "value", + errorMsg: "index 5 out of range, length is 3", + }, + { + name: "error on setting root", + path: "", + value: "value", + errorMsg: "cannot set empty path", + }, + { + name: "error on wildcard", + path: "items[*]", + value: "value", + errorMsg: "wildcards not supported", + }, + { + name: "custom string to string field", + path: "name", + value: CustomString("custom to regular"), + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "custom to regular", + }, + }, + }, + { + name: "int to custom int field", + path: "age", + value: CustomInt(35), + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("age"), + Old: 25, + New: 35, + }, + }, + }, + { + name: "error on incompatible slice to string", + path: "name", + value: []int{1, 2, 3}, + errorMsg: "cannot convert []int to string", + }, + { + name: "error on string to int field", + path: "age", + value: "not a number", + errorMsg: "cannot parse \"not a number\" as int: strconv.ParseInt: parsing \"not a number\": invalid syntax", + }, + { + name: "set numeric string to int field", + path: "age", + value: "42", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("age"), + Old: 25, + New: 42, + }, + }, + }, + { + name: "set string 'true' to bool field (no change)", + path: "active", + value: "true", + expectedChanges: nil, // No changes because true → true + }, + { + name: "set string 'false' to bool field", + path: "active", + value: "false", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("active"), + Old: true, + New: 
false, + }, + }, + }, + { + name: "error on invalid string to bool field", + path: "active", + value: "bla", + errorMsg: "cannot parse \"bla\" as bool: strconv.ParseBool: parsing \"bla\": invalid syntax", + }, + { + name: "set numeric string to float field", + path: "score", + value: "3.14", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("score"), + Old: 85.5, + New: 3.14, + }, + }, + }, + { + name: "set zero string to float field", + path: "score", + value: "0", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("score"), + Old: 85.5, + New: 0.0, + }, + }, + }, + { + name: "error on invalid string to float field", + path: "score", + value: "bla", + errorMsg: "cannot parse \"bla\" as float64: strconv.ParseFloat: parsing \"bla\": invalid syntax", + }, + { + name: "set valid numeric string to uint8 field", + path: "priority", + value: "200", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("priority"), + Old: uint8(10), + New: uint8(200), + }, + }, + }, + { + name: "error on overflow string to uint8 field", + path: "priority", + value: "256", + errorMsg: "value 256 overflows uint8", + }, + { + name: "error on negative string to uint8 field", + path: "priority", + value: "-1", + errorMsg: "cannot parse \"-1\" as uint8: strconv.ParseUint: parsing \"-1\": invalid syntax", + }, + { + name: "set int value to string field", + path: "name", + value: 42, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "42", + }, + }, + }, + { + name: "set bool true to string field", + path: "name", + value: true, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "true", + }, + }, + }, + { + name: "set bool false to string field", + path: "name", + value: false, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "false", + }, + }, + }, + { + name: "set float64 to string field", + path: "name", + value: 3.14, + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "3.14", + }, + }, + }, + { + name: "set uint8 to string field", + path: "name", + value: uint8(200), + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("name"), + Old: "OldName", + New: "200", + }, + }, + }, + { + name: "set negative string to int field", + path: "age", + value: "-10", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("age"), + Old: 25, + New: -10, + }, + }, + }, + { + name: "set zero string to uint8 field", + path: "priority", + value: "0", + expectedChanges: []structdiff.Change{ + { + Path: mustParsePath("priority"), + Old: uint8(10), + New: uint8(0), + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create a fresh instance for this test + original := newTestStruct() + target := newTestStruct() + + err := structaccess.SetByString(target, tt.path, tt.value) + + if tt.errorMsg != "" { + // Test expects an error + require.Error(t, err) + assert.Equal(t, tt.errorMsg, err.Error()) + return + } + + // Test expects success + require.NoError(t, err) + + // Compare the actual changes using structdiff + changes, err := structdiff.GetStructDiff(original, target) + require.NoError(t, err) + assert.Equal(t, tt.expectedChanges, changes) + }) + } +} + +func intPtr(i int) *int { + return &i +} + +// testSet sets a value and gets it back, asserting they're equal (roundtrip) +func testSet(t *testing.T, obj any, path string, value any) { + 
+
+func intPtr(i int) *int {
+	return &i
+}
+
+// testSet sets a value and gets it back, asserting they're equal (roundtrip)
+func testSet(t *testing.T, obj any, path string, value any) {
+	t.Helper()
+	err := structaccess.SetByString(obj, path, value)
+	require.NoError(t, err, "SetByString(%T, %q, %#v)", obj, path, value)
+	got, err := structaccess.GetByString(obj, path)
+	require.NoError(t, err, "GetByString(%T, %q)", obj, path)
+	require.Equal(t, value, got, "SetByString(%T, %q, %#v) then GetByString should return same value", obj, path, value)
+}
+
+// testSetGet sets a value and gets it back, allowing different expected get value
+func testSetGet(t *testing.T, obj any, path string, setValue, expectedGetValue any) {
+	t.Helper()
+	err := structaccess.SetByString(obj, path, setValue)
+	require.NoError(t, err, "SetByString(%#v, %q, %#v)", obj, path, setValue)
+	got, err := structaccess.GetByString(obj, path)
+	require.NoError(t, err, "GetByString(%#v, %q)", obj, path)
+	require.Equal(t, expectedGetValue, got, "SetByString(%#v, %q, %#v) then GetByString should return %#v", obj, path, setValue, expectedGetValue)
+}
+
+func TestSetJobSettings(t *testing.T) {
+	jobSettings := jobs.JobSettings{
+		Name: "job foo",
+		Tasks: []jobs.Task{
+			{
+				TaskKey: "job_task",
+				RunJobTask: &jobs.RunJobTask{
+					JobId: 0, // This will be resolved from the reference
+				},
+			},
+		},
+	}
+
+	err := structaccess.SetByString(&jobSettings, "tasks[0].run_job_task.job_id", "123")
+	require.NoError(t, err)
+
+	require.Equal(t, &jobs.JobSettings{
+		Name: "job foo",
+		Tasks: []jobs.Task{
+			{
+				TaskKey: "job_task",
+				RunJobTask: &jobs.RunJobTask{
+					JobId: 123,
+				},
+			},
+		},
+	}, &jobSettings)
+}
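TestSetJobSettings exercises the same string coercion through a nested path. For orientation, the path forms these tests rely on, as a sketch (obj is hypothetical and stands for any pointer SetByString accepts, with the map/slice fields assumed to exist on it):

	// Illustrative path grammar, mirroring the cases above.
	_ = structaccess.SetByString(obj, "tags.version", "v1")    // map key, dot notation
	_ = structaccess.SetByString(obj, "tags['version']", "v1") // same key, bracket notation
	_ = structaccess.SetByString(obj, "items[1]", "new_item")  // slice index
	_ = structaccess.SetByString(obj, "tasks[0].run_job_task.job_id", "123") // nested field, string coerced to int64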
"inner_no_omit", + ForceSendFields: []string{}, + }, + } + + // Set empty string for outer field, but get nil back (omitempty, no ForceSendFields) + testSetGet(t, &obj, "outer_field_omit", "", nil) + // Outer has no ForceSendFields, so Inner.ForceSendFields unchanged + assert.Equal(t, []string{}, obj.ForceSendFields) + + // Set empty string for outer field no-omit - roundtrip "" -> "" + testSet(t, &obj, "outer_field_no_omit", "") + // Outer has no ForceSendFields, so Inner.ForceSendFields unchanged + assert.Equal(t, []string{}, obj.ForceSendFields) + + // Set empty string for inner field no-omit - roundtrip "" -> "" + testSet(t, &obj, "inner_field_no_omit", "") + // Inner has ForceSendFields but this field has no omitempty, so no change to ForceSendFields + assert.Equal(t, []string{}, obj.ForceSendFields) + + // Set empty string for inner field omit - should set field and add to Inner.ForceSendFields + testSet(t, &obj, "inner_field_omit", "") + assert.Equal(t, []string{"InnerFieldOmit"}, obj.ForceSendFields) + + // Repeat - should not duplicate in ForceSendFields + testSet(t, &obj, "inner_field_omit", "") + assert.Equal(t, []string{"InnerFieldOmit"}, obj.ForceSendFields) + }) +} + +func TestSet_MixedForceSendFields(t *testing.T) { + type First struct { + FirstFieldOmit string `json:"first_field_omit,omitempty"` + FirstFieldNoOmit string `json:"first_field_no_omit"` + } + + type Second struct { + SecondFieldOmit string `json:"second_field_omit,omitempty"` + SecondFieldNoOmit string `json:"second_field_no_omit"` + ForceSendFields []string `json:"-"` + } + + type Outer struct { + OuterFieldOmit string `json:"outer_field_omit,omitempty"` + OuterFieldNoOmit string `json:"outer_field_no_omit"` + ForceSendFields []string `json:"-"` + First + Second + } + + t.Run("set nil", func(t *testing.T) { + obj := Outer{ + OuterFieldOmit: "outer_value", + OuterFieldNoOmit: "outer_no_omit", + ForceSendFields: []string{"OuterFieldOmit", "FirstFieldOmit"}, + First: First{ + FirstFieldOmit: "first_value", + FirstFieldNoOmit: "first_no_omit", + }, + Second: Second{ + SecondFieldOmit: "second_value", + SecondFieldNoOmit: "second_no_omit", + ForceSendFields: []string{"SecondFieldOmit"}, + }, + } + + // Set nil for outer field - should clear and remove from Outer.ForceSendFields + testSet(t, &obj, "outer_field_omit", nil) + assert.Equal(t, []string{"FirstFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) + + // Set nil for outer field no-omit - roundtrip nil -> "" + testSetGet(t, &obj, "outer_field_no_omit", nil, "") + assert.Equal(t, []string{"FirstFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) + + // Set nil for first field no-omit - roundtrip nil -> "" + testSetGet(t, &obj, "first_field_no_omit", nil, "") + assert.Equal(t, []string{"FirstFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) + + // Set nil for first field omit - should clear field but NOT affect any ForceSendFields + // (First has no ForceSendFields, so nothing to manage there) + testSet(t, &obj, "first_field_omit", nil) + assert.Equal(t, []string{"FirstFieldOmit"}, obj.ForceSendFields) // unchanged - First fields don't belong to Outer's ForceSendFields management + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) + + // Set nil for second field no-omit - roundtrip nil -> "" + testSetGet(t, &obj, "second_field_no_omit", nil, "") + assert.Equal(t, 
[]string{"FirstFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) + + // Set nil for second field omit - should clear and remove from Second.ForceSendFields + // (Second owns its own fields since it has ForceSendFields) + testSet(t, &obj, "second_field_omit", nil) + assert.Equal(t, []string{"FirstFieldOmit"}, obj.ForceSendFields) // unchanged + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + + // Repeat operations - should be idempotent + testSet(t, &obj, "first_field_omit", nil) + testSet(t, &obj, "second_field_omit", nil) + assert.Equal(t, []string{"FirstFieldOmit"}, obj.ForceSendFields) // unchanged + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + }) + + t.Run("set empty", func(t *testing.T) { + obj := Outer{ + OuterFieldOmit: "outer_value", + OuterFieldNoOmit: "outer_no_omit", + ForceSendFields: []string{}, + First: First{ + FirstFieldOmit: "first_value", + FirstFieldNoOmit: "first_no_omit", + }, + Second: Second{ + SecondFieldOmit: "second_value", + SecondFieldNoOmit: "second_no_omit", + ForceSendFields: []string{}, + }, + } + + // Set empty for outer field omit - should zero and add to Outer.ForceSendFields + testSet(t, &obj, "outer_field_omit", "") + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + + // Set empty for outer field no-omit - roundtrip "" -> "" + testSet(t, &obj, "outer_field_no_omit", "") + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + + // Set empty for first field no-omit - roundtrip "" -> "" + testSet(t, &obj, "first_field_no_omit", "") + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + + // Set empty for first field omit - should zero field but get nil back (no ForceSendFields to manage) + // (First has no ForceSendFields, so empty value won't survive roundtrip) + testSetGet(t, &obj, "first_field_omit", "", nil) + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) // unchanged - First fields don't belong to Outer's ForceSendFields management + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + + // Set empty for second field no-omit - roundtrip "" -> "" + testSet(t, &obj, "second_field_no_omit", "") + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{}, obj.Second.ForceSendFields) + + // Set empty for second field omit - should zero and add to Second.ForceSendFields + // (Second owns its own fields since it has ForceSendFields) + testSet(t, &obj, "second_field_omit", "") + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) + + // Repeat operations - should not duplicate + testSet(t, &obj, "second_field_omit", "") + assert.Equal(t, []string{"OuterFieldOmit"}, obj.ForceSendFields) + assert.Equal(t, []string{"SecondFieldOmit"}, obj.Second.ForceSendFields) // no duplicates + }) +} diff --git a/libs/structs/structvar/structvar.go b/libs/structs/structvar/structvar.go new file mode 100644 index 0000000000..a4e54d7969 --- /dev/null +++ b/libs/structs/structvar/structvar.go @@ -0,0 +1,83 @@ +package structvar + +import ( + "errors" + "fmt" + "strings" + + "github.com/databricks/cli/libs/dyn/dynvar" + "github.com/databricks/cli/libs/structs/structaccess" + "github.com/databricks/cli/libs/structs/structpath" +) + +// 
+// StructVar holds a struct together with a map of unresolved variable
+// references that point into that struct.
+type StructVar struct {
+	Config any `json:"config"`
+
+	// Refs holds unresolved references. The key is a serialized PathNode
+	// pointing inside the struct (e.g. "name"); the value is either a pure
+	// reference ("${resources.foo.jobs.id}") or a string containing one or
+	// more references ("${a} ${b}").
+	Refs map[string]string `json:"vars,omitempty"`
+}
+
+var ErrNotFound = errors.New("reference not found")
+
+// ResolveRef resolves the given reference wherever it occurs in Refs values.
+// It searches the Refs map for values that contain the reference, performs
+// the replacement, writes the resolved value into Config, and removes fully
+// resolved entries from Refs.
+// Returns ErrNotFound if no Refs value mentions the reference, or an error
+// if setting the value fails.
+func (sv *StructVar) ResolveRef(reference string, value any) error {
+	var foundAny bool
+
+	// Find all refs that contain this reference.
+	// TODO: a reverse index (reference -> path keys) would avoid this scan.
+	for pathKey, refValue := range sv.Refs {
+		if !strings.Contains(refValue, reference) {
+			continue
+		}
+
+		foundAny = true
+
+		// Parse the path
+		pathNode, err := structpath.Parse(pathKey)
+		if err != nil {
+			return fmt.Errorf("invalid path %q: %w", pathKey, err)
+		}
+
+		// Check if this is a pure reference (reference equals the entire value)
+		if refValue == reference {
+			// Pure reference - use original typed value
+			err = structaccess.Set(sv.Config, pathNode, value)
+			if err != nil {
+				return fmt.Errorf("cannot set (%T).%s to %T (%#v): %w", sv.Config, pathNode.String(), value, value, err)
+			}
+			// Remove the fully resolved reference
+			delete(sv.Refs, pathKey)
+		} else {
+			// Partial reference or multiple references - do string replacement
+			valueStr, err := structaccess.ConvertToString(value)
+			if err != nil {
+				return fmt.Errorf("cannot set %s to %T (%#v): %w", pathNode.String(), value, value, err)
+			}
+			newValue := strings.ReplaceAll(refValue, reference, valueStr)
+
+			// Set the updated string value
+			err = structaccess.Set(sv.Config, pathNode, newValue)
+			if err != nil {
+				return fmt.Errorf("cannot update %s to string: %w", pathNode.String(), err)
+			}
+
+			// Check if fully resolved (no more ${} patterns)
+			if !dynvar.ContainsVariableReference(newValue) {
+				delete(sv.Refs, pathKey)
+			} else {
+				sv.Refs[pathKey] = newValue
+			}
+		}
+	}
+
+	if !foundAny {
+		return ErrNotFound
+	}
+
+	return nil
+}
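The TODO in ResolveRef points at the obvious optimization: every call currently scans all of Refs. One possible shape for such an index, purely a sketch (the extract callback stands in for whatever reference-listing helper is available; none is assumed here):

	// refIndex maps a reference such as "${var.name}" to the path keys whose
	// values mention it, replacing the linear scan in ResolveRef. It would
	// need updating as partial resolutions rewrite Refs values.
	type refIndex map[string][]string

	func buildRefIndex(refs map[string]string, extract func(string) []string) refIndex {
		idx := make(refIndex)
		for pathKey, value := range refs {
			for _, ref := range extract(value) { // extract returns all ${...} references in value
				idx[ref] = append(idx[ref], pathKey)
			}
		}
		return idx
	}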
diff --git a/libs/structs/structvar/structvar_test.go b/libs/structs/structvar/structvar_test.go
new file mode 100644
index 0000000000..aba498eeea
--- /dev/null
+++ b/libs/structs/structvar/structvar_test.go
@@ -0,0 +1,206 @@
+package structvar_test
+
+import (
+	"testing"
+
+	"github.com/databricks/cli/libs/structs/structvar"
+	"github.com/databricks/databricks-sdk-go/service/jobs"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+type TestObj struct {
+	Name string            `json:"name"`
+	Age  int               `json:"age"`
+	Tags map[string]string `json:"tags"`
+}
+
+// newTestStructVar creates a fresh StructVar instance for testing
+func newTestStructVar() *structvar.StructVar {
+	return &structvar.StructVar{
+		Config: &TestObj{
+			Name: "OldName",
+			Age:  25,
+			Tags: map[string]string{
+				"env": "old_env",
+			},
+		},
+		Refs: map[string]string{
+			"name":            "${var.name}",
+			"age":             "${var.age}",
+			"tags['version']": "${var.version}",
+		},
+	}
+}
+
+func TestResolveRef(t *testing.T) {
+	tests := []struct {
+		name         string
+		setup        func() *structvar.StructVar // custom setup for special cases
+		reference    string
+		value        any
+		expectedObj  *TestObj
+		expectedRefs map[string]string
+		errorMsg     string // if set, test expects an error containing this message
+	}{
+		{
+			name:      "resolve simple field reference",
+			reference: "${var.name}",
+			value:     "NewName",
+			expectedObj: &TestObj{
+				Name: "NewName",
+				Age:  25,
+				Tags: map[string]string{
+					"env": "old_env",
+				},
+			},
+			expectedRefs: map[string]string{
+				"age":             "${var.age}",
+				"tags['version']": "${var.version}",
+			},
+		},
+		{
+			name:      "resolve age field reference",
+			reference: "${var.age}",
+			value:     99,
+			expectedObj: &TestObj{
+				Name: "OldName",
+				Age:  99,
+				Tags: map[string]string{
+					"env": "old_env",
+				},
+			},
+			expectedRefs: map[string]string{
+				"name":            "${var.name}",
+				"tags['version']": "${var.version}",
+			},
+		},
+		{
+			name:      "resolve map field reference",
+			reference: "${var.version}",
+			value:     "new_version",
+			expectedObj: &TestObj{
+				Name: "OldName",
+				Age:  25,
+				Tags: map[string]string{
+					"env":     "old_env",
+					"version": "new_version",
+				},
+			},
+			expectedRefs: map[string]string{
+				"name": "${var.name}",
+				"age":  "${var.age}",
+			},
+		},
+		{
+			name:      "reference not found returns error",
+			reference: "${var.nonexistent}",
+			value:     "NewName",
+			errorMsg:  "reference not found",
+		},
+		{
+			name: "error on invalid path",
+			setup: func() *structvar.StructVar {
+				return &structvar.StructVar{
+					Config: &TestObj{},
+					Refs: map[string]string{
+						"invalid[path": "${var.test}",
+					},
+				}
+			},
+			reference: "${var.test}",
+			value:     "value",
+			errorMsg:  "unexpected character",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			// Create instance for this test
+			var sv *structvar.StructVar
+			if tt.setup != nil {
+				sv = tt.setup()
+			} else {
+				sv = newTestStructVar()
+			}
+
+			err := sv.ResolveRef(tt.reference, tt.value)
+
+			if tt.errorMsg != "" {
+				require.Error(t, err)
+				assert.Contains(t, err.Error(), tt.errorMsg)
+				return
+			}
+
+			require.NoError(t, err)
+			assert.Equal(t, tt.expectedObj, sv.Config)
+			assert.Equal(t, tt.expectedRefs, sv.Refs)
+		})
+	}
+}
+
+func TestResolveRefMultiReference(t *testing.T) {
+	sv := &structvar.StructVar{
+		Config: &TestObj{
+			Name: "OldName",
+		},
+		Refs: map[string]string{
+			"name": "${var.prefix} ${var.suffix}",
+		},
+	}
+
+	// Resolve one reference
+	err := sv.ResolveRef("${var.prefix}", "Hello")
+	require.NoError(t, err)
+
+	// The value should be partially resolved
+	assert.Equal(t, "Hello ${var.suffix}", sv.Config.(*TestObj).Name)
+	assert.Equal(t, map[string]string{
+		"name": "Hello ${var.suffix}", // partially resolved
+	}, sv.Refs)
+
+	// Resolve the remaining reference
+	err = sv.ResolveRef("${var.suffix}", "World")
+	require.NoError(t, err)
+
+	// Now it should be fully resolved
+	assert.Equal(t, "Hello World", sv.Config.(*TestObj).Name)
+	assert.Equal(t, map[string]string{}, sv.Refs) // fully resolved, reference removed
+}
+
+func TestResolveRefJobSettings(t *testing.T) {
+	// Create a realistic JobSettings value with a nested reference target
+	jobSettings := jobs.JobSettings{
+		Name: "job foo",
+		Tasks: []jobs.Task{
+			{
+				TaskKey: "job_task",
+				RunJobTask: &jobs.RunJobTask{
+					JobId: 0, // This will be resolved from the reference
+				},
+			},
+		},
+	}
+
+	sv := &structvar.StructVar{
+		Config: &jobSettings,
+		Refs: map[string]string{
+			"tasks[0].run_job_task.job_id": "${resources.jobs.bar.id}",
+		},
+	}
+
+	// Resolve the reference with a realistic job ID value (a string that is coerced to int64)
+	err := sv.ResolveRef("${resources.jobs.bar.id}", "123")
+	require.NoError(t, err)
+
+	// Verify the job ID was set correctly
+	updatedSettings := sv.Config.(*jobs.JobSettings)
+	assert.Equal(t, "job foo", updatedSettings.Name)
+	assert.Len(t, updatedSettings.Tasks, 1)
+	assert.Equal(t, "job_task", updatedSettings.Tasks[0].TaskKey)
+	assert.NotNil(t, updatedSettings.Tasks[0].RunJobTask)
+	assert.Equal(t, int64(123), updatedSettings.Tasks[0].RunJobTask.JobId)
+
+	// Verify the reference was removed after resolution
+	assert.Empty(t, sv.Refs)
+}
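One usage note grounded in the API above: ResolveRef returns the exported ErrNotFound sentinel unwrapped when no Refs value mentions the reference, so callers that resolve speculatively can filter it with errors.Is (the reference name below is hypothetical):

	err := sv.ResolveRef("${var.maybe_unused}", "x")
	if errors.Is(err, structvar.ErrNotFound) {
		// no ref mentions this reference; nothing to do
	} else if err != nil {
		return err
	}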