diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 03f2ca3975..8762696413 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -12,6 +12,7 @@ ### Dependency updates ### Bundles +* Added support for --bind flag in `bundle generate` ([#3782](https://github.com/databricks/cli/pull/3782)) * Add `pydabs` template replacing `experimental-jobs-as-code` template ([#3806](https://github.com/databricks/cli/pull/3806)) * You can now use `python` section instead of `experimental/python` ([#3540](https://github.com/databricks/cli/pull/3540)) diff --git a/acceptance/bundle/deployment/bind/alert/output.txt b/acceptance/bundle/deployment/bind/alert/output.txt index e6f74a8d88..d37176b247 100644 --- a/acceptance/bundle/deployment/bind/alert/output.txt +++ b/acceptance/bundle/deployment/bind/alert/output.txt @@ -3,7 +3,8 @@ >>> [CLI] bundle deployment bind my_alert [UUID] --auto-approve Updating deployment state... -Successfully bound alert with an id '[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound alert with an id '[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary Name: test-bundle-$UNIQUE_NAME diff --git a/acceptance/bundle/deployment/bind/cluster/output.txt b/acceptance/bundle/deployment/bind/cluster/output.txt index 9572504bc9..aeb195f0f1 100644 --- a/acceptance/bundle/deployment/bind/cluster/output.txt +++ b/acceptance/bundle/deployment/bind/cluster/output.txt @@ -6,7 +6,8 @@ >>> [CLI] bundle deployment bind cluster1 [CLUSTER-ID] --auto-approve Updating deployment state... -Successfully bound cluster with an id '[CLUSTER-ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound cluster with an id '[CLUSTER-ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deployment unbind cluster1 Updating deployment state... diff --git a/acceptance/bundle/deployment/bind/dashboard/output.txt b/acceptance/bundle/deployment/bind/dashboard/output.txt index 0ccd387124..3926b8f1f9 100644 --- a/acceptance/bundle/deployment/bind/dashboard/output.txt +++ b/acceptance/bundle/deployment/bind/dashboard/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind dashboard1 [DASHBOARD_ID] --auto-approve Updating deployment state... -Successfully bound dashboard with an id '[DASHBOARD_ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound dashboard with an id '[DASHBOARD_ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt b/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt index 5b58737491..7f2651fc6c 100644 --- a/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt +++ b/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind dashboard1 [DASHBOARD_ID] --auto-approve Updating deployment state... -Successfully bound dashboard with an id '[DASHBOARD_ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound dashboard with an id '[DASHBOARD_ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> errcode [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... 
diff --git a/acceptance/bundle/deployment/bind/database_instance/output.txt b/acceptance/bundle/deployment/bind/database_instance/output.txt index 7b3477b4d9..71309cdc88 100644 --- a/acceptance/bundle/deployment/bind/database_instance/output.txt +++ b/acceptance/bundle/deployment/bind/database_instance/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind database_instance1 [UUID] --auto-approve Updating deployment state... -Successfully bound database_instance with an id '[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound database_instance with an id '[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary Name: test-bundle-$UNIQUE_NAME diff --git a/acceptance/bundle/deployment/bind/experiment/output.txt b/acceptance/bundle/deployment/bind/experiment/output.txt index ac57ce8b85..b783247880 100644 --- a/acceptance/bundle/deployment/bind/experiment/output.txt +++ b/acceptance/bundle/deployment/bind/experiment/output.txt @@ -3,7 +3,8 @@ === Substitute variables in the template === Create a pre-defined experiment === Bind experiment: Updating deployment state... -Successfully bound experiment with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound experiment with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace === Deploy bundle: Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... Deploying resources... diff --git a/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt b/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt index e8a39ae2d1..a9b3e047cf 100644 --- a/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt +++ b/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt @@ -18,7 +18,8 @@ test.py >>> [CLI] bundle deployment bind test_job_key [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-generate-bind-[UNIQUE_NAME]/files... diff --git a/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt b/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt index 421e0e25de..3ea190bbf8 100644 --- a/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt +++ b/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt @@ -5,7 +5,8 @@ Created job with ID: [NUMID] === Bind job: >>> [CLI] bundle deployment bind foo [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace === Remove .databricks directory to simulate fresh deployment: >>> rm -rf .databricks diff --git a/acceptance/bundle/deployment/bind/job/noop-job/output.txt b/acceptance/bundle/deployment/bind/job/noop-job/output.txt index bb859490ef..2f4cf39310 100644 --- a/acceptance/bundle/deployment/bind/job/noop-job/output.txt +++ b/acceptance/bundle/deployment/bind/job/noop-job/output.txt @@ -3,7 +3,8 @@ >>> [CLI] bundle deployment bind job_1 [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. 
Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/my_project/default/files... diff --git a/acceptance/bundle/deployment/bind/job/python-job/output.txt b/acceptance/bundle/deployment/bind/job/python-job/output.txt index e5f4483ed4..285ccf9030 100644 --- a/acceptance/bundle/deployment/bind/job/python-job/output.txt +++ b/acceptance/bundle/deployment/bind/job/python-job/output.txt @@ -3,7 +3,8 @@ >>> uv run --with [DATABRICKS_BUNDLES_WHEEL] -q [CLI] bundle deployment bind job_1 [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace >>> uv run --with [DATABRICKS_BUNDLES_WHEEL] -q [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/my_project/default/files... diff --git a/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt b/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt index 68bad006e6..2516ab002f 100644 --- a/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt +++ b/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt @@ -29,7 +29,8 @@ resources: >>> [CLI] bundle deployment bind endpoint1 test-endpoint-[UUID] Updating deployment state... -Successfully bound model_serving_endpoint with an id 'test-endpoint-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound model_serving_endpoint with an id 'test-endpoint-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/quality-monitor/output.txt b/acceptance/bundle/deployment/bind/quality-monitor/output.txt index e6551c31c8..e45b0350b5 100644 --- a/acceptance/bundle/deployment/bind/quality-monitor/output.txt +++ b/acceptance/bundle/deployment/bind/quality-monitor/output.txt @@ -13,7 +13,8 @@ >>> [CLI] bundle deployment bind monitor1 catalog.schema.table Updating deployment state... -Successfully bound quality_monitor with an id 'catalog.schema.table'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound quality_monitor with an id 'catalog.schema.table' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-quality-monitor-test-localonly/default/files... diff --git a/acceptance/bundle/deployment/bind/registered-model/output.txt b/acceptance/bundle/deployment/bind/registered-model/output.txt index 4d8c1e7482..fb9dd1c56d 100644 --- a/acceptance/bundle/deployment/bind/registered-model/output.txt +++ b/acceptance/bundle/deployment/bind/registered-model/output.txt @@ -15,7 +15,8 @@ resources: >>> [CLI] bundle deployment bind model1 main.test-schema-rmodel-[UUID].test-registered-model-[UUID] Updating deployment state... -Successfully bound registered_model with an id 'main.test-schema-rmodel-[UUID].test-registered-model-[UUID]'. 
Run 'bundle deploy' to deploy changes to your workspace +Successfully bound registered_model with an id 'main.test-schema-rmodel-[UUID].test-registered-model-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/schema/output.txt b/acceptance/bundle/deployment/bind/schema/output.txt index 4023996156..2e6976fb58 100644 --- a/acceptance/bundle/deployment/bind/schema/output.txt +++ b/acceptance/bundle/deployment/bind/schema/output.txt @@ -7,7 +7,8 @@ } === Bind schema: Updating deployment state... -Successfully bound schema with an id 'main.test-schema-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound schema with an id 'main.test-schema-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace === Deploy bundle: Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... Deploying resources... diff --git a/acceptance/bundle/deployment/bind/secret-scope/output.txt b/acceptance/bundle/deployment/bind/secret-scope/output.txt index 53f66c3c8c..2a0433bb17 100644 --- a/acceptance/bundle/deployment/bind/secret-scope/output.txt +++ b/acceptance/bundle/deployment/bind/secret-scope/output.txt @@ -3,7 +3,8 @@ >>> [CLI] bundle deployment bind secret_scope1 test-secret-scope-[UUID] --auto-approve Updating deployment state... -Successfully bound secret_scope with an id 'test-secret-scope-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound secret_scope with an id 'test-secret-scope-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-secret-scope-test-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/sql_warehouse/output.txt b/acceptance/bundle/deployment/bind/sql_warehouse/output.txt index aedea6082b..67a882e2c1 100644 --- a/acceptance/bundle/deployment/bind/sql_warehouse/output.txt +++ b/acceptance/bundle/deployment/bind/sql_warehouse/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind sql_warehouse1 [SQL-WAREHOUSE-ID] --auto-approve Updating deployment state... -Successfully bound sql_warehouse with an id '[SQL-WAREHOUSE-ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound sql_warehouse with an id '[SQL-WAREHOUSE-ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary Name: test-bundle-$UNIQUE_NAME diff --git a/acceptance/bundle/deployment/bind/volume/output.txt b/acceptance/bundle/deployment/bind/volume/output.txt index b065873604..c7c18e5e33 100644 --- a/acceptance/bundle/deployment/bind/volume/output.txt +++ b/acceptance/bundle/deployment/bind/volume/output.txt @@ -8,7 +8,8 @@ === Create a pre-defined volume: >>> [CLI] bundle deployment bind volume1 main.test-schema-[UUID].volume-[UUID] --auto-approve Updating deployment state... -Successfully bound volume with an id 'main.test-schema-[UUID].volume-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound volume with an id 'main.test-schema-[UUID].volume-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... 
diff --git a/acceptance/bundle/generate/auto-bind/databricks.yml.tmpl b/acceptance/bundle/generate/auto-bind/databricks.yml.tmpl new file mode 100644 index 0000000000..c58a4bbd14 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/databricks.yml.tmpl @@ -0,0 +1,8 @@ +bundle: + name: auto-bind-test + +workspace: + root_path: /tmp/${UNIQUE_NAME} + +include: + - resources/*.yml diff --git a/acceptance/bundle/generate/auto-bind/out.test.toml b/acceptance/bundle/generate/auto-bind/out.test.toml new file mode 100644 index 0000000000..3cdb920b67 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/out.test.toml @@ -0,0 +1,5 @@ +Local = false +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/acceptance/bundle/generate/auto-bind/output.txt b/acceptance/bundle/generate/auto-bind/output.txt new file mode 100644 index 0000000000..652ee7d545 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/output.txt @@ -0,0 +1,48 @@ + +=== Create a pre-defined job: +Created job with ID: [NUMID] + +>>> [CLI] workspace mkdirs /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME] + +>>> [CLI] workspace import /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME]/test --file test.py --language PYTHON + +=== Generate and bind in one step: +>>> [CLI] bundle generate job --key test_job --existing-job-id [NUMID] --config-dir resources --source-dir src --bind +File successfully saved to src/test.py +Job configuration successfully saved to resources/test_job.job.yml +Updating deployment state... +Successfully bound job with an id '[NUMID]' + +>>> ls src/ +test.py + +>>> cat resources/test_job.job.yml + name: auto-bind-job-[UNIQUE_NAME] + +=== Deploy the bound job: +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/tmp/[UNIQUE_NAME]/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Destroy the bundle: +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete job test_job + +All files and directories at the following location will be deleted: /Workspace/tmp/[UNIQUE_NAME] + +Deleting files... +Destroy complete! + +=== Check that job is bound and does not exist after bundle is destroyed: +>>> errcode [CLI] jobs get [NUMID] --output json +Error: Job [NUMID] does not exist. 
+ +Exit code: 1 + +=== Delete the tmp folder: +>>> [CLI] workspace delete /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME]/test + +>>> [CLI] workspace delete /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME] diff --git a/acceptance/bundle/generate/auto-bind/script b/acceptance/bundle/generate/auto-bind/script new file mode 100755 index 0000000000..f6517a8adb --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/script @@ -0,0 +1,51 @@ +title "Create a pre-defined job:\n" + +PYTHON_NOTEBOOK_DIR="/Workspace/Users/${CURRENT_USER_NAME}/python-${UNIQUE_NAME}" +PYTHON_NOTEBOOK="${PYTHON_NOTEBOOK_DIR}/test" + +JOB_ID=$($CLI jobs create --json ' +{ + "name": "auto-bind-job-'${UNIQUE_NAME}'", + "tasks": [ + { + "task_key": "test", + "new_cluster": { + "spark_version": "'${DEFAULT_SPARK_VERSION}'", + "node_type_id": "'${NODE_TYPE_ID}'", + "num_workers": 1 + }, + "notebook_task": { + "notebook_path": "'${PYTHON_NOTEBOOK}'" + } + } + ] +}' | jq -r '.job_id') + +echo "Created job with ID: $JOB_ID" + +envsubst < databricks.yml.tmpl > databricks.yml + +cleanup() { + title "Delete the tmp folder:" + trace $CLI workspace delete ${PYTHON_NOTEBOOK} + trace $CLI workspace delete ${PYTHON_NOTEBOOK_DIR} +} +trap cleanup EXIT + +trace $CLI workspace mkdirs "${PYTHON_NOTEBOOK_DIR}" +trace $CLI workspace import "${PYTHON_NOTEBOOK}" --file test.py --language PYTHON + +title "Generate and bind in one step:" +trace $CLI bundle generate job --key test_job --existing-job-id $JOB_ID --config-dir resources --source-dir src --bind +trace ls src/ +# The generated job config differs per cloud, so we only check the name. +trace cat resources/test_job.job.yml | grep "name: auto-bind-job-${UNIQUE_NAME}" + +title "Deploy the bound job:" +trace $CLI bundle deploy + +title "Destroy the bundle:" +trace $CLI bundle destroy --auto-approve + +title "Check that job is bound and does not exist after bundle is destroyed:" +trace errcode $CLI jobs get "${JOB_ID}" --output json diff --git a/acceptance/bundle/generate/auto-bind/test.py b/acceptance/bundle/generate/auto-bind/test.py new file mode 100644 index 0000000000..8cfae7c74f --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/test.py @@ -0,0 +1,2 @@ +# Databricks notebook source +print("Test notebook") diff --git a/acceptance/bundle/generate/auto-bind/test.toml b/acceptance/bundle/generate/auto-bind/test.toml new file mode 100644 index 0000000000..9272f7bfd0 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/test.toml @@ -0,0 +1,26 @@ +# This test uses the workspace import API to load a notebook file. +# This API has its own logic for recognizing notebook files and distinguishing them from regular Python files. +# To succeed locally we would need to replicate this logic in the fake_workspace. +Local = false +Cloud = true + +Ignore = [ + "databricks.yml", + "resources/*", + "src/*", + ".databricks", +] + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] + + +[Env] +# MSYS2 automatically converts absolute paths like /Users/$username/$UNIQUE_NAME to +# C:/Program Files/Git/Users/$username/UNIQUE_NAME before passing them to the CLI. +# Setting this environment variable prevents that conversion on Windows.
+MSYS_NO_PATHCONV = "1" + +[[Repls]] +Old = '\\' +New = '/' diff --git a/acceptance/bundle/help/bundle-generate-dashboard/output.txt b/acceptance/bundle/help/bundle-generate-dashboard/output.txt index 41ed2d6c02..3f5f4bf1c3 100644 --- a/acceptance/bundle/help/bundle-generate-dashboard/output.txt +++ b/acceptance/bundle/help/bundle-generate-dashboard/output.txt @@ -36,6 +36,7 @@ Usage: databricks bundle generate dashboard [flags] Flags: + -b, --bind automatically bind the generated dashboard config to the existing dashboard -s, --dashboard-dir string directory to write the dashboard representation to (default "src") --existing-id string ID of the dashboard to generate configuration for --existing-path string workspace path of the dashboard to generate configuration for diff --git a/acceptance/bundle/help/bundle-generate-job/output.txt b/acceptance/bundle/help/bundle-generate-job/output.txt index feadcb0713..ba126d29ad 100644 --- a/acceptance/bundle/help/bundle-generate-job/output.txt +++ b/acceptance/bundle/help/bundle-generate-job/output.txt @@ -13,6 +13,9 @@ Examples: databricks bundle generate job --existing-job-id 67890 \ --key data_pipeline --config-dir resources --source-dir src + # Generate and automatically bind to the existing job + databricks bundle generate job --existing-job-id 12345 --key my_etl_job --bind + What gets generated: - Job configuration YAML file in the resources directory - Any associated notebook or Python files in the source directory @@ -25,6 +28,7 @@ Usage: databricks bundle generate job [flags] Flags: + -b, --bind automatically bind the generated resource to the existing resource -d, --config-dir string Dir path where the output config will be stored (default "resources") --existing-job-id int Job ID of the job to generate config for -f, --force Force overwrite existing files in the output directory diff --git a/acceptance/bundle/help/bundle-generate-pipeline/output.txt b/acceptance/bundle/help/bundle-generate-pipeline/output.txt index f05ce2852d..927eb2c653 100644 --- a/acceptance/bundle/help/bundle-generate-pipeline/output.txt +++ b/acceptance/bundle/help/bundle-generate-pipeline/output.txt @@ -14,6 +14,9 @@ Examples: databricks bundle generate pipeline --existing-pipeline-id def456 \ --key data_transformation --config-dir resources --source-dir src + # Generate and automatically bind to the existing pipeline + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline --bind + What gets generated: - Pipeline configuration YAML file with settings and libraries - Pipeline notebooks downloaded to the source directory @@ -25,6 +28,7 @@ Usage: databricks bundle generate pipeline [flags] Flags: + -b, --bind automatically bind the generated resource to the existing resource -d, --config-dir string Dir path where the output config will be stored (default "resources") --existing-pipeline-id string ID of the pipeline to generate config for -f, --force Force overwrite existing files in the output directory diff --git a/acceptance/bundle/help/bundle-generate/output.txt b/acceptance/bundle/help/bundle-generate/output.txt index 13f1318184..9dedd80b39 100644 --- a/acceptance/bundle/help/bundle-generate/output.txt +++ b/acceptance/bundle/help/bundle-generate/output.txt @@ -4,16 +4,24 @@ Generate bundle configuration from existing Databricks resources. 
Common patterns: databricks bundle generate job --existing-job-id 123 --key my_job + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline databricks bundle generate dashboard --existing-path /my-dashboard --key sales_dash databricks bundle generate dashboard --resource my_dashboard --watch --force # Keep local copy in sync. Useful for development. databricks bundle generate dashboard --resource my_dashboard --force # Do a one-time sync. -Complete migration workflow: - 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job - 2. Bind: databricks bundle deployment bind my_job 123 - 3. Deploy: databricks bundle deploy +Migration workflows: + + Two-step workflow (manual bind): + 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job + 2. Bind: databricks bundle deployment bind my_job 123 + 3. Deploy: databricks bundle deploy + + One-step workflow (automatic bind): + 1. Generate and bind: databricks bundle generate job --existing-job-id 123 --key my_job --bind + 2. Deploy: databricks bundle deploy Use --key to specify the resource name in your bundle configuration. +Use --bind to automatically bind the generated resource to the existing workspace resource. Usage: databricks bundle generate [command] diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 8b9129a2ae..fcfb9c900a 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -1,13 +1,6 @@ package deployment import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/deploy/terraform" - "github.com/databricks/cli/bundle/phases" - "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/logdiag" @@ -61,48 +54,12 @@ Any manual changes made in the workspace UI may be overwritten on deployment.`, ctx := logdiag.InitContext(cmd.Context()) cmd.SetContext(ctx) - b := utils.ConfigureBundleWithVariables(cmd) - if b == nil || logdiag.HasError(ctx) { - return root.ErrAlreadyPrinted - } - ctx = cmd.Context() - - phases.Initialize(ctx, b) - if logdiag.HasError(ctx) { - return root.ErrAlreadyPrinted - } - - resource, err := b.Config.Resources.FindResourceByConfigKey(args[0]) + err := BindResource(cmd, args[0], args[1], autoApprove, forceLock) if err != nil { return err } - w := b.WorkspaceClient() - exists, err := resource.Exists(ctx, w, args[1]) - if err != nil { - return fmt.Errorf("failed to fetch the resource, err: %w", err) - } - - if !exists { - return fmt.Errorf("%s with an id '%s' is not found", resource.ResourceDescription().SingularName, args[1]) - } - - bundle.ApplyFuncContext(ctx, b, func(context.Context, *bundle.Bundle) { - b.Config.Bundle.Deployment.Lock.Force = forceLock - }) - - tfName := terraform.GroupToTerraformName[resource.ResourceDescription().PluralName] - phases.Bind(ctx, b, &terraform.BindOptions{ - AutoApprove: autoApprove, - ResourceType: tfName, - ResourceKey: args[0], - ResourceId: args[1], - }) - if logdiag.HasError(ctx) { - return root.ErrAlreadyPrinted - } - - cmdio.LogString(ctx, fmt.Sprintf("Successfully bound %s with an id '%s'. 
Run 'bundle deploy' to deploy changes to your workspace", resource.ResourceDescription().SingularName, args[1])) + cmdio.LogString(ctx, "Run 'bundle deploy' to deploy changes to your workspace") return nil } diff --git a/cmd/bundle/deployment/bind_resource.go b/cmd/bundle/deployment/bind_resource.go new file mode 100644 index 0000000000..96f9236cb3 --- /dev/null +++ b/cmd/bundle/deployment/bind_resource.go @@ -0,0 +1,65 @@ +package deployment + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/logdiag" + "github.com/spf13/cobra" +) + +// BindResource binds a bundle resource to an existing workspace resource. +// This function is shared between the bind command and generate commands with --bind flag. +func BindResource(cmd *cobra.Command, resourceKey, resourceId string, autoApprove, forceLock bool) error { + ctx := cmd.Context() + // Reload the bundle configuration to ensure we're using the latest configuration. + b := utils.ReloadBundle(cmd) + if b == nil || logdiag.HasError(ctx) { + return root.ErrAlreadyPrinted + } + + ctx = cmd.Context() + phases.Initialize(ctx, b) + if logdiag.HasError(ctx) { + return root.ErrAlreadyPrinted + } + + resource, err := b.Config.Resources.FindResourceByConfigKey(resourceKey) + if err != nil { + return err + } + + w := b.WorkspaceClient() + exists, err := resource.Exists(ctx, w, resourceId) + if err != nil { + return fmt.Errorf("failed to fetch the resource, err: %w", err) + } + + if !exists { + return fmt.Errorf("%s with an id '%s' is not found", resource.ResourceDescription().SingularName, resourceId) + } + + bundle.ApplyFuncContext(ctx, b, func(context.Context, *bundle.Bundle) { + b.Config.Bundle.Deployment.Lock.Force = forceLock + }) + + tfName := terraform.GroupToTerraformName[resource.ResourceDescription().PluralName] + phases.Bind(ctx, b, &terraform.BindOptions{ + AutoApprove: autoApprove, + ResourceType: tfName, + ResourceKey: resourceKey, + ResourceId: resourceId, + }) + if logdiag.HasError(ctx) { + return root.ErrAlreadyPrinted + } + + cmdio.LogString(ctx, fmt.Sprintf("Successfully bound %s with an id '%s'", resource.ResourceDescription().SingularName, resourceId)) + return nil +} diff --git a/cmd/bundle/generate.go b/cmd/bundle/generate.go index 448a172820..d282e14b5e 100644 --- a/cmd/bundle/generate.go +++ b/cmd/bundle/generate.go @@ -15,16 +15,24 @@ func newGenerateCommand() *cobra.Command { Common patterns: databricks bundle generate job --existing-job-id 123 --key my_job + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline databricks bundle generate dashboard --existing-path /my-dashboard --key sales_dash databricks bundle generate dashboard --resource my_dashboard --watch --force # Keep local copy in sync. Useful for development. databricks bundle generate dashboard --resource my_dashboard --force # Do a one-time sync. -Complete migration workflow: - 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job - 2. Bind: databricks bundle deployment bind my_job 123 - 3. 
Deploy: databricks bundle deploy +Migration workflows: -Use --key to specify the resource name in your bundle configuration.`, + Two-step workflow (manual bind): + 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job + 2. Bind: databricks bundle deployment bind my_job 123 + 3. Deploy: databricks bundle deploy + + One-step workflow (automatic bind): + 1. Generate and bind: databricks bundle generate job --existing-job-id 123 --key my_job --bind + 2. Deploy: databricks bundle deploy + +Use --key to specify the resource name in your bundle configuration. +Use --bind to automatically bind the generated resource to the existing workspace resource.`, } cmd.AddCommand(generate.NewGenerateJobCommand()) diff --git a/cmd/bundle/generate/app.go b/cmd/bundle/generate/app.go index df2fa70785..e56578fb11 100644 --- a/cmd/bundle/generate/app.go +++ b/cmd/bundle/generate/app.go @@ -5,6 +5,7 @@ import ( "path/filepath" "github.com/databricks/cli/bundle/generate" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dyn" @@ -20,6 +21,7 @@ func NewGenerateAppCommand() *cobra.Command { var sourceDir string var appName string var force bool + var bind bool cmd := &cobra.Command{ Use: "app", @@ -37,6 +39,9 @@ Examples: databricks bundle generate app --existing-app-name data-viewer \ --key data_app --config-dir resources --source-dir src/apps + # Generate and automatically bind to the existing app + databricks bundle generate app --existing-app-name my-app --key analytics_app --bind + What gets generated: - App configuration YAML file with app settings and dependencies - App source files downloaded to the specified source directory @@ -53,6 +58,7 @@ per target environment.`, cmd.Flags().StringVarP(&configDir, "config-dir", "d", "resources", `Directory path where the output bundle config will be stored`) cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", "src/app", `Directory path where the app files will be stored`) cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&bind, "bind", "b", false, `automatically bind the generated app config to the existing app`) cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := logdiag.InitContext(cmd.Context()) @@ -120,6 +126,11 @@ per target environment.`, } cmdio.LogString(ctx, "App configuration successfully saved to "+filename) + + if bind { + return deployment.BindResource(cmd, appKey, app.Name, true, false) + } + return nil } diff --git a/cmd/bundle/generate/dashboard.go b/cmd/bundle/generate/dashboard.go index 7480ac145e..d09bb90e3f 100644 --- a/cmd/bundle/generate/dashboard.go +++ b/cmd/bundle/generate/dashboard.go @@ -18,7 +18,9 @@ import ( "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/bundle/resources" "github.com/databricks/cli/bundle/statemgmt" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/yamlsaver" @@ -54,6 +56,12 @@ type dashboard struct { // Relative path from the resource directory to the 
dashboard directory. relativeDashboardDir string + // Command. + cmd *cobra.Command + + // Automatically bind the generated resource to the existing resource. + bind bool + // Output and error streams. out io.Writer err io.Writer @@ -333,6 +341,15 @@ func (d *dashboard) generateForExisting(ctx context.Context, b *bundle.Bundle, d if err != nil { logdiag.LogError(ctx, err) } + + if d.bind { + err = deployment.BindResource(d.cmd, key, dashboardID, true, false) + if err != nil { + logdiag.LogError(ctx, err) + return + } + cmdio.LogString(ctx, fmt.Sprintf("Successfully bound dashboard with an id '%s'", dashboardID)) + } } func (d *dashboard) initialize(ctx context.Context, b *bundle.Bundle) { @@ -486,6 +503,8 @@ bundle files automatically, useful during active dashboard development.`, cmd.Flags().StringVarP(&d.dashboardDir, "dashboard-dir", "s", "src", `directory to write the dashboard representation to`) cmd.Flags().BoolVarP(&d.force, "force", "f", false, `force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&d.bind, "bind", "b", false, `automatically bind the generated dashboard config to the existing dashboard`) + // Exactly one of the lookup flags must be provided. cmd.MarkFlagsOneRequired( "existing-path", @@ -500,9 +519,13 @@ bundle files automatically, useful during active dashboard development.`, cmd.MarkFlagsMutuallyExclusive("watch", "existing-path") cmd.MarkFlagsMutuallyExclusive("watch", "existing-id") + // Make sure the bind flag is not used together with the resource flag. + cmd.MarkFlagsMutuallyExclusive("bind", "resource") + // Completion for the resource flag. cmd.RegisterFlagCompletionFunc("resource", dashboardResourceCompletion) cmd.RunE = d.RunE + d.cmd = cmd return cmd } diff --git a/cmd/bundle/generate/job.go b/cmd/bundle/generate/job.go index 12b984232e..5663fbbfb1 100644 --- a/cmd/bundle/generate/job.go +++ b/cmd/bundle/generate/job.go @@ -6,8 +6,10 @@ import ( "io/fs" "os" "path/filepath" + "strconv" "github.com/databricks/cli/bundle/generate" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dyn" @@ -24,6 +26,7 @@ func NewGenerateJobCommand() *cobra.Command { var sourceDir string var jobId int64 var force bool + var bind bool cmd := &cobra.Command{ Use: "job", @@ -41,6 +44,9 @@ Examples: databricks bundle generate job --existing-job-id 67890 \ --key data_pipeline --config-dir resources --source-dir src + # Generate and automatically bind to the existing job + databricks bundle generate job --existing-job-id 12345 --key my_etl_job --bind + What gets generated: - Job configuration YAML file in the resources directory - Any associated notebook or Python files in the source directory @@ -56,6 +62,7 @@ After generation, you can deploy this job to other targets using: cmd.Flags().StringVarP(&configDir, "config-dir", "d", "resources", `Dir path where the output config will be stored`) cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", "src", `Dir path where the downloaded files will be stored`) cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&bind, "bind", "b", false, `automatically bind the generated resource to the existing resource`) cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := logdiag.InitContext(cmd.Context()) @@ -138,6 +145,11 @@ After
generation, you can deploy this job to other targets using: } cmdio.LogString(ctx, "Job configuration successfully saved to "+filename) + + if bind { + return deployment.BindResource(cmd, jobKey, strconv.FormatInt(jobId, 10), true, false) + } + return nil } diff --git a/cmd/bundle/generate/pipeline.go b/cmd/bundle/generate/pipeline.go index f8ed9b2ba0..d52d69b74c 100644 --- a/cmd/bundle/generate/pipeline.go +++ b/cmd/bundle/generate/pipeline.go @@ -8,6 +8,7 @@ import ( "path/filepath" "github.com/databricks/cli/bundle/generate" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dyn" @@ -24,6 +25,7 @@ func NewGeneratePipelineCommand() *cobra.Command { var sourceDir string var pipelineId string var force bool + var bind bool cmd := &cobra.Command{ Use: "pipeline", @@ -42,6 +44,9 @@ Examples: databricks bundle generate pipeline --existing-pipeline-id def456 \ --key data_transformation --config-dir resources --source-dir src + # Generate and automatically bind to the existing pipeline + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline --bind + What gets generated: - Pipeline configuration YAML file with settings and libraries - Pipeline notebooks downloaded to the source directory @@ -56,6 +61,7 @@ like catalogs, schemas, and compute configurations per target.`, cmd.Flags().StringVarP(&configDir, "config-dir", "d", "resources", `Dir path where the output config will be stored`) cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", "src", `Dir path where the downloaded files will be stored`) cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&bind, "bind", "b", false, `automatically bind the generated resource to the existing resource`) cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := logdiag.InitContext(cmd.Context()) @@ -143,6 +149,11 @@ like catalogs, schemas, and compute configurations per target.`, } cmdio.LogString(ctx, "Pipeline configuration successfully saved to "+filename) + + if bind { + return deployment.BindResource(cmd, pipelineKey, pipelineId, true, false) + } + return nil } diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go index 65decce058..dcc7c1b8eb 100644 --- a/cmd/bundle/utils/utils.go +++ b/cmd/bundle/utils/utils.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/bundle/deployplan" + bundleenv "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/diag" @@ -25,6 +26,97 @@ func configureVariables(cmd *cobra.Command, b *bundle.Bundle, variables []string }) } +// getTargetFromCmd returns the target name from command flags or environment. 
+func getTargetFromCmd(cmd *cobra.Command) string { + // Check command line flag first + if flag := cmd.Flag("target"); flag != nil { + if value := flag.Value.String(); value != "" { + return value + } + } + + // Check deprecated environment flag + if flag := cmd.Flag("environment"); flag != nil { + if value := flag.Value.String(); value != "" { + return value + } + } + + // Fall back to environment variable + target, _ := bundleenv.Target(cmd.Context()) + return target +} + +// ReloadBundle reloads the bundle configuration without modifying the command context. +// This is useful when you need to refresh the bundle configuration after changes +// without side effects like setting values on the context. +func ReloadBundle(cmd *cobra.Command) *bundle.Bundle { + ctx := cmd.Context() + + // Load the bundle configuration fresh from the filesystem + b := bundle.MustLoad(ctx) + if b == nil || logdiag.HasError(ctx) { + return b + } + + // Load the target configuration + if target := getTargetFromCmd(cmd); target == "" { + phases.LoadDefaultTarget(ctx, b) + } else { + phases.LoadNamedTarget(ctx, b, target) + } + + if logdiag.HasError(ctx) { + return b + } + + // Configure the workspace profile if provided + configureProfile(cmd, b) + + // Configure variables if provided + variables, err := cmd.Flags().GetStringSlice("var") + if err != nil { + logdiag.LogDiag(ctx, diag.FromErr(err)[0]) + return b + } + configureVariables(cmd, b, variables) + + // Set DirectDeployment flag based on environment + engine, err := deploymentEngine(ctx) + if err != nil { + logdiag.LogError(ctx, err) + return b + } + b.DirectDeployment = engine == "direct-exp" + + return b +} + +// configureProfile applies the profile flag to the bundle. +func configureProfile(cmd *cobra.Command, b *bundle.Bundle) { + profile := getProfileFromCmd(cmd) + if profile == "" { + return + } + + bundle.ApplyFuncContext(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) { + b.Config.Workspace.Profile = profile + }) +} + +// getProfileFromCmd returns the profile from command flags or environment. +func getProfileFromCmd(cmd *cobra.Command) string { + // Check command line flag first + if flag := cmd.Flag("profile"); flag != nil { + if value := flag.Value.String(); value != "" { + return value + } + } + + // Fall back to environment variable + return env.Get(cmd.Context(), "DATABRICKS_CONFIG_PROFILE") +} + func ConfigureBundleWithVariables(cmd *cobra.Command) *bundle.Bundle { // Load bundle config and apply target b := root.MustConfigureBundle(cmd)