@@ -40,6 +40,8 @@ resources:
depends_on:
- task_key: c_task

+ no_tasks_job: {}
+
rename_task_job:
tasks:
- task_key: b_task
@@ -5,7 +5,7 @@ Deployment complete!

=== Modify c_task, remove d_task, add e_task
=== Detect and save changes
- Detected changes in 1 resource(s):
+ Detected changes in 2 resource(s):

Resource: resources.jobs.my_job
tasks[task_key='c_task'].depends_on[0].task_key: replace
@@ -14,6 +14,9 @@ Resource: resources.jobs.my_job
tasks[task_key='d_task']: remove
tasks[task_key='e_task']: add

+ Resource: resources.jobs.no_tasks_job
+ tasks: add
+


=== Configuration changes
@@ -52,6 +55,21 @@ Resource: resources.jobs.my_job
+ timeout_seconds: 3600
- task_key: a_task
notebook_task:
@@ -41,5 +40,13 @@
- task_key: c_task

- no_tasks_job: {}
+ no_tasks_job:
+ tasks:
+ - new_cluster:
+ node_type_id: [NODE_TYPE_ID]
+ num_workers: 1
+ spark_version: 13.3.x-snapshot-scala2.12
+ notebook_task:
+ notebook_path: /Users/[USERNAME]/new_task
+ task_key: new_task

rename_task_job:
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files...
Deploying resources...
Updating deployment state...
@@ -76,7 +94,7 @@ Resource: resources.jobs.rename_task_job
>>> diff.py databricks.yml.backup2 databricks.yml
--- databricks.yml.backup2
+++ databricks.yml
- @@ -42,14 +42,14 @@
+ @@ -52,14 +52,14 @@
rename_task_job:
tasks:
- - task_key: b_task
@@ -97,14 +115,14 @@ Resource: resources.jobs.rename_task_job
+ - task_key: b_task_renamed
notebook_task:
notebook_path: /Users/{{workspace_user_name}}/d_task
- @@ -60,5 +60,5 @@
+ @@ -70,5 +70,5 @@
- task_key: c_task
depends_on:
- - task_key: b_task
+ - task_key: b_task_renamed
notebook_task:
notebook_path: /Users/{{workspace_user_name}}/c_task
- @@ -69,7 +69,14 @@
+ @@ -79,7 +79,14 @@
- task_key: a_task
notebook_task:
- notebook_path: /Users/{{workspace_user_name}}/a_task
@@ -124,6 +142,7 @@ Resource: resources.jobs.rename_task_job
>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.jobs.my_job
+ delete resources.jobs.no_tasks_job
delete resources.jobs.rename_task_job

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default
16 changes: 16 additions & 0 deletions acceptance/bundle/config-remote-sync/job_multiple_tasks/script
@@ -35,6 +35,22 @@ r["tasks"].append({
})
EOF

+ no_tasks_job_id="$(read_id.py no_tasks_job)"
+ edit_resource.py jobs $no_tasks_job_id <<EOF
+ r.setdefault("tasks", [])
+ r["tasks"].append({
+     "task_key": "new_task",
+     "notebook_task": {
+         "notebook_path": "/Users/${CURRENT_USER_NAME}/new_task"
+     },
+     "new_cluster": {
+         "spark_version": "${DEFAULT_SPARK_VERSION}",
+         "node_type_id": "${NODE_TYPE_ID}",
+         "num_workers": 1
+     }
+ })
+ EOF
+
title "Detect and save changes"
echo
cp databricks.yml databricks.yml.backup
26 changes: 11 additions & 15 deletions bundle/configsync/diff.go
@@ -17,7 +17,6 @@ import (
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
"github.com/databricks/cli/libs/structs/structpath"
)

type OperationType string
@@ -48,20 +47,20 @@ func normalizeValue(v any) (any, error) {
return dynValue.AsAny(), nil
}

- func isEntityPath(path string) bool {
- pathNode, err := structpath.ParsePath(path)
- if err != nil {
- return false
+ func filterEntityDefaults(basePath string, value any) any {
+ if value == nil {
+ return nil
}

- if _, _, ok := pathNode.KeyValue(); ok {
- return true
+ if arr, ok := value.([]any); ok {
+ result := make([]any, 0, len(arr))
+ for i, elem := range arr {
+ elementPath := fmt.Sprintf("%s[%d]", basePath, i)
+ result = append(result, filterEntityDefaults(elementPath, elem))
+ }
+ return result
}

- return false
- }
-
- func filterEntityDefaults(basePath string, value any) any {
m, ok := value.(map[string]any)
if !ok {
return value

Review comment from the Contributor Author on the elementPath line: "I'll update basePath from string to PathNode in next PRs."
@@ -98,6 +97,7 @@ func convertChangeDesc(path string, cd *deployplan.ChangeDesc) (*ConfigChangeDesc, error) {
}, nil
}

+ normalizedValue = filterEntityDefaults(path, normalizedValue)
normalizedValue = resetValueIfNeeded(path, normalizedValue)

var op OperationType
@@ -111,10 +111,6 @@ func convertChangeDesc(path string, cd *deployplan.ChangeDesc) (*ConfigChangeDesc, error) {
op = OperationSkip
}

- if (op == OperationAdd || op == OperationReplace) && isEntityPath(path) {
- normalizedValue = filterEntityDefaults(path, normalizedValue)
- }
-
return &ConfigChangeDesc{
Operation: op,
Value: normalizedValue,
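
For orientation, the convertChangeDesc hunks above revolve around a small vocabulary of types declared elsewhere in diff.go. A hedged sketch reconstructed only from what this diff shows — the concrete string values and the OperationRemove name are assumptions, not the repo's actual declarations:

```go
package configsync

// Hedged reconstruction of the types the hunks above refer to, based only
// on what is visible in this diff.
type OperationType string

const (
	OperationAdd     OperationType = "add"     // assumed value
	OperationReplace OperationType = "replace" // assumed value
	OperationRemove  OperationType = "remove"  // assumed, from the "remove" lines in the test output
	OperationSkip    OperationType = "skip"    // assumed value
)

// ConfigChangeDesc pairs the operation with the normalized (and now
// default-filtered) value that convertChangeDesc returns for one path.
type ConfigChangeDesc struct {
	Operation OperationType
	Value     any
}
```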
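
The behavioral core of the change: filterEntityDefaults now runs for every change (the old isEntityPath gate and its structpath import are gone) and recurses into arrays, so defaults inside list entries — such as the tasks list added to no_tasks_job — are filtered under indexed paths. A minimal runnable sketch of just that recursion; walk is a hypothetical stand-in for the function above, and the map[string]any branch is stubbed out:

```go
package main

import "fmt"

// walk mirrors the array branch added to filterEntityDefaults: nil passes
// through, each array element is revisited under an indexed path
// ("tasks[0]", "tasks[1]", ...), and anything else is returned unchanged.
// The real function also walks map values and strips entity defaults;
// that branch is omitted in this sketch.
func walk(basePath string, value any) any {
	if value == nil {
		return nil
	}
	if arr, ok := value.([]any); ok {
		result := make([]any, 0, len(arr))
		for i, elem := range arr {
			elementPath := fmt.Sprintf("%s[%d]", basePath, i)
			fmt.Println("visiting", elementPath)
			result = append(result, walk(elementPath, elem))
		}
		return result
	}
	return value // map handling omitted
}

func main() {
	tasks := []any{map[string]any{"task_key": "new_task"}}
	walk("resources.jobs.no_tasks_job.tasks", tasks)
	// Output: visiting resources.jobs.no_tasks_job.tasks[0]
}
```

The printed path shape ("tasks[0]") is exactly the basePath string that the author's review comment proposes replacing with a structured PathNode in follow-up PRs.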