Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
bundle:
name: test-ssd-count-0-$UNIQUE_NAME

resources:
clusters:
test_cluster:
cluster_name: test-cluster-$UNIQUE_NAME
spark_version: $DEFAULT_SPARK_VERSION
node_type_id: $NODE_TYPE_ID
num_workers: 1
gcp_attributes:
# config from https://github.com/databricks/terraform-provider-databricks/issues/4089
availability: PREEMPTIBLE_GCP
local_ssd_count: 0
zone_id: auto
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"method": "POST",
"path": "/api/2.1/clusters/create",
"body": {
"autotermination_minutes": 60,
"cluster_name": "test-cluster-[UNIQUE_NAME]",
"gcp_attributes": {
"availability": "PREEMPTIBLE_GCP",
"local_ssd_count": 0,
"zone_id": "auto"
},
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 1,
"spark_version": "13.3.x-snapshot-scala2.12"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
{
"method": "POST",
"path": "/api/2.1/clusters/create",
"body": {
"autotermination_minutes": 60,
"cluster_name": "test-cluster-[UNIQUE_NAME]",
"gcp_attributes": {
"availability": "PREEMPTIBLE_GCP",
"zone_id": "auto"
},
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 1,
"spark_version": "13.3.x-snapshot-scala2.12"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
json.gcp_attributes.availability = "PREEMPTIBLE_GCP";
json.gcp_attributes.first_on_demand = 1;
json.gcp_attributes.local_ssd_count = 0;
json.gcp_attributes.use_preemptible_executors = false;
json.gcp_attributes.zone_id = "auto";
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
json.gcp_attributes.availability = "PREEMPTIBLE_GCP";
json.gcp_attributes.first_on_demand = 1;
json.gcp_attributes.use_preemptible_executors = false;
json.gcp_attributes.zone_id = "auto";

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-ssd-count-0-[UNIQUE_NAME]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

=== Cluster should exist after bundle deployment:

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.clusters.test_cluster

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-ssd-count-0-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Render the bundle config from its template, substituting environment
# variables ($UNIQUE_NAME, $DEFAULT_SPARK_VERSION, $NODE_TYPE_ID).
envsubst < databricks.yml.tmpl > databricks.yml

# Tear down the deployed bundle and drop the recorded-requests log no
# matter how the test exits, so reruns start clean.
cleanup() {
trace $CLI bundle destroy --auto-approve
rm out.requests.txt
}
trap cleanup EXIT

trace $CLI bundle deploy
# Snapshot only the cluster-related API requests; output is keyed by
# engine (terraform vs direct) since the two may send different bodies.
print_requests.py //cluster > out.requests.$DATABRICKS_BUNDLE_ENGINE.json

title "Cluster should exist after bundle deployment:\n"
# read_id.py presumably resolves the deployed resource key to a cluster ID
# from deployment state -- confirm against the test harness.
CLUSTER_ID=`read_id.py test_cluster`
# Flatten the cluster's gcp_attributes to gron lines so the snapshot shows
# exactly which GCP fields the backend stored (e.g. local_ssd_count).
$CLI clusters get "${CLUSTER_ID}" | gron.py | grep json.gcp_attributes > out.result.$DATABRICKS_BUNDLE_ENGINE.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
Badness = "In terraform, local_ssd_count field is not sent to backend"
Local = true
Cloud = true
RecordRequests = true

# Testing GCP-specific field
CloudEnvs.gcp = true
CloudEnvs.aws = false
CloudEnvs.azure = false

Ignore = [
"databricks.yml",
]
13 changes: 12 additions & 1 deletion libs/testserver/clusters.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"fmt"
"os"

"github.com/databricks/cli/libs/structs/structaccess"
"github.com/databricks/databricks-sdk-go/service/compute"
)

Expand Down Expand Up @@ -90,9 +91,19 @@ func (s *FakeWorkspace) ClustersEdit(req Request) any {
return Response{}
}

// setDefault assigns value to the field identified by path on obj, but
// only when that field is currently unset (nil). Lookup and assignment
// errors are deliberately ignored: this is a best-effort fix-up applied
// by the fake server, mirroring defaults the real API would set.
func setDefault(obj any, path string, value any) {
	current, _ := structaccess.GetByString(obj, path)
	if current != nil {
		// The caller already supplied a value; leave it untouched.
		return
	}
	_ = structaccess.SetByString(obj, path, value)
}

// clusterFixUps applies server-side defaults that the real API sets.
func clusterFixUps(cluster *compute.ClusterDetails) {
if cluster.AwsAttributes == nil {
gcp := cluster.GcpAttributes
if gcp != nil {
setDefault(gcp, "first_on_demand", 1)
setDefault(gcp, "use_preemptible_executors", false)
} else if cluster.AwsAttributes == nil {
cluster.AwsAttributes = &compute.AwsAttributes{
Availability: compute.AwsAvailabilitySpotWithFallback,
ZoneId: "us-east-1c",
Expand Down