Skip to content

Commit 7209d6f

Browse files
committed
handling single node and standard mode
1 parent 45baa86 commit 7209d6f

File tree

9 files changed

+92
-84
lines changed

9 files changed

+92
-84
lines changed

README.md

Lines changed: 36 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -43,28 +43,57 @@
4343
##### [Create Cluster:](examples/cluster)
4444

4545
- This is where you would normally start with if you just deployed your databricks workspace.
46-
Two options are available:
47-
- Minimum configuration required to bring up a cluster.
48-
- Bring up cluster with most of the available options.
49-
Note: Some option may be missing.
46+
47+
Two cluster modes are supported by the module:
48+
49+
- `Single Node` mode: To deploy cluster in Single Node mode, update `fixed_value` to `0`:
50+
```
51+
fixed_value = 0
52+
```
53+
54+
- `Standard` mode: To deploy in Standard mode, two options are available:
55+
56+
```
57+
fixed_value = 1
58+
```
59+
OR
60+
```
61+
auto_scaling = [1,3]
62+
```
63+
64+
**Note:** If you need to configure `Instance Pool`:
65+
66+
```
67+
deploy_instance_pool = true
68+
min_idle_instances = 1
69+
max_capacity = 5
70+
idle_instance_autotermination_minutes = 30
71+
```
72+
73+
> ❗️ **Important**
74+
>
75+
> If `deploy_instance_pool` is set to `true` and `auto_scaling` is enabled,
76+
> ensure `max_capacity` is greater than the `auto_scaling`
77+
max value.
5078

5179
##### [Deploy Job on new or existing cluster:](examples/job)
5280

5381
- Deploy Job to an existing cluster.
5482
- Deploy Cluster and deploy Job.
55-
Note: Job name and Notebook name is same.
83+
84+
Note: `Job name` and `Notebook name` are the same.
5685

5786
##### [Deploy Notebook:](examples/notebook)
5887

5988
- Once you have Notebooks ready, put them in the notebooks folder and specify the job as below:
6089

6190
```
6291
notebook_info = {
63-
default994 = {
92+
demo1 = {
6493
language = "PYTHON"
6594
local_path = "notebooks/demo_notebook_1.py"
6695
}
67-
default140 = {
96+
demo2 = {
6897
language = "PYTHON"
6998
local_path = "notebooks/demo_notebook_2.py"
7099
}

cluster.tf

Lines changed: 26 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
resource "databricks_cluster" "cluster" {
2-
count = var.deploy_cluster ? 1 : 0
2+
count = var.deploy_cluster == true && var.fixed_value != 0 ? 1 : 0
33

4-
cluster_name = "${var.teamid}-${var.prjid} (${data.databricks_current_user.me.alphanumeric})"
5-
spark_version = data.databricks_spark_version.latest.id
6-
autotermination_minutes = var.cluster_autotermination_minutes
4+
cluster_name = "${var.teamid}-${var.prjid} (${data.databricks_current_user.me.alphanumeric})"
5+
spark_version = data.databricks_spark_version.latest.id
6+
7+
driver_node_type_id = var.driver_node_type_id
8+
node_type_id = var.deploy_instance_pool != true ? join("", data.databricks_node_type.cluster_node_type.*.id) : null
79

8-
node_type_id = var.deploy_instance_pool != true ? join("", data.databricks_node_type.cluster_node_type.*.id) : null
910
instance_pool_id = var.deploy_instance_pool == true ? join("", databricks_instance_pool.instance_nodes.*.id) : null
1011

1112
num_workers = var.fixed_value != null ? var.fixed_value : null
@@ -17,18 +18,26 @@ resource "databricks_cluster" "cluster" {
1718
max_workers = autoscale.value[1]
1819
}
1920
}
20-
autoscale {
21-
min_workers = var.cluster_min_workers
22-
max_workers = var.cluster_max_workers
23-
}
21+
autotermination_minutes = var.cluster_autotermination_minutes
22+
custom_tags = merge(local.shared_tags)
23+
}
2424

25-
custom_tags = merge(local.shared_tags)
25+
resource "databricks_cluster" "single_node_cluster" {
26+
count = var.deploy_cluster == true && var.fixed_value == 0 && var.auto_scaling == null ? 1 : 0
2627

27-
/*
28-
spark_conf = {
29-
# Single-node
30-
"spark.databricks.cluster.profile" : "singleNode"
31-
"spark.master" : "local[*]"
32-
}
33-
*/
28+
cluster_name = "${var.teamid}-${var.prjid} (${data.databricks_current_user.me.alphanumeric})"
29+
spark_version = data.databricks_spark_version.latest.id
30+
autotermination_minutes = var.cluster_autotermination_minutes
31+
node_type_id = var.deploy_instance_pool != true ? join("", data.databricks_node_type.cluster_node_type.*.id) : null
32+
num_workers = 0
33+
34+
custom_tags = {
35+
"ResourceClass" = "SingleNode"
36+
}
37+
38+
spark_conf = {
39+
# Single-node
40+
"spark.databricks.cluster.profile" : "singleNode"
41+
"spark.master" : "local[*]"
42+
}
3443
}

cluster_permissions.tf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
resource "databricks_permissions" "cluster" {
22
count = var.deploy_cluster ? 1 : 0
33

4-
cluster_id = join("", databricks_cluster.cluster.*.id)
4+
cluster_id = var.fixed_value != 0 ? join("", databricks_cluster.cluster.*.id) : join("", databricks_cluster.single_node_cluster.*.id)
55
access_control {
66
user_name = join("", databricks_user.users.*.user_name)
77
permission_level = "CAN_RESTART"

examples/all/sample-maximum_config/main.tf

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,8 @@ module "databricks_workspace_management" {
1919
deploy_cluster = true
2020
# cluster_id = "0507-210128-assay460"
2121
cluster_autotermination_minutes = 30
22-
cluster_min_workers = 1
23-
cluster_max_workers = 2
22+
fixed_value = 1
23+
auto_scaling = [2, 3]
2424
# ------------------------------------------------
2525
# Cluster Policy
2626
# ------------------------------------------------
@@ -29,6 +29,7 @@ module "databricks_workspace_management" {
2929
# ------------------------------------------------
3030
# Cluster Instance Pool
3131
# ------------------------------------------------
32+
deploy_instance_pool = false
3233
min_idle_instances = 1
3334
max_capacity = 2
3435
idle_instance_autotermination_minutes = 30

examples/cluster/sample-maximum_config/main.tf

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
module "databricks_workspace_management" {
22
source = "git::git@github.com:tomarv2/terraform-databricks-workspace-management.git"
33

4-
workspace_url = "https://<workspace_url>.cloud.databricks.com"
4+
workspace_url = "https://<workspace_url>.cloud.databricks.com"
55
dapi_token = "dapi123456789012"
66
# ------------------------------------------------
77
# Admin Console
@@ -16,20 +16,21 @@ module "databricks_workspace_management" {
1616
# ------------------------------------------------
1717
deploy_cluster = true
1818
cluster_autotermination_minutes = 30
19-
cluster_min_workers = 1
20-
cluster_max_workers = 2
21-
# ------------------------------------------------
22-
# Cluster Policy
23-
# ------------------------------------------------
24-
cluster_policy_max_dbus_per_hour = 5
25-
cluster_policy_autotermination_minutes = 5
19+
fixed_value = 1
20+
auto_scaling = [2, 3]
2621
# ------------------------------------------------
2722
# Cluster Instance Pool
2823
# ------------------------------------------------
24+
deploy_instance_pool = false
2925
min_idle_instances = 1
30-
max_capacity = 2
26+
max_capacity = 5
3127
idle_instance_autotermination_minutes = 30
3228
# ------------------------------------------------
29+
# Cluster Policy
30+
# ------------------------------------------------
31+
cluster_policy_max_dbus_per_hour = 5
32+
cluster_policy_autotermination_minutes = 5
33+
# ------------------------------------------------
3334
# Cluster Worker Type
3435
# ------------------------------------------------
3536
local_disk = 0

examples/job/sample/output.tf

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -49,11 +49,6 @@ output "databricks_group_member" {
4949
value = module.databricks_workspace_management.databricks_group_member
5050
}
5151

52-
output "databricks_permissions_notebook" {
53-
description = "databricks notebook permissions"
54-
value = module.databricks_workspace_management.databricks_permissions_notebook
55-
}
56-
5752
/*
5853
output "databricks_permissions_job" {
5954
value = module.databricks_workspace_management.databricks_permissions_job

examples/notebook/sample/output.tf

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -49,17 +49,6 @@ output "databricks_group_member" {
4949
value = module.databricks_workspace_management.databricks_group_member
5050
}
5151

52-
output "databricks_permissions_notebook" {
53-
description = "databricks notebook permissions"
54-
value = module.databricks_workspace_management.databricks_permissions_notebook
55-
}
56-
57-
/*
58-
output "databricks_permissions_job" {
59-
value = module.databricks_workspace_management.databricks_permissions_job
60-
}
61-
*/
62-
6352
output "databricks_permissions_cluster" {
6453
description = "databricks cluster permissions"
6554
value = module.databricks_workspace_management.databricks_permissions_cluster

output.tf

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -49,12 +49,8 @@ output "databricks_group_member" {
4949
description = "databricks group members"
5050
value = join("", databricks_group_member.group_members.*.group_id)
5151
}
52-
/*
53-
output "databricks_permissions_notebook" {
54-
description = "databricks notebook permissions"
55-
value = join("", databricks_permissions.notebook.*.notebook_path)
56-
}
5752

53+
/*
5854
output "databricks_permissions_job" {
5955
value = databricks_permissions.
6056
}

variables.tf

Lines changed: 15 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -49,13 +49,13 @@ variable "databricks_secret_key" {
4949
variable "min_idle_instances" {
5050
description = "instance pool minimum idle instances"
5151
type = number
52-
default = 0
52+
default = 1
5353
}
5454

5555
variable "max_capacity" {
5656
description = "instance pool maximum capacity"
5757
type = number
58-
default = 30
58+
default = 3
5959
}
6060

6161
variable "idle_instance_autotermination_minutes" {
@@ -79,46 +79,28 @@ variable "cluster_autotermination_minutes" {
7979
default = 20
8080
}
8181

82-
variable "cluster_min_workers" {
83-
description = "cluster minimum workers"
84-
type = number
85-
default = 1
86-
}
87-
88-
variable "cluster_max_workers" {
89-
description = "cluster maximum workers"
90-
type = number
91-
default = 10
92-
}
93-
9482
variable "cluster_policy_max_dbus_per_hour" {
9583
description = "cluster maximum dbus per hour"
9684
type = number
9785
default = 10
9886
}
9987

10088
variable "cluster_policy_autotermination_minutes" {
101-
description = "cluster policy auto termination minutes"
89+
description = "cluster policy auto termination minutes."
10290
type = number
10391
default = 20
10492
}
10593

106-
variable "cluster_type" {
107-
description = "Type of cluster(autoscale or fixed)"
108-
type = string
109-
default = "fixed"
110-
}
111-
11294
variable "auto_scaling" {
113-
description = "Type of cluster(autoscale or fixed)"
95+
description = "Number of min and max workers in auto scale."
11496
type = list(any)
115-
default = [1, 1]
97+
default = null
11698
}
11799

118100
variable "fixed_value" {
119-
description = "Type of cluster(autoscale or fixed)"
101+
description = "Number of nodes in the cluster."
120102
type = number
121-
default = null
103+
default = 0
122104
}
123105
# ------------------------------------------------
124106
# Job
@@ -226,15 +208,15 @@ variable "cluster_id" {
226208
}
227209

228210
variable "note_type_id" {
229-
description = "Existing cluster id"
211+
description = "Type of node"
230212
type = string
231213
default = null
232214
}
233215

234216
variable "deploy_instance_pool" {
235217
description = "Deploy instance pool"
236218
type = bool
237-
default = true
219+
default = false
238220
}
239221
# ------------------------------------------------
240222
# Spark version
@@ -276,3 +258,9 @@ variable "create_group" {
276258
type = bool
277259
default = true
278260
}
261+
#
262+
variable "driver_node_type_id" {
263+
description = "The node type of the Spark driver. This field is optional; if unset, the API will set the driver node type to the same value as `node_type_id`."
264+
type = string
265+
default = null
266+
}

0 commit comments

Comments
 (0)