
Commit b2816c4

Authored Feb 12, 2024
feat: adding extension notebooks (#303)
1 parent d676b28 commit b2816c4

24 files changed: +2,032 −2,855 lines changed

‎metadata.display.yaml

+1 −1

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

‎metadata.yaml

+10 −16

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -27,7 +27,7 @@ spec:
 version: 7.0.0
 actuationTool:
 flavor: Terraform
-version: ">= 0.13"
+version: ">= 1.3"
 description: {}
 content:
 subBlueprints:
@@ -61,7 +61,6 @@ spec:
 - name: dataset_id
 description: Unique ID for the dataset being provisioned.
 varType: string
-defaultValue: null
 required: true
 - name: dataset_labels
 description: Key value pairs in a map for dataset labels
@@ -70,27 +69,22 @@ spec:
 - name: dataset_name
 description: Friendly name for the dataset being provisioned.
 varType: string
-defaultValue: null
 - name: default_table_expiration_ms
 description: TTL of tables using the dataset in MS
 varType: number
-defaultValue: null
 - name: delete_contents_on_destroy
 description: (Optional) If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
 varType: bool
-defaultValue: null
 - name: deletion_protection
-description: Whether or not to allow Terraform to destroy the instance. Unless this field is set to false in Terraform state, a terraform destroy or terraform apply that would delete the instance will fail
+description: Whether or not to allow deletion of tables and external tables defined by this module. Can be overriden by table-level deletion_protection configuration.
 varType: bool
 defaultValue: false
 - name: description
 description: Dataset description.
 varType: string
-defaultValue: null
 - name: encryption_key
 description: Default encryption key to apply to the dataset. Defaults to null (Google-managed).
 varType: string
-defaultValue: null
 - name: external_tables
 description: A list of objects which include table_id, expiration_time, external_data_configuration, and labels.
 varType: |-
@@ -120,9 +114,10 @@ spec:
 mode = string,
 source_uri_prefix = string,
 }),
-expiration_time = string,
-max_staleness = optional(string),
-labels = map(string),
+expiration_time = string,
+max_staleness = optional(string),
+deletion_protection = optional(bool),
+labels = map(string),
 }))
 defaultValue: []
 - name: location
@@ -161,11 +156,9 @@ spec:
 - name: max_time_travel_hours
 description: Defines the time travel window in hours
 varType: number
-defaultValue: null
 - name: project_id
 description: Project where the dataset and table are created
 varType: string
-defaultValue: null
 required: true
 - name: routines
 description: A list of objects which include routine_id, routine_type, routine_language, definition_body, return_type, routine_description and arguments.
@@ -208,8 +201,9 @@ spec:
 interval = string,
 }),
 }),
-expiration_time = string,
-labels = map(string),
+expiration_time = string,
+deletion_protection = optional(bool),
+labels = map(string),
 }))
 defaultValue: []
 - name: views

‎modules/authorization/metadata.display.yaml

+1 −1

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

‎modules/authorization/metadata.yaml

+2 −4

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ spec:
 version: 7.0.0
 actuationTool:
 flavor: Terraform
-version: ">= 0.13"
+version: ">= 1.3"
 description: {}
 content:
 examples:
@@ -73,12 +73,10 @@ spec:
 - name: dataset_id
 description: Unique ID for the dataset being provisioned.
 varType: string
-defaultValue: null
 required: true
 - name: project_id
 description: Project where the dataset and table are created
 varType: string
-defaultValue: null
 required: true
 - name: roles
 description: An array of objects that define dataset access for one or more entities.

‎modules/data_warehouse/README.md

+10 −2

@@ -17,6 +17,7 @@ The resources/services/activations/deletions that this module will create/trigge
 - Creates and inferences with a BigQuery ML model
 - Creates a remote model and uses Generative AI to generate text through a BigQuery ML remote model
 - Creates a Looker Studio report
+- Deploys follow-on learning notebooks

 ### preDeploy
 To deploy this blueprint you must have an active billing account and billing permissions.
@@ -35,9 +36,11 @@ Functional examples are included in the

 | Name | Description | Type | Default | Required |
 |------|-------------|------|---------|:--------:|
-| deletion\_protection | Whether or not to protect GCS resources from deletion when solution is modified or changed. | `string` | `true` | no |
+| create\_ignore\_service\_accounts | Whether or not to ignore creation of a service account if an account of the same name already exists | `string` | `true` | no |
+| dataform\_region | Region that is used to deploy Dataform resources. This does not limit where resources can be run or what region data must be located in. | `string` | `null` | no |
+| deletion\_protection | Whether or not to protect GCS resources from deletion when solution is modified or changed. | `string` | `false` | no |
 | enable\_apis | Whether or not to enable underlying apis in this solution. | `string` | `true` | no |
-| force\_destroy | Whether or not to protect BigQuery resources from deletion when solution is modified or changed. | `string` | `false` | no |
+| force\_destroy | Whether or not to protect BigQuery resources from deletion when solution is modified or changed. | `string` | `true` | no |
 | labels | A map of labels to apply to contained resources. | `map(string)` | <pre>{<br> "data-warehouse": true<br>}</pre> | no |
 | project\_id | Google Cloud Project ID | `string` | n/a | yes |
 | region | Google Cloud Region | `string` | n/a | yes |
@@ -86,6 +89,7 @@ A project with the following APIs enabled must be used to host the
 resources of this module:

 - Vertex AI API: `aiplatform.googleapis.com`
+- Artifact Registry API: `artifactregistry.googleapis.com`
 - BigQuery API: `bigquery.googleapis.com`
 - BigQuery Connection API: `bigqueryconnection.googleapis.com`
 - BigQuery Data Policy API: `bigquerydatapolicy.googleapis.com`
@@ -95,10 +99,14 @@ resources of this module:
 - BigQuery Storage API: `bigquerystorage.googleapis.com`
 - Google Cloud APIs: `cloudapis.googleapis.com`
 - Cloud Build API: `cloudbuild.googleapis.com`
+- Cloud Functions API: `cloudfunctions.googleapis.com`
 - Compute Engine API: `compute.googleapis.com`
 - Infrastructure Manager API: `config.googleapis.com`
 - Data Catalog API: `datacatalog.googleapis.com`
+- Dataform API: `dataform.googleapis.com`
 - Data Lineage API: `datalineage.googleapis.com`
+- Notebooks API: `notebooks.googleapis.com`
+- Cloud Run API: `run.googleapis.com`
 - Service Usage API: `serviceusage.googleapis.com`
 - Google Cloud Storage API: `storage.googleapis.com`
 - Google Cloud Storage JSON API: `storage-api.googleapis.com`

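For reference, a minimal sketch of how the updated inputs might be passed when calling this submodule from another configuration. The registry source string and version constraint below are assumptions, not part of this commit; adjust them to your own setup.

module "data_warehouse" {
  source  = "terraform-google-modules/bigquery/google//modules/data_warehouse" # assumed source path
  version = "~> 7.0"                                                           # assumed version constraint

  project_id = "my-project-id" # hypothetical project
  region     = "us-central1"

  # New inputs introduced by this commit:
  dataform_region                = "us-central1"
  create_ignore_service_accounts = true

  # Defaults flipped by this commit; set explicitly if you relied on the old behavior:
  force_destroy       = true
  deletion_protection = false
}
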
‎modules/data_warehouse/assets/data-warehouse-architecture.json

+1 −1

Large diffs are not rendered by default.

‎modules/data_warehouse/assets/data-warehouse-architecture.svg

+604 −2,738

‎modules/data_warehouse/bigquery.tf

+51 −43

@@ -15,7 +15,7 @@
 */

 # Set up BigQuery resources
-# # Create the BigQuery dataset
+## Create the BigQuery dataset
 resource "google_bigquery_dataset" "ds_edw" {
   project = module.project-services.project_id
   dataset_id = "thelook"
@@ -28,7 +28,7 @@ resource "google_bigquery_dataset" "ds_edw" {
   depends_on = [time_sleep.wait_after_apis]
 }

-# # Create a BigQuery connection for Cloud Storage to create BigLake tables
+## Create a BigQuery connection for Cloud Storage to create BigLake tables
 resource "google_bigquery_connection" "ds_connection" {
   project = module.project-services.project_id
   connection_id = "ds_connection"
@@ -38,7 +38,7 @@ resource "google_bigquery_connection" "ds_connection" {
   depends_on = [time_sleep.wait_after_apis]
 }

-# # Grant IAM access to the BigQuery Connection account for Cloud Storage
+## Grant IAM access to the BigQuery Connection account for Cloud Storage
 resource "google_project_iam_member" "bq_connection_iam_object_viewer" {
   project = module.project-services.project_id
   role = "roles/storage.objectViewer"
@@ -47,7 +47,7 @@ resource "google_project_iam_member" "bq_connection_iam_object_viewer" {
   depends_on = [google_storage_bucket.raw_bucket, google_bigquery_connection.ds_connection]
 }

-# # Create a BigQuery connection for Vertex AI to support GenerativeAI use cases
+## Create a BigQuery connection for Vertex AI to support GenerativeAI use cases
 resource "google_bigquery_connection" "vertex_ai_connection" {
   project = module.project-services.project_id
   connection_id = "genai_connection"
@@ -57,23 +57,27 @@ resource "google_bigquery_connection" "vertex_ai_connection" {
   depends_on = [time_sleep.wait_after_apis]
 }

-# # Grant IAM access to the BigQuery Connection account for Vertex AI
-resource "google_project_iam_member" "bq_connection_iam_vertex_ai" {
-  for_each = toset([
+## Define IAM roles granted to the BigQuery Connection service account
+locals {
+  bq_vertex_ai_roles = [
     "roles/aiplatform.user",
     "roles/bigquery.connectionUser",
     "roles/serviceusage.serviceUsageConsumer",
-  ]
-  )
+  ]
+}
+
+## Grant IAM access to the BigQuery Connection account for Vertex AI
+resource "google_project_iam_member" "bq_connection_iam_vertex_ai" {
+  count = length(local.bq_vertex_ai_roles)
+  role = local.bq_vertex_ai_roles[count.index]
   project = module.project-services.project_id
-  role = each.key
   member = "serviceAccount:${google_bigquery_connection.vertex_ai_connection.cloud_resource[0].service_account_id}"

-  depends_on = [google_bigquery_connection.vertex_ai_connection]
+  depends_on = [google_bigquery_connection.vertex_ai_connection, google_project_iam_member.bq_connection_iam_object_viewer]
 }

 # Create data tables in BigQuery
-# # Create a Biglake table for events with metadata caching
+## Create a Biglake table for events with metadata caching
 resource "google_bigquery_table" "tbl_edw_events" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
   table_id = "events"
@@ -92,7 +96,7 @@ resource "google_bigquery_table" "tbl_edw_events" {
   labels = var.labels
 }

-# # Create a Biglake table for inventory_items
+## Create a Biglake table for inventory_items
 resource "google_bigquery_table" "tbl_edw_inventory_items" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
   table_id = "inventory_items"
@@ -111,7 +115,7 @@ resource "google_bigquery_table" "tbl_edw_inventory_items" {
   labels = var.labels
 }

-# # Create a Biglake table with metadata caching for order_items
+## Create a Biglake table with metadata caching for order_items
 resource "google_bigquery_table" "tbl_edw_order_items" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
   table_id = "order_items"
@@ -130,7 +134,7 @@ resource "google_bigquery_table" "tbl_edw_order_items" {
   labels = var.labels
 }

-# # Create a Biglake table for orders
+## Create a Biglake table for orders
 resource "google_bigquery_table" "tbl_edw_orders" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
   table_id = "orders"
@@ -149,7 +153,7 @@ resource "google_bigquery_table" "tbl_edw_orders" {
   labels = var.labels
 }

-# # Create a Biglake table for products
+## Create a Biglake table for products
 resource "google_bigquery_table" "tbl_edw_products" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
   table_id = "products"
@@ -168,7 +172,7 @@ resource "google_bigquery_table" "tbl_edw_products" {
   labels = var.labels
 }

-# # Create a Biglake table for products
+## Create a Biglake table for products
 resource "google_bigquery_table" "tbl_edw_users" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
   table_id = "users"
@@ -188,7 +192,7 @@
 }

 # Load Queries for Stored Procedure Execution
-# # Load Distribution Center Lookup Data Tables
+## Load Distribution Center Lookup Data Tables
 resource "google_bigquery_routine" "sp_provision_lookup_tables" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -202,7 +206,7 @@ resource "google_bigquery_routine" "sp_provision_lookup_tables" {
   )
 }

-# # Add Looker Studio Data Report Procedure
+## Add Looker Studio Data Report Procedure
 resource "google_bigquery_routine" "sproc_sp_demo_lookerstudio_report" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -222,7 +226,7 @@ resource "google_bigquery_routine" "sproc_sp_demo_lookerstudio_report" {
   ]
 }

-# # Add Sample Queries
+## Add Sample Queries
 resource "google_bigquery_routine" "sp_sample_queries" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -242,7 +246,7 @@ resource "google_bigquery_routine" "sp_sample_queries" {
 }


-# # Add Bigquery ML Model for clustering
+## Add Bigquery ML Model for clustering
 resource "google_bigquery_routine" "sp_bigqueryml_model" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -259,7 +263,7 @@ resource "google_bigquery_routine" "sp_bigqueryml_model" {
   ]
 }

-# # Create Bigquery ML Model for using text generation
+## Create Bigquery ML Model for using text generation
 resource "google_bigquery_routine" "sp_bigqueryml_generate_create" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -281,7 +285,7 @@ resource "google_bigquery_routine" "sp_bigqueryml_generate_create" {
   ]
 }

-# # Query Bigquery ML Model for describing customer clusters
+## Query Bigquery ML Model for describing customer clusters
 resource "google_bigquery_routine" "sp_bigqueryml_generate_describe" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -300,7 +304,7 @@ resource "google_bigquery_routine" "sp_bigqueryml_generate_describe" {
   ]
 }

-# # Add Translation Scripts
+## Add Translation Scripts
 resource "google_bigquery_routine" "sp_sample_translation_queries" {
   project = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -320,33 +324,37 @@ resource "google_bigquery_routine" "sp_sample_translation_queries" {
 # Add Scheduled Query

 # Create specific service account for DTS Run
-# # Create a DTS specific service account
+## Create a DTS specific service account
 resource "google_service_account" "dts" {
-  project = module.project-services.project_id
-  account_id = "cloud-dts-sa-${random_id.id.hex}"
-  display_name = "Service Account for Data Transfer Service"
+  project = module.project-services.project_id
+  account_id = "cloud-dts-sa-${random_id.id.hex}"
+  display_name = "Service Account for Data Transfer Service"
+  description = "Service account used to manage Data Transfer Service"
+  create_ignore_already_exists = var.create_ignore_service_accounts
+
+  depends_on = [time_sleep.wait_after_apis]
+
 }

-# # Grant the DTS Specific service account access
-resource "google_project_iam_member" "dts_roles" {
-  for_each = toset([
+## Define the IAM roles granted to the DTS service account
+locals {
+  dts_roles = [
     "roles/bigquery.user",
     "roles/bigquery.dataEditor",
-    "roles/bigquery.connectionUser"
-  ])
-  project = module.project-services.project_id
-  role = each.key
-  member = "serviceAccount:${google_service_account.dts.email}"
+    "roles/bigquery.connectionUser",
+    "roles/iam.serviceAccountTokenCreator"
+  ]
 }

-# # # Grant the DTS service account access
-# resource "google_project_iam_member" "dts_service_account_roles" {
-#   role = "roles/iam.serviceAccountTokenCreator"
-#   project = module.project-services.project_id
-#   member = "serviceAccount:${google_project_service_identity.bigquery_data_transfer_sa.email}"
+## Grant the DTS Specific service account access
+resource "google_project_iam_member" "dts_roles" {
+  project = module.project-services.project_id
+  count = length(local.dts_roles)
+  role = local.dts_roles[count.index]
+  member = "serviceAccount:${google_service_account.dts.email}"

-#   depends_on = [ google_project_iam ]
-# }
+  depends_on = [time_sleep.wait_after_apis, google_project_iam_member.bq_connection_iam_vertex_ai]
+}

 # Set up scheduled query
 resource "google_bigquery_data_transfer_config" "dts_config" {

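Several IAM blocks in this file move from for_each over an inline toset(...) to count over a list kept in locals. A generic sketch of the resulting pattern, with purely illustrative names and values that are not part of this module:

locals {
  example_roles = [
    "roles/bigquery.user",
    "roles/storage.objectViewer",
  ]
}

resource "google_project_iam_member" "example" {
  count   = length(local.example_roles)       # one binding per role, addressed by index
  role    = local.example_roles[count.index]
  project = "my-project-id"                                                      # hypothetical project
  member  = "serviceAccount:example-sa@my-project-id.iam.gserviceaccount.com"    # hypothetical member
}

With count, instances are addressed by position (example[0], example[1]), so reordering the list can cause resource churn, whereas for_each keys instances by role name; the trade-off accepted here is keeping each role list in a single named local.
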
‎modules/data_warehouse/main.tf

+8 −3

@@ -24,19 +24,24 @@ module "project-services" {

   activate_apis = [
     "aiplatform.googleapis.com",
+    "artifactregistry.googleapis.com",
     "bigquery.googleapis.com",
     "bigqueryconnection.googleapis.com",
-    "bigquerydatatransfer.googleapis.com",
     "bigquerydatapolicy.googleapis.com",
+    "bigquerydatatransfer.googleapis.com",
     "bigquerymigration.googleapis.com",
     "bigqueryreservation.googleapis.com",
     "bigquerystorage.googleapis.com",
     "cloudapis.googleapis.com",
     "cloudbuild.googleapis.com",
+    "cloudfunctions.googleapis.com",
     "compute.googleapis.com",
     "config.googleapis.com",
     "datacatalog.googleapis.com",
+    "dataform.googleapis.com",
     "datalineage.googleapis.com",
+    "notebooks.googleapis.com",
+    "run.googleapis.com",
     "serviceusage.googleapis.com",
     "storage.googleapis.com",
     "storage-api.googleapis.com",
@@ -59,7 +64,7 @@ module "project-services" {

 # Wait after APIs are enabled to give time for them to spin up
 resource "time_sleep" "wait_after_apis" {
-  create_duration = "90s"
+  create_duration = "30s"
   depends_on = [module.project-services]
 }

@@ -69,7 +74,7 @@ resource "random_id" "id" {
 }

 # Set up Storage Buckets
-# # Set up the raw storage bucket
+## Set up the raw storage bucket for data
 resource "google_storage_bucket" "raw_bucket" {
   name = "ds-edw-raw-${random_id.id.hex}"
   project = module.project-services.project_id

‎modules/data_warehouse/metadata.display.yaml

+18 −1

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,15 +28,23 @@ spec:
 ui:
 input:
 variables:
+create_ignore_service_accounts:
+name: create_ignore_service_accounts
+title: Ignore Service Accounts if Exist
+dataform_region:
+name: dataform_region
+title: Dataform Region
 deletion_protection:
 name: deletion_protection
 title: Deletion Protection
+invisible: true
 enable_apis:
 name: enable_apis
 title: Enable Apis
 force_destroy:
 name: force_destroy
 title: Force Destroy
+invisible: true
 labels:
 name: labels
 title: Labels
@@ -49,3 +57,12 @@ spec:
 text_generation_model_name:
 name: text_generation_model_name
 title: Text Generation Model Name
+runtime:
+outputs:
+bigquery_editor_url:
+openInNewTab: true
+showInNotification: true
+ds_friendly_name: {}
+lookerstudio_report_url:
+openInNewTab: true
+raw_bucket: {}

‎modules/data_warehouse/metadata.yaml

+45 −5

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -30,7 +30,42 @@ spec:
 flavor: Terraform
 version: ">= 0.13"
 description: {}
+icon: assets/data_warehouse_icon_v1.png
+deploymentDuration:
+configurationSecs: 120
+deploymentSecs: 420
+costEstimate:
+description: cost of this solution is $0.65
+url: https://cloud.google.com/products/calculator/#id=857776c6-49e8-4c6a-adc5-42a15b8fb67d
+cloudProducts:
+- productId: search_BIGQUERY_SECTION
+  pageUrl: ""
+- productId: WORKFLOWS_SECTION
+  pageUrl: ""
+- productId: STORAGE_SECTION
+  pageUrl: ""
+- productId: ai-platform
+  pageUrl: ""
+- productId: LOOKER_STUDIO_SECTION
+  pageUrl: lookerstudio.google.com
+  isExternal: true
+- productId: CLOUD_DMS_SECTION
+  pageUrl: ""
+- productId: FUNCTIONS_SECTION
+  pageUrl: ""
+- productId: DATAFORM_SECTION
+  pageUrl: ""
 content:
+architecture:
+diagramUrl: www.gstatic.com/pantheon/images/solutions/data-warehouse-architecture_v6.svg
+description:
+- Data lands in a Cloud Storage bucket.
+- Cloud Workflows facilitates the data movement.
+- Data is loaded into BigQuery as a BigLake table.
+- Views of the data are created in BigQuery using stored procedures
+- Dashboards are created from the data to perform more analytics.
+- BigQuery ML calls the generative AI capabilities of Vertex AI to summarize the analysis.
+- Cloud Functions creates notebooks with additional learning content.
 documentation:
 - title: Create a Data Warehouse
 url: https://cloud.google.com/architecture/big-data-analytics/data-warehouse
@@ -49,18 +84,25 @@ spec:
 location: examples/scheduled_queries
 interfaces:
 variables:
+- name: create_ignore_service_accounts
+description: Whether or not to ignore creation of a service account if an account of the same name already exists
+varType: string
+defaultValue: true
+- name: dataform_region
+description: Region that is used to deploy Dataform resources. This does not limit where resources can be run or what region data must be located in.
+varType: string
 - name: deletion_protection
 description: Whether or not to protect GCS resources from deletion when solution is modified or changed.
 varType: string
-defaultValue: true
+defaultValue: false
 - name: enable_apis
 description: Whether or not to enable underlying apis in this solution.
 varType: string
 defaultValue: true
 - name: force_destroy
 description: Whether or not to protect BigQuery resources from deletion when solution is modified or changed.
 varType: string
-defaultValue: false
+defaultValue: true
 - name: labels
 description: A map of labels to apply to contained resources.
 varType: map(string)
@@ -69,12 +111,10 @@ spec:
 - name: project_id
 description: Google Cloud Project ID
 varType: string
-defaultValue: null
 required: true
 - name: region
 description: Google Cloud Region
 varType: string
-defaultValue: null
 required: true
 - name: text_generation_model_name
 description: Name of the BigQuery ML GenAI remote model that connects to the LLM used for text generation

(new file)

@@ -0,0 +1,210 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+# Define the list of notebook files to be created
+locals {
+  notebook_names = [
+    for s in fileset("${path.module}/templates/notebooks/", "*.ipynb") : trimsuffix(s, ".ipynb")
+  ]
+}
+
+# Create the notebook files to be uploaded
+resource "local_file" "notebooks" {
+  count    = length(local.notebook_names)
+  filename = "${path.module}/src/function/notebooks/${local.notebook_names[count.index]}.ipynb"
+  content = templatefile("${path.module}/templates/notebooks/${local.notebook_names[count.index]}.ipynb", {
+    PROJECT_ID     = format("\\%s${module.project-services.project_id}\\%s", "\"", "\""),
+    REGION         = format("\\%s${var.region}\\%s", "\"", "\""),
+    GCS_BUCKET_URI = google_storage_bucket.raw_bucket.url
+    }
+  )
+}
+
+# Upload the Cloud Function source code to a GCS bucket
+## Define/create zip file for the Cloud Function source. This includes notebooks that will be uploaded
+data "archive_file" "create_notebook_function_zip" {
+  type        = "zip"
+  output_path = "${path.module}/tmp/notebooks_function_source.zip"
+  source_dir  = "${path.module}/src/function/"
+
+  depends_on = [local_file.notebooks]
+}
+
+## Set up a storage bucket for Cloud Function source code
+resource "google_storage_bucket" "function_source" {
+  name                        = "ds-edw-gcf-source-${random_id.id.hex}"
+  project                     = module.project-services.project_id
+  location                    = var.region
+  uniform_bucket_level_access = true
+  force_destroy               = var.force_destroy
+
+  public_access_prevention = "enforced"
+
+  depends_on = [time_sleep.wait_after_apis]
+
+  labels = var.labels
+}
+
+## Upload the zip file of the source code to GCS
+resource "google_storage_bucket_object" "function_source_upload" {
+  name   = "notebooks_function_source.zip"
+  bucket = google_storage_bucket.function_source.name
+  source = data.archive_file.create_notebook_function_zip.output_path
+}
+
+# Manage Cloud Function permissions and access
+## Create a service account to manage the function
+resource "google_service_account" "cloud_function_manage_sa" {
+  project                      = module.project-services.project_id
+  account_id                   = "notebook-deployment"
+  display_name                 = "Cloud Functions Service Account"
+  description                  = "Service account used to manage Cloud Function"
+  create_ignore_already_exists = var.create_ignore_service_accounts
+
+  depends_on = [
+    time_sleep.wait_after_apis,
+  ]
+}
+
+## Define the IAM roles that are granted to the Cloud Function service account
+locals {
+  cloud_function_roles = [
+    "roles/cloudfunctions.admin", // Service account role to manage access to the remote function
+    "roles/dataform.admin",       // Edit access code resources
+    "roles/iam.serviceAccountUser",
+    "roles/iam.serviceAccountTokenCreator",
+    "roles/run.invoker",          // Service account role to invoke the remote function
+    "roles/storage.objectViewer"  // Read GCS files
+  ]
+}
+
+## Assign required permissions to the function service account
+resource "google_project_iam_member" "function_manage_roles" {
+  project = module.project-services.project_id
+  count   = length(local.cloud_function_roles)
+  role    = local.cloud_function_roles[count.index]
+  member  = "serviceAccount:${google_service_account.cloud_function_manage_sa.email}"
+
+  depends_on = [google_service_account.cloud_function_manage_sa, google_project_iam_member.dts_roles]
+}
+
+## Grant the Cloud Workflows service account access to act as the Cloud Function service account
+resource "google_service_account_iam_member" "workflow_auth_function" {
+  service_account_id = google_service_account.cloud_function_manage_sa.name
+  role               = "roles/iam.serviceAccountUser"
+  member             = "serviceAccount:${google_service_account.workflow_manage_sa.email}"
+
+  depends_on = [
+    google_service_account.workflow_manage_sa,
+    google_project_iam_member.function_manage_roles
+  ]
+}
+
+locals {
+  dataform_region = (var.dataform_region == null ? var.region : var.dataform_region)
+}
+
+# Setup Dataform repositories to host notebooks
+## Create the Dataform repos
+resource "google_dataform_repository" "notebook_repo" {
+  count        = length(local.notebook_names)
+  provider     = google-beta
+  project      = module.project-services.project_id
+  region       = local.dataform_region
+  name         = local.notebook_names[count.index]
+  display_name = local.notebook_names[count.index]
+  labels = {
+    "data-warehouse"         = "true"
+    "single-file-asset-type" = "notebook"
+  }
+  depends_on = [time_sleep.wait_after_apis]
+}
+
+## Grant Cloud Function service account access to write to the repo
+resource "google_dataform_repository_iam_member" "function_manage_repo" {
+  provider   = google-beta
+  project    = module.project-services.project_id
+  region     = local.dataform_region
+  role       = "roles/dataform.admin"
+  member     = "serviceAccount:${google_service_account.cloud_function_manage_sa.email}"
+  count      = length(local.notebook_names)
+  repository = local.notebook_names[count.index]
+  depends_on = [time_sleep.wait_after_apis, google_service_account_iam_member.workflow_auth_function, google_dataform_repository.notebook_repo]
+}
+
+## Grant Cloud Workflows service account access to write to the repo
+resource "google_dataform_repository_iam_member" "workflow_manage_repo" {
+  provider   = google-beta
+  project    = module.project-services.project_id
+  region     = local.dataform_region
+  role       = "roles/dataform.admin"
+  member     = "serviceAccount:${google_service_account.workflow_manage_sa.email}"
+  count      = length(local.notebook_names)
+  repository = local.notebook_names[count.index]
+
+  depends_on = [google_service_account_iam_member.workflow_auth_function, google_dataform_repository_iam_member.function_manage_repo, google_dataform_repository.notebook_repo]
+}
+
+# Create and deploy a Cloud Function to deploy notebooks
+## Create the Cloud Function
+resource "google_cloudfunctions2_function" "notebook_deploy_function" {
+  name        = "deploy-notebooks"
+  project     = module.project-services.project_id
+  location    = var.region
+  description = "A Cloud Function that deploys sample notebooks."
+  build_config {
+    runtime     = "python310"
+    entry_point = "run_it"
+
+    source {
+      storage_source {
+        bucket = google_storage_bucket.function_source.name
+        object = google_storage_bucket_object.function_source_upload.name
+      }
+    }
+  }
+
+  service_config {
+    max_instance_count = 1
+    # min_instance_count can be set to 1 to improve performance and responsiveness
+    min_instance_count               = 0
+    available_memory                 = "512Mi"
+    timeout_seconds                  = 300
+    max_instance_request_concurrency = 1
+    available_cpu                    = "2"
+    ingress_settings                 = "ALLOW_ALL"
+    all_traffic_on_latest_revision   = true
+    service_account_email            = google_service_account.cloud_function_manage_sa.email
+    environment_variables = {
+      "PROJECT_ID" : module.project-services.project_id,
+      "REGION" : local.dataform_region
+    }
+  }
+
+  depends_on = [
+    time_sleep.wait_after_apis,
+    google_project_iam_member.function_manage_roles,
+    google_dataform_repository.notebook_repo,
+    google_dataform_repository_iam_member.workflow_manage_repo,
+    google_dataform_repository_iam_member.function_manage_repo
+  ]
+}
+
+## Wait for Function deployment to complete
+resource "time_sleep" "wait_after_function" {
+  create_duration = "5s"
+  depends_on = [google_cloudfunctions2_function.notebook_deploy_function]
+}
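
The PROJECT_ID and REGION template values above are wrapped with format("\\%s...\\%s", "\"", "\"") so that the substituted value lands in the notebook JSON as an escaped, quoted string. A standalone sketch of what that expression evaluates to, with a purely illustrative value:

# Illustrative only: mirrors the format() trick used in local_file.notebooks.
locals {
  example_project_id = "my-project-id" # hypothetical value

  # Evaluates to: \"my-project-id\"  (a backslash, a quote, the value, a backslash, a quote)
  example_escaped = format("\\%s${local.example_project_id}\\%s", "\"", "\"")
}

# Inside an .ipynb template (which is JSON), a cell source entry such as
#   "PROJECT_ID = ${PROJECT_ID}"
# therefore renders as
#   "PROJECT_ID = \"my-project-id\""
# which the opened notebook displays as: PROJECT_ID = "my-project-id"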

(new file)

@@ -0,0 +1,60 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud import dataform_v1beta1
+import os
+
+# Commit the notebook files to the repositories created by Terraform
+
+
+def commit_repository_changes(client, project, region) -> str:
+    directory = f"{os.path.dirname(__file__)}/notebooks/"
+    for file in os.listdir(directory):
+        with open(os.path.join(directory, file), 'rb') as f:
+            encoded_string = f.read()
+            file_base_name = os.path.basename(file).removesuffix(".ipynb")
+            repo_id = f"projects/{project}/locations/{region}/repositories/{file_base_name}"  # ignore line too long error # noqa: E501
+            print(repo_id)
+            request = dataform_v1beta1.CommitRepositoryChangesRequest()
+            request.name = repo_id
+            request.commit_metadata = dataform_v1beta1.CommitMetadata(
+                author=dataform_v1beta1.CommitAuthor(
+                    name="Google JSS",
+                    email_address="no-reply@google.com"
+                ),
+                commit_message="Committing Jump Start Solution notebooks"
+            )
+            request.file_operations = {}
+            request.file_operations["content.ipynb"] = \
+                dataform_v1beta1.\
+                CommitRepositoryChangesRequest.\
+                FileOperation(write_file=dataform_v1beta1.
+                              CommitRepositoryChangesRequest.
+                              FileOperation.
+                              WriteFile(contents=encoded_string)
+                              )
+            print(request.file_operations)
+            client.commit_repository_changes(request=request)
+            print(f"Committed changes to {repo_id}")
+    return ("Committed changes to all repos")
+
+
+def run_it(request) -> str:
+    dataform_client = dataform_v1beta1.DataformClient()
+    project_id = os.environ.get("PROJECT_ID")
+    region_id = os.environ.get("REGION")
+    commit_changes = commit_repository_changes(
+        dataform_client, project_id, region_id)
+    print("Notebooks created!")
+    return commit_changes

(new file)

@@ -0,0 +1,2 @@
+functions-framework==3.*
+google-cloud-dataform==0.5.5

‎modules/data_warehouse/templates/notebooks/getting_started_bq_dataframes.ipynb

+867
Large diffs are not rendered by default.

‎modules/data_warehouse/templates/workflow.tftpl

+27
@@ -25,6 +25,9 @@ main:
     - sub_create_tables:
         call: create_tables
         result: output1
+    - sub_invoke_function:
+        call: invoke_function
+        result: output2

 # Subworkflow to copy initial objects
 copy_objects:
@@ -105,3 +108,27 @@ create_tables:
     - sumStep:
         assign:
           - results[key]: $${queryResult}
+
+# Subworkflow to invoke Cloud Function
+invoke_function:
+  steps:
+    - init:
+        assign:
+          - function_url: ${function_url}
+          - function_name: ${function_name}
+    - run_function:
+        try:
+          call: http.get
+          args:
+            url: $${function_url}
+            auth:
+              type: OIDC
+          result: function_result
+        except:
+          as: e
+          raise:
+            exception: $${e}
+            functionName: $${function_name}
+    - finish:
+        return: $${function_result}
+

‎modules/data_warehouse/variables.tf

+65 −5

@@ -23,12 +23,68 @@ variable "region" {
   type = string
   description = "Google Cloud Region"

+  /**
+   * This variable list assumes you are using the same region for both Dataform and all other assets.
+   * If you want to deploy your Dataform respositories in a different region, set the default value
+   * for var.dataform_region to one of the regions in the Dataform validation list.
+   * You can then set this variable value to any of the following:
+   *   "asia-northeast3"
+   *   "asia-southeast1"
+   *   "europe-west1"
+   *   "europe-west2"
+   *   "europe-west3"
+   *   "europe-west4"
+   *   "europe-west9"
+   *   "us-central1"
+   *   "us-west4"
+   *
+   * Be sure to update the validation list below to include these additional values!
+   */
+
+  validation {
+    condition = contains([
+      "asia-southeast1",
+      "europe-west1",
+      "europe-west2",
+      "europe-west3",
+      "europe-west4",
+      "us-central1",
+      ],
+    var.region)
+    error_message = "This region is not supported. Region must be one of: asia-southeast1, europe-west1, europe-west2, europe-west3, europe-west4, us-central1"
+  }
+}
+
+
+variable "dataform_region" {
+  type = string
+  description = "Region that is used to deploy Dataform resources. This does not limit where resources can be run or what region data must be located in."
+  default = null
+  nullable = true
+
   validation {
-    condition = contains(["us-central1", "us-west4", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west9", "asia-northeast3", "asia-southeast1"], var.region)
-    error_message = "This region is not supported. Region must be one of us-central1, us-west4, europe-west1, europe-west2, europe-west3, europe-west4, europe-west9, asia-northeast3, asia-southeast1."
+    condition = anytrue([var.dataform_region == null, try(contains(
+      [
+        "asia-east1",
+        "asia-northeast1",
+        "asia-south1",
+        "asia-southeast1",
+        "australia-southeast1",
+        "europe-west1",
+        "europe-west2",
+        "europe-west3",
+        "europe-west4",
+        "europe-west6",
+        "southamerica-east1",
+        "us-central1",
+        "us-east1",
+        "us-west1",
+      ], var.dataform_region), true)])
+    error_message = "This region is not supported for Dataform. Region must be one of: asia-east1, asia-northeast1, asia-south1, asia-southeast1, australia-southeast1, europe-west1, europe-west2, europe-west3, europe-west4, europe-west6, southamerica-east1, us-central1, us-east1, us-west1."
   }
 }

+
 variable "text_generation_model_name" {
   type = string
   description = "Name of the BigQuery ML GenAI remote model that connects to the LLM used for text generation"
@@ -50,13 +106,17 @@ variable "enable_apis" {
 variable "force_destroy" {
   type = string
   description = "Whether or not to protect BigQuery resources from deletion when solution is modified or changed."
-  default = false
+  default = true
 }

 variable "deletion_protection" {
   type = string
   description = "Whether or not to protect GCS resources from deletion when solution is modified or changed."
-  default = true
+  default = false
 }

-
+variable "create_ignore_service_accounts" {
+  type = string
+  description = "Whether or not to ignore creation of a service account if an account of the same name already exists"
+  default = true
+}

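A brief example of how the two region inputs might be set in a .tfvars file when Dataform should live in a different region than the rest of the solution; the values are illustrative and must appear in the respective validation lists above, and dataform_region can simply be omitted to fall back to region:

# terraform.tfvars (illustrative values)
project_id      = "my-project-id"
region          = "europe-west4"   # must pass the region validation list
dataform_region = "europe-west1"   # must pass the Dataform validation list; omit to reuse region
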
‎modules/data_warehouse/versions.tf

+4
@@ -40,6 +40,10 @@ terraform {
       source  = "hashicorp/http"
       version = ">= 2"
     }
+    local = {
+      source  = "hashicorp/local"
+      version = ">=2.4"
+    }
   }
   required_version = ">= 0.13"

‎modules/data_warehouse/workflows.tf

+40 −25

@@ -15,58 +15,70 @@
 */

 # Set up the Workflow
-# # Create the Workflows service account
-resource "google_service_account" "workflow_service_account" {
-  project = module.project-services.project_id
-  account_id = "cloud-workflow-sa-${random_id.id.hex}"
-  display_name = "Service Account for Cloud Workflows"
+## Create the Workflows service account
+resource "google_service_account" "workflow_manage_sa" {
+  project = module.project-services.project_id
+  account_id = "cloud-workflow-sa-${random_id.id.hex}"
+  display_name = "Service Account for Cloud Workflows"
+  description = "Service account used to manage Cloud Workflows"
+  create_ignore_already_exists = var.create_ignore_service_accounts
+

   depends_on = [time_sleep.wait_after_apis]
 }

-# # Grant the Workflow service account access
-resource "google_project_iam_member" "workflow_service_account_roles" {
-  for_each = toset([
-    "roles/workflows.admin",
-    "roles/run.invoker",
-    "roles/iam.serviceAccountTokenCreator",
-    "roles/storage.objectAdmin",
+## Define the IAM roles granted to the Workflows service account
+locals {
+  workflow_roles = [
     "roles/bigquery.connectionUser",
-    "roles/bigquery.jobUser",
     "roles/bigquery.dataEditor",
-  ]
-  )
+    "roles/bigquery.jobUser",
+    "roles/iam.serviceAccountTokenCreator",
+    "roles/iam.serviceAccountUser",
+    "roles/run.invoker",
+    "roles/storage.objectAdmin",
+    "roles/workflows.admin",
+  ]
+}
+
+## Grant the Workflow service account access
+resource "google_project_iam_member" "workflow_manage_sa_roles" {
+  count = length(local.workflow_roles)
   project = module.project-services.project_id
-  role = each.key
-  member = "serviceAccount:${google_service_account.workflow_service_account.email}"
+  member = "serviceAccount:${google_service_account.workflow_manage_sa.email}"
+  role = local.workflow_roles[count.index]

-  depends_on = [google_service_account.workflow_service_account]
+  depends_on = [google_dataform_repository_iam_member.workflow_manage_repo]
 }

-# # Create the workflow
+## Create the workflow
 resource "google_workflows_workflow" "workflow" {
   name = "initial-workflow"
   project = module.project-services.project_id
   region = var.region
   description = "Runs post Terraform setup steps for Solution in Console"
-  service_account = google_service_account.workflow_service_account.id
+  service_account = google_service_account.workflow_manage_sa.id

   source_contents = templatefile("${path.module}/templates/workflow.tftpl", {
-    raw_bucket = google_storage_bucket.raw_bucket.name,
-    dataset_id = google_bigquery_dataset.ds_edw.dataset_id
+    raw_bucket = google_storage_bucket.raw_bucket.name,
+    dataset_id = google_bigquery_dataset.ds_edw.dataset_id,
+    function_url = google_cloudfunctions2_function.notebook_deploy_function.url
+    function_name = google_cloudfunctions2_function.notebook_deploy_function.name
   })

   labels = var.labels

   depends_on = [
-    google_project_iam_member.workflow_service_account_roles
+    google_project_iam_member.workflow_manage_sa_roles,
+    time_sleep.wait_after_function
   ]
 }

 data "google_client_config" "current" {
+
 }

-# # Trigger the execution of the setup workflow
+## Trigger the execution of the setup workflow with an API call
 data "http" "call_workflows_setup" {
   url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow.name}/executions"
   method = "POST"
@@ -80,6 +92,9 @@ data "http" "call_workflows_setup" {
     google_bigquery_routine.sproc_sp_demo_lookerstudio_report,
     google_bigquery_routine.sp_provision_lookup_tables,
     google_workflows_workflow.workflow,
-    google_storage_bucket.raw_bucket
+    google_storage_bucket.raw_bucket,
+    google_cloudfunctions2_function.notebook_deploy_function,
+    time_sleep.wait_after_function,
+    google_service_account_iam_member.workflow_auth_function
   ]
 }

‎modules/scheduled_queries/metadata.display.yaml

+1 −1

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

‎modules/scheduled_queries/metadata.yaml

+2 −4

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ spec:
 version: 7.0.0
 actuationTool:
 flavor: Terraform
-version: ">= 0.13"
+version: ">= 1.3"
 description: {}
 content:
 examples:
@@ -47,12 +47,10 @@ spec:
 - name: project_id
 description: The project where scheduled queries are created
 varType: string
-defaultValue: null
 required: true
 - name: queries
 description: Data transfer configuration for creating scheduled queries
 varType: list(any)
-defaultValue: null
 required: true
 outputs:
 - name: query_names

‎modules/udf/metadata.display.yaml

+1 −1

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

‎modules/udf/metadata.yaml

+2 −4

@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ spec:
 version: 7.0.0
 actuationTool:
 flavor: Terraform
-version: ">= 0.13"
+version: ">= 1.3"
 description: {}
 content:
 examples:
@@ -51,12 +51,10 @@ spec:
 - name: dataset_id
 description: Dataset id
 varType: string
-defaultValue: null
 required: true
 - name: project_id
 description: Project ID that contains the dataset
 varType: string
-defaultValue: null
 required: true
 outputs:
 - name: added_udfs
