
Commit 6146404

fix: adding regional constraints and simplifying workflow execution (#284)
1 parent 4c51616

7 files changed (+40 -134 lines)

modules/data_warehouse/README.md (-4)

@@ -74,8 +74,6 @@ the resources of this module:
 - Storage Admin: `roles/storage.admin`
 - BigQuery Admin: `roles/bigquery.admin`
 - Workflows Admin: `roles/workflows.admin`
-- Eventarc Admin: `roles/eventarc.admin`
-- Pub/Sub Admin: `roles/pubsub.admin`
 - Dataplex Admin: `roles/dataplex.admin`
 
 The [Project Factory module](./.terraform/modules/project-services/README.md) and the
@@ -101,8 +99,6 @@ resources of this module:
 - Infrastructure Manager API: `config.googleapis.com`
 - Data Catalog API: `datacatalog.googleapis.com`
 - Data Lineage API: `datalineage.googleapis.com`
-- Eventarc API: `eventarc.googleapis.com`
-- Google Cloud Pub/Sub API: `pubsub.googleapis.com`
 - Service Usage API: `serviceusage.googleapis.com`
 - Google Cloud Storage API: `storage.googleapis.com`
 - Google Cloud Storage JSON API: `storage-api.googleapis.com`

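With Eventarc and Pub/Sub dropped from the prerequisites, the identity applying this module needs only the remaining roles. A minimal sketch of granting one of them ahead of deployment; the project ID and member here are placeholders, not values from this commit:

resource "google_project_iam_member" "deployer_workflows_admin" {
  project = "my-project-id"             # placeholder project ID
  role    = "roles/workflows.admin"     # one of the roles listed above
  member  = "user:deployer@example.com" # placeholder deploying identity
}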
modules/data_warehouse/bigquery.tf (+5 -5)

@@ -70,6 +70,7 @@ resource "google_project_iam_member" "bq_connection_iam_vertex_ai" {
   member = "serviceAccount:${google_bigquery_connection.vertex_ai_connection.cloud_resource[0].service_account_id}"
 }
 
+# Create data tables in BigQuery
 # # Create a Biglake table for events with metadata caching
 resource "google_bigquery_table" "tbl_edw_events" {
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -199,7 +200,7 @@ resource "google_bigquery_routine" "sp_provision_lookup_tables" {
   )
 }
 
-# Add Looker Studio Data Report Procedure
+# # Add Looker Studio Data Report Procedure
 resource "google_bigquery_routine" "sproc_sp_demo_lookerstudio_report" {
   project    = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -239,7 +240,7 @@ resource "google_bigquery_routine" "sp_sample_queries" {
 }
 
 
-# Add Bigquery ML Model for clustering
+# # Add Bigquery ML Model for clustering
 resource "google_bigquery_routine" "sp_bigqueryml_model" {
   project    = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -256,7 +257,7 @@ resource "google_bigquery_routine" "sp_bigqueryml_model" {
   ]
 }
 
-# Create Bigquery ML Model for using text generation
+# # Create Bigquery ML Model for using text generation
 resource "google_bigquery_routine" "sp_bigqueryml_generate_create" {
   project    = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -273,7 +274,7 @@ resource "google_bigquery_routine" "sp_bigqueryml_generate_create" {
   )
 }
 
-# Query Bigquery ML Model for describing customer clusters
+# # Query Bigquery ML Model for describing customer clusters
 resource "google_bigquery_routine" "sp_bigqueryml_generate_describe" {
   project    = module.project-services.project_id
   dataset_id = google_bigquery_dataset.ds_edw.dataset_id
@@ -382,6 +383,5 @@ resource "google_bigquery_data_transfer_config" "dts_config" {
     google_project_iam_member.dts_roles,
     google_bigquery_dataset.ds_edw,
     google_service_account_iam_binding.dts_token_creator,
-    time_sleep.wait_to_startfile,
   ]
 }

modules/data_warehouse/main.tf (+5 -123)

@@ -37,21 +37,13 @@ module "project-services" {
     "config.googleapis.com",
     "datacatalog.googleapis.com",
     "datalineage.googleapis.com",
-    "eventarc.googleapis.com",
-    "pubsub.googleapis.com",
     "serviceusage.googleapis.com",
     "storage.googleapis.com",
     "storage-api.googleapis.com",
     "workflows.googleapis.com",
   ]
 
   activate_api_identities = [
-    {
-      api = "pubsub.googleapis.com"
-      roles = [
-        "roles/iam.serviceAccountTokenCreator",
-      ]
-    },
     {
       api = "workflows.googleapis.com"
       roles = [
@@ -61,18 +53,18 @@ module "project-services" {
   ]
 }
 
+# Wait after APIs are enabled to give time for them to spin up
 resource "time_sleep" "wait_after_apis" {
   create_duration = "90s"
   depends_on      = [module.project-services]
 }
 
-// Create random ID to be used for deployment uniqueness
+# Create random ID to be used for deployment uniqueness
 resource "random_id" "id" {
   byte_length = 4
 }
 
 # Set up Storage Buckets
-
 # # Set up the raw storage bucket
 resource "google_storage_bucket" "raw_bucket" {
   name = "ds-edw-raw-${random_id.id.hex}"
@@ -88,120 +80,10 @@ resource "google_storage_bucket" "raw_bucket" {
   labels = var.labels
 }
 
-# # Set up the provisioning storage bucket
-resource "google_storage_bucket" "provisioning_bucket" {
-  name                        = "ds-edw-provisioner-${random_id.id.hex}"
-  project                     = module.project-services.project_id
-  location                    = var.region
-  uniform_bucket_level_access = true
-  force_destroy               = var.force_destroy
-
-  public_access_prevention = "enforced"
-
-  depends_on = [time_sleep.wait_after_apis]
-
-  labels = var.labels
-}
-
-// Create Eventarc Trigger
-# # Create a Pub/Sub topic.
-resource "google_pubsub_topic" "topic" {
-  name    = "provisioning-topic"
-  project = module.project-services.project_id
-
-  depends_on = [time_sleep.wait_after_apis]
-
-  labels = var.labels
-}
-
-resource "google_pubsub_topic_iam_binding" "binding" {
-  project = module.project-services.project_id
-  topic   = google_pubsub_topic.topic.id
-  role    = "roles/pubsub.publisher"
-  members = ["serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}"]
-}
-
-# # Get the GCS service account to trigger the pub/sub notification
-data "google_storage_project_service_account" "gcs_account" {
-  project = module.project-services.project_id
-
-  depends_on = [time_sleep.wait_after_apis]
-}
-
-# # Create the Storage trigger
-resource "google_storage_notification" "notification" {
-  provider       = google
-  bucket         = google_storage_bucket.provisioning_bucket.name
-  payload_format = "JSON_API_V1"
-  topic          = google_pubsub_topic.topic.id
-  depends_on = [
-    google_pubsub_topic_iam_binding.binding,
-  ]
-}
-
-# # Create the Eventarc trigger
-resource "google_eventarc_trigger" "trigger_pubsub_tf" {
-  project  = module.project-services.project_id
-  name     = "trigger-pubsub-tf"
-  location = var.region
-  matching_criteria {
-    attribute = "type"
-    value     = "google.cloud.pubsub.topic.v1.messagePublished"
-
-  }
-  destination {
-    workflow = google_workflows_workflow.workflow.id
-  }
-
-  transport {
-    pubsub {
-      topic = google_pubsub_topic.topic.id
-    }
-  }
-  service_account = google_service_account.eventarc_service_account.email
-
-  labels = var.labels
-
-  depends_on = [
-    google_project_iam_member.eventarc_service_account_invoke_role,
-  ]
-}
-
-# Set up Eventarc service account for the Trigger to execute as
-# # Set up the Eventarc service account
-resource "google_service_account" "eventarc_service_account" {
-  project      = module.project-services.project_id
-  account_id   = "eventarc-sa-${random_id.id.hex}"
-  display_name = "Service Account for Cloud Eventarc"
-
-  depends_on = [time_sleep.wait_after_apis]
-}
-
-# # Grant the Eventarc service account Workflow Invoker Access
-resource "google_project_iam_member" "eventarc_service_account_invoke_role" {
-  project = module.project-services.project_id
-  role    = "roles/workflows.invoker"
-  member  = "serviceAccount:${google_service_account.eventarc_service_account.email}"
-}
-
-// Sleep for 120 seconds to drop start file
-resource "time_sleep" "wait_to_startfile" {
-  depends_on = [
-    google_storage_notification.notification,
-    google_eventarc_trigger.trigger_pubsub_tf,
-    google_workflows_workflow.workflow
-  ]
-
+# Sleep for 120 seconds to allow the workflow to execute and finish setup
+resource "time_sleep" "wait_after_workflow_execution" {
   create_duration = "120s"
-}
-
-// Drop start file for workflow to execute
-resource "google_storage_bucket_object" "startfile" {
-  bucket = google_storage_bucket.provisioning_bucket.name
-  name   = "startfile"
-  source = "${path.module}/src/startfile"
-
   depends_on = [
-    time_sleep.wait_to_startfile
+    data.http.call_workflows_setup,
  ]
 }

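The start-file bucket, Pub/Sub topic, Eventarc trigger, and their service account are all replaced by a single time_sleep that begins once the HTTP call in workflows.tf (shown further down) has fired. Any resource that must wait for the workflow's results can gate on that sleep; a sketch under the assumption that such a consumer exists, since this commit adds none:

output "workflow_setup_complete" {
  description = "Set only after the setup workflow has had 120s to finish"
  value       = "done"
  depends_on  = [time_sleep.wait_after_workflow_execution]
}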
modules/data_warehouse/src/startfile (-1)

This file was deleted.

modules/data_warehouse/variables.tf (+5)

@@ -22,6 +22,11 @@ variable "project_id" {
 variable "region" {
   type        = string
   description = "Google Cloud Region"
+
+  validation {
+    condition     = contains(["us-central1", "us-west4", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west9", "asia-northeast3", "asia-southeast1"], var.region)
+    error_message = "This region is not supported. Region must be one of us-central1, us-west4, europe-west1, europe-west2, europe-west3, europe-west4, europe-west9, asia-northeast3, asia-southeast1."
+  }
 }
 
 variable "text_generation_model_name" {

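The validation block makes an unsupported region fail at terraform plan time with the error message above, rather than surfacing later as an API error mid-apply. A minimal caller sketch; the local module path and project ID are placeholders, not values from this commit:

module "data_warehouse" {
  source     = "./modules/data_warehouse" # placeholder path
  project_id = "my-project-id"            # placeholder project ID
  region     = "us-central1"              # must be one of the nine allowed regions
  # remaining module variables omitted for brevity
}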
modules/data_warehouse/versions.tf (+4)

@@ -36,6 +36,10 @@ terraform {
       source  = "hashicorp/time"
       version = ">= 0.9.1"
     }
+    http = {
+      source  = "hashicorp/http"
+      version = ">= 2"
+    }
   }
   required_version = ">= 0.13"
 

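The hashicorp/http provider is pulled in because workflows.tf (below) now starts the workflow with a direct HTTP request. A standalone sketch of the data source this requirement enables; the URL is the provider's stock example, not one used by the module:

data "http" "example" {
  url = "https://checkpoint-api.hashicorp.com/v1/check/terraform"

  request_headers = {
    Accept = "application/json"
  }
}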
modules/data_warehouse/workflows.tf (+21 -1)

@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-# Set up the Workflows service account
+# Set up the Workflow
+# # Create the Workflows service account
 resource "google_service_account" "workflow_service_account" {
   project    = module.project-services.project_id
   account_id = "cloud-workflow-sa-${random_id.id.hex}"
@@ -57,3 +58,22 @@ resource "google_workflows_workflow" "workflow" {
     google_project_iam_member.workflow_service_account_roles,
   ]
 }
+
+data "google_client_config" "current" {
+}
+
+# # Trigger the execution of the setup workflow
+data "http" "call_workflows_setup" {
+  url    = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow.name}/executions"
+  method = "POST"
+  request_headers = {
+    Accept        = "application/json"
+    Authorization = "Bearer ${data.google_client_config.current.access_token}" }
+  depends_on = [
+    google_storage_bucket.raw_bucket,
+    google_bigquery_routine.sp_bigqueryml_generate_create,
+    google_bigquery_routine.sp_bigqueryml_model,
+    google_bigquery_routine.sproc_sp_demo_lookerstudio_report,
+    google_bigquery_routine.sp_provision_lookup_tables
+  ]
+}

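Because the http data source lists managed resources in depends_on, Terraform defers the read until apply, so the POST to the Workflow Executions API fires only after the raw bucket and the BigQuery routines exist. For troubleshooting, the response can be surfaced through outputs; a sketch assuming the attribute names of hashicorp/http v3 (status_code and response_body; v2 exposes body instead):

output "workflows_setup_status" {
  description = "HTTP status code returned by the Workflow Executions API"
  value       = data.http.call_workflows_setup.status_code
}

output "workflows_setup_response" {
  description = "Raw execution object returned by the Workflow Executions API"
  value       = data.http.call_workflows_setup.response_body
}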