@@ -19,6 +19,19 @@ resource "time_static" "snapshot_time_static" {}
1919locals {
2020 rootFs = " ${ path . module } /rootfs"
2121 additional_rootfs = var. rootfs != " " ? var. rootfs : " "
22+ # Input example: "https://github.com/my-org/my-repo.git"
23+ # This regex removes everything from the start up to the first single slash (after the protocol)
24+ repo_path = replace (var. docker_compose_repo , " /^[^:]+://[^/]+/" , " " )
25+
26+ # base the docker compose project name on the repo + branch
27+ clean_repo_path = replace (format (" %s-%s" , trim (local. repo_path , " /" ), var. docker_compose_branch ), " /^[^:]+://[^/]+/" , " " )
28+ compose_project_name = lower (
29+ replace (
30+ replace (local. clean_repo_path , " .git" , " " ),
31+ " /[^a-zA-Z0-9]/" ,
32+ " -"
33+ )
34+ )
2235
2336 # Get files from base rootfs
2437 base_files = fileset (local. rootFs , " **" )
@@ -41,7 +54,7 @@ locals {
4154EOT
4255 ])
4356 docker_compose_scripts = join (" \n " , [
44- for name , cmd in {
57+ for name , cmds in {
4558 " init" = var . docker_compose_init
4659 " up" = var . docker_compose_up
4760 " down" = var . docker_compose_down
5366
5467 set -eou pipefail
5568
69+ source /home/cloud-compose/profile.sh
70+ pushd "$${DOCKER_COMPOSE_DIR}"
71+
5672 echo "Running docker compose ${ name } "
57- ${ cmd }
73+ ${ join (" \n " , cmds)}
74+ popd
5875EOT
5976 ])
6077 env_file_content = <<- EOT
6784 GCP_INSTANCE_NAME="${ var . name } "
6885 GCP_REGION="${ var . region } "
6986 GCP_ZONE="${ var . zone } "
70- DOCKER_COMPOSE_DIR=/mnt/disks/data/compose
87+ COMPOSE_PROJECT_NAME=${ replace (local. compose_project_name , " /-+/" , " -" )}
88+ DOCKER_COMPOSE_DIR=/mnt/disks/data${ local . repo_path } /${ var . docker_compose_branch }
7189 DOCKER_COMPOSE_REPO="${ var . docker_compose_repo } "
7290 DOCKER_COMPOSE_BRANCH="${ var . docker_compose_branch } "
7391EOT
@@ -134,7 +152,7 @@ resource "google_compute_disk" "boot" {
134152 # force re-create VM when cloud-init changes
135153 name = format (" %s-boot-%s" , var. name , md5 (data. cloudinit_config . ci . rendered ))
136154 project = var. project_id
137- type = " hyperdisk-balanced "
155+ type = var . disk_type
138156 zone = var. zone
139157 size = 15
140158 image = " projects/cos-cloud/global/images/${ var . os } "
@@ -144,7 +162,7 @@ resource "google_compute_disk" "boot" {
144162resource "google_compute_disk" "data" {
145163 name = format (" %s-data-disk" , var. name )
146164 project = var. project_id
147- type = " hyperdisk-balanced "
165+ type = var . disk_type
148166 zone = var. zone
149167 size = 20
150168 image = " debian-13-trixie-v20251111"
@@ -154,14 +172,13 @@ resource "google_compute_disk" "data" {
# Persistent disk that holds the Docker volume data for this instance.
# Sized by the caller; type is selectable so e2 machine families (which do
# not support hyperdisk) can use pd-* types.
resource "google_compute_disk" "docker-volumes" {
  name                      = "${var.name}-docker-volumes"
  project                   = var.project_id
  type                      = var.disk_type
  zone                      = var.zone
  size                      = var.disk_size_gb
  image                     = "debian-13-trixie-v20251111"
  physical_block_size_bytes = 4096
}
163181
164-
165182# Daily snapshot schedule for production docker volume disk
166183resource "google_compute_resource_policy" "daily_snapshot" {
167184 count = var. run_snapshots ? 1 : 0
@@ -192,13 +209,6 @@ resource "google_compute_resource_policy" "daily_snapshot" {
192209 }
193210 }
194211}
195- resource "google_compute_disk_resource_policy_attachment" "daily_snapshot" {
196- count = var. run_snapshots ? 1 : 0
197- name = google_compute_resource_policy. daily_snapshot [0 ]. name
198- disk = google_compute_disk. docker-volumes . name
199- project = var. project_id
200- zone = var. zone
201- }
202212
203213resource "google_compute_resource_policy" "weekly_snapshot" {
204214 count = var. run_snapshots ? 1 : 0
@@ -228,10 +238,26 @@ resource "google_compute_resource_policy" "weekly_snapshot" {
228238 }
229239}
230240
# Attach the daily snapshot schedule to both the docker-volumes disk and the
# data disk when snapshots are enabled.
resource "google_compute_disk_resource_policy_attachment" "daily_snapshot" {
  # Both branches of a Terraform conditional must produce the same type:
  # the false branch must be toset([]) (an empty set(string)), not []
  # (an empty tuple), otherwise `terraform plan` fails with
  # "The true and false result expressions must have consistent types".
  for_each = var.run_snapshots ? toset([
    google_compute_disk.docker-volumes.name,
    google_compute_disk.data.name,
  ]) : toset([])

  name    = google_compute_resource_policy.daily_snapshot[0].name
  disk    = each.value
  project = var.project_id
  zone    = var.zone
}
252+
# Attach the weekly snapshot schedule to both the docker-volumes disk and the
# data disk when snapshots are enabled.
resource "google_compute_disk_resource_policy_attachment" "weekly_snapshot" {
  # The false branch must be toset([]) (set(string)), not [] (empty tuple);
  # Terraform requires both branches of a conditional to have consistent
  # types and rejects the expression otherwise.
  for_each = var.run_snapshots ? toset([
    google_compute_disk.docker-volumes.name,
    google_compute_disk.data.name,
  ]) : toset([])

  name    = google_compute_resource_policy.weekly_snapshot[0].name
  disk    = each.value
  project = var.project_id
  zone    = var.zone
}
@@ -251,7 +277,7 @@ resource "google_compute_disk" "overlay_disk" {
251277 count = local. use_overlay ? 1 : 0
252278 name = data. google_compute_snapshot . latest_prod [0 ]. name
253279 project = var. project_id
254- type = " hyperdisk-balanced "
280+ type = var . disk_type
255281 zone = var. zone
256282 snapshot = data. google_compute_snapshot . latest_prod [0 ]. self_link
257283 physical_block_size_bytes = 4096
@@ -322,10 +348,7 @@ resource "google_compute_instance" "cloud-compose" {
322348 service_account {
323349 email = google_service_account. cloud-compose . email
324350 scopes = [
325- " https://www.googleapis.com/auth/logging.write" ,
326- " https://www.googleapis.com/auth/monitoring.write" ,
327- " https://www.googleapis.com/auth/devstorage.read_only" ,
328- " https://www.googleapis.com/auth/iam" ,
351+ " https://www.googleapis.com/auth/cloud-platform"
329352 ]
330353 }
331354
@@ -335,12 +358,18 @@ resource "google_compute_instance" "cloud-compose" {
335358 enable_vtpm = " true"
336359 }
337360
338- depends_on = [google_compute_disk . overlay_disk ]
339361 lifecycle {
340- replace_triggered_by = [
341- google_compute_disk . overlay_disk
342- ]
362+ precondition {
363+ condition = (
364+ startswith (var. machine_type , " e2" ) ?
365+ contains ([" pd-ssd" , " pd-standard" ], var. disk_type ) :
366+ true
367+ )
368+ error_message = " When using an 'e2' machine type, 'disk_type' must be 'pd-ssd' or 'pd-standard'."
369+ }
343370 }
371+
372+ depends_on = [google_compute_disk . overlay_disk ]
344373}
345374
346375# machine needs to be able to suspend itself
@@ -394,6 +423,12 @@ resource "google_service_account_iam_member" "app-keys" {
394423 member = " serviceAccount:${ google_service_account . cloud-compose . email } "
395424}
396425
# Grant the app service account the Token Creator role on itself so it can
# sign JWTs / mint tokens as its own identity (self-impersonation).
resource "google_service_account_iam_member" "self_jwt_signer_policy" {
  service_account_id = google_service_account.app.id
  role               = "roles/iam.serviceAccountTokenCreator"
  member             = "serviceAccount:${google_service_account.app.email}"
}
431+
397432# =============================================================================
398433# CLOUD RUN INGRESS
399434# =============================================================================
@@ -486,3 +521,36 @@ resource "google_project_iam_member" "gce-start" {
486521 role = data. google_project_iam_custom_role . gce-start . name
487522 member = " serviceAccount:${ google_service_account . ppb . email } "
488523}
524+
# Ingress rule permitting SSH (tcp/22) over IPv4 to instances tagged with
# this deployment's name.
resource "google_compute_firewall" "allow_ssh_ipv4" {
  project   = var.project_id
  name      = "allow-ssh-ipv4-${var.name}"
  network   = "default"
  priority  = 10
  direction = "INGRESS"

  allow {
    protocol = "tcp"
    ports    = ["22"]
  }

  target_tags = [var.name]

  # When no ranges are configured, fall back to loopback only, which
  # effectively disables external SSH without deleting the rule.
  source_ranges = length(var.allowed_ssh_ipv4) > 0 ? var.allowed_ssh_ipv4 : ["127.0.0.1/32"]
}
540+
# Ingress rule permitting SSH (tcp/22) over IPv6 to instances tagged with
# this deployment's name.
resource "google_compute_firewall" "allow_ssh_ipv6" {
  project   = var.project_id
  name      = format("allow-ssh-ipv6-%s", var.name)
  network   = "default"
  priority  = 10
  direction = "INGRESS"

  allow {
    protocol = "tcp"
    ports    = ["22"]
  }

  target_tags = [var.name]

  # Fallback must be the IPv6 loopback (::1/128), not the IPv4 loopback
  # (127.0.0.1/32) used previously: this is the IPv6 rule, and a GCP
  # firewall rule's source ranges must match the IP version of the traffic
  # it is meant to govern. ::1/128 keeps the rule effectively deny-all when
  # no ranges are configured.
  source_ranges = length(var.allowed_ssh_ipv6) > 0 ? var.allowed_ssh_ipv6 : ["::1/128"]
}
0 commit comments