From acfaf03fa1fd14a3cf19f52cef78eceb8f2fb654 Mon Sep 17 00:00:00 2001 From: JoshLarouche Date: Fri, 2 Feb 2024 14:32:17 -0800 Subject: [PATCH 1/3] chore: added terraform migration files and .ignore files --- .gitignore | 2 + helm/cas-ciip-portal/.helmignore | 8 ++ .../templates/jobs/terraform-apply.yaml | 77 +++++++++++++++++++ .../templates/jobs/terraform-modules.yaml | 14 ++++ .../jobs/terraform-service-account.yaml | 35 +++++++++ helm/cas-ciip-portal/values.yaml | 14 ++++ 6 files changed, 150 insertions(+) create mode 100644 helm/cas-ciip-portal/templates/jobs/terraform-apply.yaml create mode 100644 helm/cas-ciip-portal/templates/jobs/terraform-modules.yaml create mode 100644 helm/cas-ciip-portal/templates/jobs/terraform-service-account.yaml diff --git a/.gitignore b/.gitignore index 37bd08dd8d..b10da452aa 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,5 @@ app/.env.local app/sentry.properties app/tests/perf/**/*.json k6files/ + +.terraform diff --git a/helm/cas-ciip-portal/.helmignore b/helm/cas-ciip-portal/.helmignore index 0e8a0eb36f..7ee5f7eb1e 100644 --- a/helm/cas-ciip-portal/.helmignore +++ b/helm/cas-ciip-portal/.helmignore @@ -21,3 +21,11 @@ .idea/ *.tmproj .vscode/ + +# Terraform files from local and migration runs +.terraform/ +*.tfstate +*.tfvars +.terraform.lock.hcl +credentials.json +terraform/*.sh diff --git a/helm/cas-ciip-portal/templates/jobs/terraform-apply.yaml b/helm/cas-ciip-portal/templates/jobs/terraform-apply.yaml new file mode 100644 index 0000000000..1cec12f4ca --- /dev/null +++ b/helm/cas-ciip-portal/templates/jobs/terraform-apply.yaml @@ -0,0 +1,77 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: terraform-apply + labels: + component: backend + namespace: "{{ .Release.Namespace }}" + annotations: + "helm.sh/hook": pre-install, pre-upgrade +spec: + backoffLimit: 0 + activeDeadlineSeconds: 900 + template: + spec: + serviceAccountName: "terraform-kubernetes-service-account" + containers: + - name: terraform-apply + 
resources: {{ toYaml .Values.devops.resources | nindent 12 }} + image: "{{ .Values.devops.image.repository }}:{{ .Values.devops.sourceRepoImageTag | default .Values.devops.image.tag }}" + imagePullPolicy: "{{ .Values.devops.image.pullPolicy }}" + volumeMounts: + - mountPath: /etc/gcp + name: service-account-credentials-volume + readOnly: true + - mountPath: /etc/tf + name: terraform-backend-config-volume + readOnly: true + - name: tf-working-dir + mountPath: /working + readOnly: false + - name: terraform-modules + mountPath: /terraform + readOnly: false + env: + - name: TF_VAR_project_id + valueFrom: + secretKeyRef: + name: gcp-credentials-secret + key: gcp_project_id + - name: TF_VAR_openshift_namespace + value: {{ .Release.Namespace | quote }} + - name: TF_VAR_apps + value: '["ciip-backups", "ciip-documents", "ciip-2018"]' + - name: kubernetes_host + value: "https://api.silver.devops.gov.bc.ca:6443" + - name: GOOGLE_APPLICATION_CREDENTIALS + value: "/etc/gcp/credentials.json" + # Terraform was having an issue pulling kubernetes_host in as a TF_VAR, so we add it as an attribute to the command + command: + - /bin/sh + - -c + - | + set -euo pipefail; + cp -r /terraform/. /working; + cd /working; + export TF_VAR_kubernetes_token=$( cat /var/run/secrets/kubernetes.io/serviceaccount/token ); + terraform init -backend-config=/etc/tf/gcs.tfbackend; + terraform apply -var="kubernetes_host=$kubernetes_host" -auto-approve; + restartPolicy: Never + volumes: + - name: service-account-credentials-volume + secret: + secretName: gcp-credentials-secret # pragma: allowlist secret + items: + - key: sa_json + path: credentials.json + - name: terraform-backend-config-volume + secret: + secretName: gcp-credentials-secret # pragma: allowlist secret + items: + - key: tf_backend + path: gcs.tfbackend + - name: tf-working-dir + emptyDir: {} + - name: terraform-modules + configMap: + name: terraform-modules diff --git a/helm/cas-ciip-portal/templates/jobs/terraform-modules.yaml b/helm/cas-ciip-portal/templates/jobs/terraform-modules.yaml new file mode 100644 index 0000000000..90346a7ad1 --- /dev/null +++ b/helm/cas-ciip-portal/templates/jobs/terraform-modules.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: terraform-modules + namespace: {{ .Release.Namespace }} + # Because terraform-apply.yaml is pre-install, pre-upgrade, this configmap needs to be in place before it + annotations: + "helm.sh/hook": pre-install, pre-upgrade + "helm.sh/hook-weight": "-10" +binaryData: +{{- range $path, $data := .Files.Glob "terraform/**.tf" }} +{{ $path | base | indent 2 }}: >- +{{- $data | toString | b64enc | nindent 4 }} +{{ end }} diff --git a/helm/cas-ciip-portal/templates/jobs/terraform-service-account.yaml b/helm/cas-ciip-portal/templates/jobs/terraform-service-account.yaml new file mode 100644 index 0000000000..663ab91770 --- /dev/null +++ b/helm/cas-ciip-portal/templates/jobs/terraform-service-account.yaml @@ -0,0 +1,35 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: "terraform-secret-admin" + namespace: {{ .Release.Namespace }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade +rules: +- apiGroups: 
[""] + resources: ["secrets"] + verbs: ["create", "delete", "deletecollection", "get", "list", "patch", "update", "watch"] +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: "terraform-kubernetes-service-account" + namespace: {{ .Release.Namespace }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: "terraform-kubernetes-service-account-secret-admin-binding" + namespace: {{ .Release.Namespace }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: "terraform-secret-admin" +subjects: +- kind: ServiceAccount + name: "terraform-kubernetes-service-account" + namespace: {{ .Release.Namespace }} diff --git a/helm/cas-ciip-portal/values.yaml b/helm/cas-ciip-portal/values.yaml index 2920d4c1d9..23178ede90 100644 --- a/helm/cas-ciip-portal/values.yaml +++ b/helm/cas-ciip-portal/values.yaml @@ -168,3 +168,17 @@ nginx-sidecar: storageClassName: netapp-file-standard renewalDays: 60 clientMaxBodySize: 50M + +devops: + image: + repository: hashicorp/terraform + pullPolicy: Always + tag: "1.4.6" + + resources: + limits: + cpu: 1000m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi From a551a55343a8c5a19fdaa2f306c1c75f330d8ea3 Mon Sep 17 00:00:00 2001 From: JoshLarouche Date: Fri, 2 Feb 2024 14:36:37 -0800 Subject: [PATCH 2/3] chore: adding terraform scripts --- helm/cas-ciip-portal/terraform/main.tf | 99 +++++++++++++++++++++ helm/cas-ciip-portal/terraform/variables.tf | 33 +++++++ 2 files changed, 132 insertions(+) create mode 100644 helm/cas-ciip-portal/terraform/main.tf create mode 100644 helm/cas-ciip-portal/terraform/variables.tf diff --git a/helm/cas-ciip-portal/terraform/main.tf b/helm/cas-ciip-portal/terraform/main.tf new file mode 100644 index 0000000000..2629873e51 --- /dev/null +++ b/helm/cas-ciip-portal/terraform/main.tf @@ -0,0 +1,99 @@ +terraform { + required_version = ">=1.4.6" + + 
required_providers { + kubernetes = { + source = "hashicorp/kubernetes" + version = "~> 2.23" + } + google = { + source = "hashicorp/google" + version = "~> 5.2.0" + } + } + + backend "gcs" {} +} + +# Configure OCP infrastructure to set up the host and authentication token +provider "kubernetes" { + host = var.kubernetes_host + token = var.kubernetes_token +} + +# Configure GCP infrastructure to set up the credentials, default project and location (zone and/or region) for your resources +provider "google" { + project = var.project_id + region = local.region + # credentials come from the GOOGLE_APPLICATION_CREDENTIALS env var (set to /etc/gcp/credentials.json on the job pod); never hard-code a workstation path here +} + +# Create GCS buckets +resource "google_storage_bucket" "bucket" { + for_each = { for v in var.apps : v => v } + name = "${var.openshift_namespace}-${each.value}" + location = local.region +} + +# Create GCP service accounts for each GCS bucket +resource "google_service_account" "account" { + for_each = { for v in var.apps : v => v } + account_id = "sa-${var.openshift_namespace}-${each.value}" + display_name = "${var.openshift_namespace}-${each.value} Service Account" + depends_on = [google_storage_bucket.bucket] +} + +# Assign Storage Admin role for the corresponding service accounts +resource "google_storage_bucket_iam_member" "admin" { + for_each = { for v in var.apps : v => v } + bucket = "${var.openshift_namespace}-${each.value}" + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.account[each.key].email}" + depends_on = [google_service_account.account] +} + +# Create viewer GCP service accounts for each GCS bucket +resource "google_service_account" "viewer_account" { + for_each = { for v in var.apps : v => v } + account_id = "ro-${var.openshift_namespace}-${each.value}" + display_name = "${var.openshift_namespace}-${each.value} Viewer Service Account" + depends_on = [google_storage_bucket.bucket] +} + +# Assign (manually created) Storage Viewer role for the corresponding service accounts 
+resource "google_storage_bucket_iam_member" "viewer" { + for_each = { for v in var.apps : v => v } + bucket = "${var.openshift_namespace}-${each.value}" + role = "projects/${var.project_id}/roles/${var.iam_storage_role_template_id}" + member = "serviceAccount:${google_service_account.viewer_account[each.key].email}" + depends_on = [google_service_account.viewer_account] +} + +# Create keys for the service accounts +resource "google_service_account_key" "key" { + for_each = { for v in var.apps : v => v } + service_account_id = google_service_account.account[each.key].name +} + +# Create keys for the viewer service accounts +resource "google_service_account_key" "viewer_key" { + for_each = { for v in var.apps : v => v } + service_account_id = google_service_account.viewer_account[each.key].name +} + +resource "kubernetes_secret" "secret_sa" { + for_each = { for v in var.apps : v => v } + metadata { + name = "gcp-${var.openshift_namespace}-${each.value}-service-account-key" + namespace = var.openshift_namespace + labels = { + created-by = "Terraform" + } + } + + data = { + "bucket_name" = "${var.openshift_namespace}-${each.value}" + "credentials.json" = base64decode(google_service_account_key.key[each.key].private_key) + "viewer_credentials.json" = base64decode(google_service_account_key.viewer_key[each.key].private_key) + } +} diff --git a/helm/cas-ciip-portal/terraform/variables.tf b/helm/cas-ciip-portal/terraform/variables.tf new file mode 100644 index 0000000000..d1919e69a1 --- /dev/null +++ b/helm/cas-ciip-portal/terraform/variables.tf @@ -0,0 +1,33 @@ +# Since variables could be overridden via environment variables, use local values to define immutable values +locals { + # The GCP region to create things in. 
https://cloud.google.com/compute/docs/regions-zones" + region = "northamerica-northeast1" # Montreal +} + +variable "project_id" { + description = "The ID of the GCP project" +} + +variable "kubernetes_host" { + description = "The hostname of the OCP cluster" +} + +variable "kubernetes_token" { + description = "The authentication token of the OCP cluster" +} + +variable "apps" { + type = list(string) + description = "The list of app names for the OCP project in a namespace" +} + +variable "openshift_namespace" { + type = string + description = "The OCP project namespace" +} + +variable "iam_storage_role_template_id" { + type = string + description = "ID for a custom IAM role template we manually created in GCP for Storage Viewers" + default = "casStorageViewer" +} From dbe0d37c180dfdc09b6cdbb28cb1189cb326e106 Mon Sep 17 00:00:00 2001 From: Josh Gamache Date: Wed, 7 Feb 2024 09:34:30 -0700 Subject: [PATCH 3/3] fix: add storage limits to tf pod to satisfy sonarcloud --- helm/cas-ciip-portal/values.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/helm/cas-ciip-portal/values.yaml b/helm/cas-ciip-portal/values.yaml index 23178ede90..68e5bee218 100644 --- a/helm/cas-ciip-portal/values.yaml +++ b/helm/cas-ciip-portal/values.yaml @@ -179,6 +179,8 @@ devops: limits: cpu: 1000m memory: 512Mi + ephemeral-storage: 1Gi requests: cpu: 100m memory: 64Mi + ephemeral-storage: 256Mi