From 116c6833008631f12ae7327b4e38a3c3e6692869 Mon Sep 17 00:00:00 2001
From: Max
Date: Tue, 11 Feb 2025 21:18:55 -0500
Subject: [PATCH] fix: restore command now separated

---
 Taskfile.yml           |  8 ++++++--
 playbooks/build.yml    |  3 ++-
 playbooks/deploy.yml   | 15 +++++++++------
 playbooks/restore.yml  | 32 ++++++++++++++++++++++++--------
 terraform/Taskfile.yml | 15 ++++++++++-----
 terraform/iam.tf       |  4 ++--
 terraform/install.sh   |  4 +++-
 terraform/main.tf      | 15 ++++++++++++---
 terraform/network.tf   |  2 +-
 terraform/output.tf    | 10 +++++-----
 terraform/variables.tf |  6 +++---
 11 files changed, 77 insertions(+), 37 deletions(-)

diff --git a/Taskfile.yml b/Taskfile.yml
index 4b5ea57..d6313fb 100644
--- a/Taskfile.yml
+++ b/Taskfile.yml
@@ -7,9 +7,10 @@ includes:
 
 tasks:
   dev: docker compose -f compose.dev.yml up --build --force-recreate --no-deps
+  build: ansible-playbook playbooks/build.yml
   deploy: ansible-playbook playbooks/deploy.yml
-  restore: ansible-playbook playbooks/restore.yml
+  restore: ansible-playbook playbooks/restore.yml -e "restore_bucket={{.BUCKET}} restore_key={{.KEY}}"
   run:
     - task: build
     - task: deploy
 
@@ -17,7 +18,10 @@ tasks:
   enter:
     cmd: aws ssm start-session --target $INSTANCE_ID
     env:
-      INSTANCE_ID: { sh: jq -r .instance_id.value < secrets.tf.json }
+      INSTANCE_ID: { sh: jq -r .instance_id.value < config/infrastructure.secret.tf.json }
+      AWS_REGION: { sh: jq -r .aws_region < config/ansible.secret.json }
+      AWS_ACCESS_KEY_ID: { sh: jq -r .aws_access_key < config/ansible.secret.json }
+      AWS_SECRET_ACCESS_KEY: { sh: jq -r .aws_secret_key < config/ansible.secret.json }
 
   push:
     dir: gitea
diff --git a/playbooks/build.yml b/playbooks/build.yml
index 0fbd933..4a35e37 100644
--- a/playbooks/build.yml
+++ b/playbooks/build.yml
@@ -1,6 +1,7 @@
 - name: Make build artifact.
   hosts: localhost
-  vars_files: ../config/ansible.json
+  vars_files: ../config/ansible.secret.json
+  gather_facts: false
   tasks:
     - name: Build image.
       community.docker.docker_image_build:
diff --git a/playbooks/deploy.yml b/playbooks/deploy.yml
index a731ea6..d366242 100644
--- a/playbooks/deploy.yml
+++ b/playbooks/deploy.yml
@@ -1,9 +1,10 @@
 - name: Deploy artifact to instance.
   hosts: localhost
   become: true
+  gather_facts: false
   vars_files:
-    - ../config/ansible.json
-    - ../config/infrastructure.json
+    - ../config/ansible.secret.json
+    - ../config/infrastructure.secret.tf.json
   vars:
     ansible_connection: aws_ssm
     ansible_python_interpreter: /usr/bin/python3
@@ -26,15 +27,17 @@
         access_key: "{{ aws_access_key }}"
         secret_key: "{{ aws_secret_key }}"
 
+    - name: Create data directory.
+      ansible.builtin.file:
+        path: /home/ssm-user/data
+        state: directory
+        mode: '0777'
+
     - name: Load image.
       community.docker.docker_image_load:
         path: /root/image.tar.gz
       register: image
 
-    - name: Create a volume.
-      community.docker.docker_volume:
-        name: data
-
     - name: Run image.
       community.docker.docker_container:
         name: server
diff --git a/playbooks/restore.yml b/playbooks/restore.yml
index 4a15e0e..79c8eb1 100644
--- a/playbooks/restore.yml
+++ b/playbooks/restore.yml
@@ -1,9 +1,10 @@
 - name: Deploy artifact to instance.
   hosts: localhost
   become: true
+  gather_facts: false
   vars_files:
-    - ../config/ansible.json
-    - ../config/infrastructure.json
+    - ../config/ansible.secret.json
+    - ../config/infrastructure.secret.tf.json
   vars:
     ansible_connection: aws_ssm
     ansible_python_interpreter: /usr/bin/python3
@@ -27,23 +28,38 @@
           access_key: "{{ boot_id.value }}"
           secret_key: "{{ boot_secret.value }}"
       amazon.aws.s3_object:
-        bucket: "{{ boot_bucket }}"
-        object: "{{ boot_key }}"
-        dest: /home/ssm-user/backup.tar.xz
+        bucket: "{{ restore_bucket | mandatory(msg='You must specify the bucket of the data.') }}"
+        object: "{{ restore_key | mandatory(msg='You must specify the key of the data.') }}"
+        dest: /home/ssm-user/backup.tar.gz
         mode: get
 
     - name: Ensure backup directory exists.
       ansible.builtin.file:
-        path: /home/ssm-user/data
+        path: /home/ssm-user/backup
         state: directory
         mode: '0777'
 
     - name: Extract backup.
       ansible.builtin.unarchive:
-        src: /home/ssm-user/backup.tar.xz
-        dest: /home/ssm-user/data
+        src: /home/ssm-user/backup.tar.gz
+        dest: /home/ssm-user/backup
         remote_src: true
 
+    - name: Move backup files to data folder.
+      ansible.builtin.copy:
+        remote_src: true
+        src: /home/ssm-user/backup/backup/my-app-backup/
+        dest: /home/ssm-user/data/
+        mode: '0777'
+
+    - name: Update permissions.
+      ansible.builtin.file:
+        path: /home/ssm-user/data
+        recurse: true
+        mode: '0777'
+        owner: 1000
+        group: 1000
+
     - name: Restart containers.
       community.docker.docker_container:
         name: "{{ item }}"
diff --git a/terraform/Taskfile.yml b/terraform/Taskfile.yml
index 994f0a1..8529653 100644
--- a/terraform/Taskfile.yml
+++ b/terraform/Taskfile.yml
@@ -1,13 +1,18 @@
 version: 3
 silent: true
 
+vars:
+  BACKEND: ../config/backend.secret.tf.json
+  VARIABLES: ../config/variables.secret.tf.json
+  OUTPUT: ../config/infrastructure.secret.tf.json
+
 tasks:
-  init: terraform init -backend-config=../config/backend.tf.json
-  plan: terraform plan -var-file=../config/variables.tf.json
+  init: terraform init -backend-config={{.BACKEND}}
+  plan: terraform plan -var-file={{.VARIABLES}}
   destroy: terraform destroy
   format: terraform fmt -recursive
-  out: terraform output -json > ../config/infrastructure.tf.json
+  out: terraform output -json > {{.OUTPUT}}
   apply:
-    - terraform apply -var-file=../config/variables.tf.json
+    - terraform apply -var-file={{.VARIABLES}}
     - task: out
-  import: terraform import -var-file=../config/variables.tf.json {{.CLI_ARGS}}
\ No newline at end of file
+  import: terraform import -var-file={{.VARIABLES}} {{.CLI_ARGS}}
\ No newline at end of file
diff --git a/terraform/iam.tf b/terraform/iam.tf
index 57cdc1d..eba76dc 100644
--- a/terraform/iam.tf
+++ b/terraform/iam.tf
@@ -4,8 +4,8 @@ data "aws_s3_bucket" "storage_bucket" {
 
 data "aws_iam_policy_document" "boot" {
   statement {
-    effect = "Allow"
-    actions = ["s3:*", "s3-object-lambda:*"]
+    effect  = "Allow"
+    actions = ["s3:*", "s3-object-lambda:*"]
     resources = [
       "${data.aws_s3_bucket.storage_bucket.arn}/${var.boot_key}",
       "${data.aws_s3_bucket.storage_bucket.arn}/${var.boot_key}/*",
diff --git a/terraform/install.sh b/terraform/install.sh
index 61411b2..bcb2efd 100755
--- a/terraform/install.sh
+++ b/terraform/install.sh
@@ -6,7 +6,6 @@ amazon-linux-extras install docker ansible2 python3.8 -y
 # Make Docker work.
 systemctl enable docker
 systemctl start docker
-sudo usermod -aG docker ssm-user
 
 # Set up the correct version of Python (for Ansible).
 ln -sf /usr/bin/python3.8 /usr/bin/python3
@@ -26,3 +25,6 @@ service sshd stop
 # Install Docker Compose.
 curl -L "https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
 chmod +x /usr/local/bin/docker-compose
+
+# ERROR: SSM User not created yet.
+sudo usermod -aG docker ssm-user
diff --git a/terraform/main.tf b/terraform/main.tf
index 1f61477..6b4a5e3 100644
--- a/terraform/main.tf
+++ b/terraform/main.tf
@@ -1,6 +1,6 @@
 # An elastic IP, so if the reverse proxy is modified, the route tables won't.
 resource "aws_eip" "public" {
-  instance = aws_instance.gitea.id
+  instance = aws_instance.this.id
   domain   = "vpc"
 }
 
@@ -9,17 +9,21 @@ data "aws_iam_instance_profile" "ssm" {
 }
 
 # The Gitea instance.
-resource "aws_instance" "gitea" {
+resource "aws_instance" "this" {
   # ami = data.aws_ami.amazon-linux-2.id
   ami           = "ami-0adec96dc0cdc7bca"
   instance_type = "t4g.nano"
   subnet_id     = module.vpc.public_subnets[0]
 
   user_data                   = file("install.sh")
-  user_data_replace_on_change = false
+  user_data_replace_on_change = true
 
   iam_instance_profile   = data.aws_iam_instance_profile.ssm.name
   vpc_security_group_ids = [aws_security_group.public_access.id]
 
+  metadata_options {
+    http_tokens = "required"
+  }
+
   root_block_device {
     volume_type = "gp3"
@@ -30,3 +34,8 @@
     Name = "Codebase: Gitea"
   }
 }
+
+resource "aws_ec2_instance_state" "this" {
+  instance_id = aws_instance.this.id
+  state       = "running"
+}
diff --git a/terraform/network.tf b/terraform/network.tf
index 40df6c4..adbae49 100644
--- a/terraform/network.tf
+++ b/terraform/network.tf
@@ -20,7 +20,7 @@ module "vpc" {
   public_subnets  = [cidrsubnet(local.vpc_cidr, 8, 4)]
 
   private_subnet_tags = { SubnetOf = "Main", SubnetType = "Private" }
-  public_subnet_tags = { SubnetOf = "Main", SubnetType = "Public" }
+  public_subnet_tags  = { SubnetOf = "Main", SubnetType = "Public" }
 
   map_public_ip_on_launch = true
   enable_dns_hostnames    = true
diff --git a/terraform/output.tf b/terraform/output.tf
index 9797cd9..253a433 100644
--- a/terraform/output.tf
+++ b/terraform/output.tf
@@ -1,27 +1,27 @@
 output "instance_id" {
-  value       = aws_instance.gitea.id
+  value       = aws_instance.this.id
   description = "The instance ID of the Gitea instance."
 }
 
 output "ip_address" {
-  value       = aws_instance.gitea.private_ip
+  value       = aws_instance.this.private_ip
   description = "The Gitea IP address."
 }
 
 output "boot_region" {
-  value = var.aws_region
+  value       = var.aws_region
   description = "The region to manipulate the codebase repository boot."
   sensitive   = true
 }
 
 output "boot_id" {
-  value = module.boot_user.iam_access_key_id
+  value       = module.boot_user.iam_access_key_id
   description = "The access id to manipulate the codebase repository boot."
   sensitive   = true
 }
 
 output "boot_secret" {
-  value = module.boot_user.iam_access_key_secret
+  value       = module.boot_user.iam_access_key_secret
   description = "The access secret to manipulate the codebase repository boot."
   sensitive   = true
 }
diff --git a/terraform/variables.tf b/terraform/variables.tf
index 2eb970a..f316d5b 100644
--- a/terraform/variables.tf
+++ b/terraform/variables.tf
@@ -14,16 +14,16 @@ variable "aws_secret" {
 }
 
 variable "boot_bucket" {
-  type = string
+  type        = string
   description = "The name of the bucket to store the boot in."
 }
 
 variable "boot_key" {
-  type = string
+  type        = string
   description = "The path that will hold the boot data."
 }
 
 variable "boot_role" {
-  type = string
+  type        = string
   description = "The name of the role for boot access."
 }
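
Usage note (not part of the patch): the restore task now takes the bucket and key as Task variables instead of the old baked-in boot_* values. A minimal invocation sketch, assuming go-task's NAME=value command-line variables; the bucket name and object key below are hypothetical placeholders:

    # Hypothetical bucket/key; Task substitutes them into {{.BUCKET}} and {{.KEY}}.
    task restore BUCKET=my-backup-bucket KEY=backup/my-app-backup.tar.gz

    # Equivalent direct call, matching how the restore task expands:
    ansible-playbook playbooks/restore.yml \
      -e "restore_bucket=my-backup-bucket restore_key=backup/my-app-backup.tar.gz"

If either variable is omitted, the mandatory() filters added to playbooks/restore.yml fail the download step with the corresponding message.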
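
Usage note (not part of the patch): the new enter-task environment reads config/ansible.secret.json through jq, so that file is assumed to carry at least the three keys referenced by the filters in the Taskfile. A sketch of the expected shape, with placeholder values:

    {
      "aws_region": "us-east-1",
      "aws_access_key": "AKIA...",
      "aws_secret_key": "..."
    }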
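
Usage note (not part of the patch): terraform/Taskfile.yml forwards {{.CLI_ARGS}} to terraform import, so anything after go-task's "--" separator reaches Terraform unchanged. A sketch under the assumption that the root Taskfile includes this file under a "terraform" namespace; the resource address matches the renamed aws_instance.this, and the instance ID is hypothetical:

    task terraform:import -- aws_instance.this i-0123456789abcdef0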