git/playbooks/slow.yml
Commit `23cf397581`, "S3 Data Storage" (#1): using the storage options documented [here](https://docs.gitea.com/administration/config-cheat-sheet#storage-storage), this commit pushes all packages, avatars, and LFS files to S3. A sketch of the corresponding settings follows the list below.

- Uses AWS S3.
- Frees up storage on the EC2 instance, so packages do not eventually consume all of the free space.
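
For reference, the deploy play below injects only the MinIO credentials, so the remaining `[storage]` settings are presumably baked into the image. A minimal sketch of the full set, using Gitea's `GITEA__section__KEY` environment convention; the endpoint, bucket, and region values here are illustrative assumptions, not taken from this repository:

    # Illustrative only: each variable maps to a key in app.ini's [storage]
    # section; see the cheat sheet linked above.
    GITEA__storage__STORAGE_TYPE: minio
    GITEA__storage__MINIO_ENDPOINT: s3.amazonaws.com      # assumed AWS endpoint
    GITEA__storage__MINIO_BUCKET: example-gitea-storage   # hypothetical bucket
    GITEA__storage__MINIO_LOCATION: us-east-1             # hypothetical region
    GITEA__storage__MINIO_USE_SSL: "true"
    GITEA__storage__MINIO_ACCESS_KEY_ID: "{{ minio_access_key }}"
    GITEA__storage__MINIO_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"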

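# slow.yml: the first play builds the Gitea image and ships it through S3 as a
# compressed artifact; the second pulls it onto the EC2 instance over SSM and
# (re)starts the Gitea and backup containers.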
- name: Make build artifact.
  hosts: localhost
  vars_files: ../config/ansible.secret.json
  gather_facts: false
  tasks:
    - name: Build image.
      community.docker.docker_image_build:
        name: "{{ image_name }}"
        path: ../gitea
        nocache: true
        rebuild: always
        pull: true
    - name: Push image to archive.
      community.docker.docker_image:
        name: "{{ image_name }}"
        archive_path: ../dist/image.tar
        source: local
    - name: Compress archive to artifact.
      community.general.archive:
        path: ../dist/image.tar
        dest: ../dist/image.tar.xz
        format: xz
        mode: "0644"
      register: compress_image
    - name: Push artifact to S3.
      amazon.aws.s3_object:
        bucket: "{{ image_bucket }}"
        object: "{{ image_key }}"
        src: ../dist/image.tar.xz
        mode: put
        region: "{{ aws_region }}"
        access_key: "{{ aws_access_key }}"
        secret_key: "{{ aws_secret_key }}"
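
# The deploy play connects through AWS Systems Manager: the aws_ssm connection
# plugin needs the session-manager-plugin binary on the controller and an S3
# bucket (the image bucket is reused here) to copy files to the instance.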
- name: Deploy artifact to instance.
  hosts: localhost
  become: true
  gather_facts: false
  vars_files:
    - ../config/ansible.secret.json
    - ../config/infrastructure.secret.json
  vars:
    ansible_connection: aws_ssm
    ansible_python_interpreter: /usr/bin/python3
    ansible_aws_ssm_plugin: "{{ ssm_plugin }}"
    ansible_aws_ssm_bucket_name: "{{ image_bucket }}"
    ansible_aws_ssm_instance_id: "{{ instance_id.value }}"
    ansible_aws_ssm_region: "{{ aws_region }}"
    ansible_aws_ssm_access_key_id: "{{ aws_access_key }}"
    ansible_aws_ssm_secret_access_key: "{{ aws_secret_key }}"
  tasks:
    - name: Fetch image.
      amazon.aws.s3_object:
        mode: get
        bucket: "{{ image_bucket }}"
        object: "{{ image_key }}"
        dest: /root/image.tar.xz
        region: "{{ aws_region }}"
        access_key: "{{ aws_access_key }}"
        secret_key: "{{ aws_secret_key }}"
    - name: Create data directory.
      ansible.builtin.file:
        path: /home/ssm-user/data
        state: directory
        mode: "0777"
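    # docker load understands xz-compressed archives, so the fetched artifact
    # can be handed to docker_image_load without unpacking it first.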
    - name: Load image.
      community.docker.docker_image_load:
        path: /root/image.tar.xz
      register: image
    - name: Run image.
      community.docker.docker_container:
        name: server
        image: "{{ image.image_names[0] }}"
        state: started
        recreate: true
        restart_policy: unless-stopped
        memory: 425m
        memory_swap: 900m
        # Quote every mapping: under Ansible's YAML 1.1 parser a bare 22:22
        # would be read as the sexagesimal integer 1342, not a port pair.
        ports: ["80:80", "2222:2222", "443:443", "22:22"]
        # GITEA__<section>__<KEY> variables override the matching app.ini keys.
        env:
          GITEA__security__INTERNAL_TOKEN: "{{ internal_secret }}"
          GITEA__server__LFS_JWT_SECRET: "{{ lfs_secret }}"
          GITEA__oauth2__JWT_SECRET: "{{ jwt_secret }}"
          GITEA__server__ACME_EMAIL: "{{ email }}"
          GITEA__server__SSH_DOMAIN: "{{ full_domain.value }}"
          GITEA__server__DOMAIN: "{{ full_domain.value }}"
          GITEA__server__ROOT_URL: "https://{{ full_domain.value }}/"
          GITEA__storage__MINIO_ACCESS_KEY_ID: "{{ minio_access_key }}"
          GITEA__storage__MINIO_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
        labels:
          docker-volume-backup.stop-during-backup: "true"
        volumes:
          - /home/ssm-user/data:/var/lib/gitea
          - /etc/timezone:/etc/timezone:ro
          - /etc/localtime:/etc/localtime:ro
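    # offen/docker-volume-backup uploads the mounted volume to S3 on the cron
    # schedule below, stopping any container labelled
    # docker-volume-backup.stop-during-backup=true while the copy runs.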
    - name: Run backup.
      community.docker.docker_container:
        name: backup
        image: offen/docker-volume-backup:v2
        state: started
        recreate: true
        restart_policy: unless-stopped
        volumes:
          - /home/ssm-user/data:/backup/my-app-backup:ro
          - /var/run/docker.sock:/var/run/docker.sock:ro
        env:
          AWS_S3_BUCKET_NAME: "{{ boot_bucket }}"
          AWS_S3_PATH: "{{ boot_key }}"
          AWS_REGION: "{{ boot_region.value }}"
          AWS_ACCESS_KEY_ID: "{{ boot_id.value }}"
          AWS_SECRET_ACCESS_KEY: "{{ boot_secret.value }}"
          BACKUP_CRON_EXPRESSION: "0 0 * * *"  # daily at midnight
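
Both plays read `../config/ansible.secret.json`; the deploy play also loads `../config/infrastructure.secret.json`, whose `*.value` entries look like captured Terraform outputs. A minimal sketch of the first file, with key names taken from the playbook and placeholder values (some keys may live in the infrastructure file instead):

    {
      "image_name": "example/gitea:latest",
      "image_bucket": "example-artifact-bucket",
      "image_key": "gitea/image.tar.xz",
      "aws_region": "us-east-1",
      "aws_access_key": "AKIA...",
      "aws_secret_key": "...",
      "ssm_plugin": "/usr/local/bin/session-manager-plugin",
      "internal_secret": "...",
      "lfs_secret": "...",
      "jwt_secret": "...",
      "email": "admin@example.com",
      "minio_access_key": "...",
      "minio_secret_key": "..."
    }

The playbook then runs as usual, e.g. `ansible-playbook slow.yml` from the `git/playbooks` directory.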