Compare commits
6 Commits

| Author | SHA1 | Date |
|---|---|---|
| | b6b4978e68 | |
| | b41d9f0e82 | |
| | 06646e7ec7 | |
| | 80270b9a93 | |
| | 2827653bd5 | |
| | 44e1d6d0e6 | |
Taskfile.yml (18 changes)

@@ -1,19 +1,13 @@
 version: 3
 
-includes:
-  tf: { taskfile: terraform, dir: terraform }
-
 tasks:
-  dev: docker compose -f compose.dev.yml up --build --force-recreate --no-deps
+  dev:
+    - docker compose -f compose.dev.yml rm -fsv
+    - docker compose -f compose.dev.yml up --build --force-recreate --no-deps
 
   deploy:fast: ansible-playbook playbooks/fast.yml
-  deploy:slow: ansible-playbook playbooks/slow.yml
+  deploy:slow: ansible-playbook playbooks/slow.yml {{.CLI_ARGS}}
   deploy:restore: ansible-playbook playbooks/restore.yml -e "restore_bucket={{.BUCKET}} restore_key={{.KEY}}"
 
-  enter:
-    cmd: aws ssm start-session --target $INSTANCE_ID
-    env:
-      INSTANCE_ID: { sh: jq -r .instance_id.value < config/infrastructure.secret.json }
-      AWS_REGION: { sh: jq -r .aws_region < config/ansible.secret.json }
-      AWS_ACCESS_KEY_ID: { sh: jq -r .aws_access_key < config/ansible.secret.json }
-      AWS_SECRET_ACCESS_KEY: { sh: jq -r .aws_secret_key < config/ansible.secret.json }
+  vault: ansible-vault edit vault.yml
+  inventory: ansible-vault edit inventory.ini
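With these tasks in place, day-to-day usage looks roughly like the following. This is a sketch, assuming go-task is installed; arguments after `--` are forwarded into `{{.CLI_ARGS}}`, which is standard Task behaviour.

```sh
# Rebuild and start the local stack, removing stale containers first.
task dev

# Run the slow deploy, forwarding extra flags to ansible-playbook via {{.CLI_ARGS}}.
task deploy:slow -- --check --diff

# Edit the encrypted secrets and the encrypted inventory.
task vault
task inventory
```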
ansible.cfg (10 changes)

@@ -1,6 +1,14 @@
 [defaults]
-callbacks_enabled = profile_tasks
 localhost_warning = False
+inventory = inventory.ini
+host_key_checking = False
+interpreter_python = /usr/bin/python3
+vault_password_file = secret.key
 
 [inventory]
 inventory_unparsed_warning = False
+
+[ssh_connection]
+ssh_args = -o ControlMaster=auto -o ControlPersist=60s -o ForwardAgent=yes -o IdentityAgent=none
+pipelining = True
+retries = 256
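Because the inventory and vault password file are now declared in ansible.cfg, playbook runs no longer need those flags on the command line. A minimal sketch, using the file names from the config above:

```sh
# Before: inventory and vault password had to be passed explicitly.
ansible-playbook -i inventory.ini --vault-password-file secret.key playbooks/slow.yml

# After: ansible.cfg supplies both defaults.
ansible-playbook playbooks/slow.yml
```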
@@ -3,8 +3,10 @@ services:
   # Gitea itself.
   gitea:
     container_name: web-git-instance
+    restart: unless-stopped
     depends_on:
       - backup
+      - bucket-script
     build:
       context: gitea
       dockerfile: Dockerfile.dev
@@ -31,7 +33,7 @@ services:
       - /var/run/docker.sock:/var/run/docker.sock:ro
     environment:
       AWS_ENDPOINT: localstack:4566
-      AWS_S3_BUCKET_NAME: test
+      AWS_S3_BUCKET_NAME: backup
      AWS_ACCESS_KEY_ID: _
      AWS_SECRET_ACCESS_KEY: _
      BACKUP_CRON_EXPRESSION: "* * * * *"
@@ -59,7 +61,11 @@ services:
      AWS_ACCESS_KEY_ID: _
      AWS_SECRET_ACCESS_KEY: _
      AWS_ENDPOINT_URL: http://localstack:4566
-    command: '"aws s3api create-bucket --bucket test"'
+    command: |
+      "
+      aws s3api create-bucket --bucket backup
+      aws s3api create-bucket --bucket storage
+      "
 
 volumes:
   data:
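The bucket-creation service now provisions both buckets in LocalStack at start-up. Against the LocalStack endpoint, the two commands amount to roughly the following; the dummy credentials and endpoint wiring are assumptions read off the environment block above, not the service's exact entrypoint:

```sh
# Create the backup and storage buckets in LocalStack (dummy credentials).
export AWS_ACCESS_KEY_ID=_ AWS_SECRET_ACCESS_KEY=_
aws --endpoint-url http://localstack:4566 s3api create-bucket --bucket backup
aws --endpoint-url http://localstack:4566 s3api create-bucket --bucket storage
```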
@@ -3,6 +3,8 @@ FROM gitea/gitea:latest-rootless
 ADD --chown=git:git config /etc/gitea
 ADD --chown=git:git custom /etc/gitea-custom
 
+ENV GITEA_CUSTOM=/etc/gitea-custom
+
 RUN rm /etc/gitea/app.ini
 RUN mv /etc/gitea/dev.app.ini /etc/gitea/app.ini
 
@@ -94,3 +94,10 @@ DEFAULT_MERGE_STYLE = merge
 
 [repository.signing]
 DEFAULT_TRUST_MODEL = committer
+
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.us-east-1.amazonaws.com
+MINIO_BUCKET = myrica-faya
+MINIO_USE_SSL = true
+MINIO_INSECURE_SKIP_VERIFY = false
@@ -32,8 +32,8 @@ PROTOCOL = https
 ROOT_URL = https://localhost:443/
 DOMAIN = localhost
 HTTP_PORT = 443
-CERT_FILE = cert.pem
-KEY_FILE = key.pem
+CERT_FILE = /etc/gitea-custom/cert.pem
+KEY_FILE = /etc/gitea-custom/key.pem
 
 [database]
 DB_TYPE = sqlite3
@@ -95,3 +95,12 @@ DEFAULT_TRUST_MODEL = committer
 
 [oauth2]
 JWT_SECRET = x-----------------------------------------x
+
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = localstack:4566
+MINIO_ACCESS_KEY_ID = test
+MINIO_SECRET_ACCESS_KEY = test
+MINIO_BUCKET = storage
+MINIO_USE_SSL = false
+MINIO_INSECURE_SKIP_VERIFY = true
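With the dev config pointing Gitea's [storage] section at LocalStack, object writes (avatars, attachments, and the like) should land in the storage bucket and can be inspected from the host. A sketch, assuming LocalStack's port 4566 is published to the host and the dummy test credentials above:

```sh
export AWS_ACCESS_KEY_ID=test AWS_SECRET_ACCESS_KEY=test
aws --endpoint-url http://localhost:4566 s3 ls s3://storage --recursive
```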
inventory.ini (new file, 26 lines)

@@ -0,0 +1,26 @@
+$ANSIBLE_VAULT;1.1;AES256
+36326131353430646433363636333433313266666434623134633136373566306534323231373637
+6532386232643232343464393964623065326639643866640a353461626332623134613530663136
+35643737623066313565633035623161366631663630663664313736613063303333373634353064
+3333376338656539640a313561666239643466616161383561613833323765356238393034663865
+36643538346263653263646334343063326464656264633461363136383530393931393764356534
+38663963303737666632363239613836386235343730383530363536386165616339376435326639
+37333866323262336637383431323538393334393136623838343766636634316338633566343366
+32306461396134373161633437373730383933343865326363326435393232646163663461666437
+36613664633633306264656230363862306661363930376666616630363036396639643639343336
+65653162303435663166383934343936313935643936656235383930616539393239643634323237
+38613032323336333764633339396163306665666430333762343631383430613463666339323361
+63333964313832366532363334623236626232633132653639333231386663333865663665343530
+33613364386531633561373537353432643332663735663833663532373763383237316331306366
+64616462323739623833303661353764623537313432646137336230383830643761646131386263
+33663034303064373066363731653737363033373163386466663734643065613039336330313664
+37666332623264346132343638623332323661363338623335366230323737333961613035646366
+63316164316135633136326337363464373036383433333830646131363533363338383262623261
+63353737343236356561323738396631333133366338366538356232663834316230333265626562
+32363862396362376332383131316665383166386631336631656231636130323339623032386535
+38383339636339633961393632393063613261653061623465356238306330346464333039393134
+37356232626434646566346464636131396339646663383333393963336332313931656436353334
+36303039643837663130336362656636393737633962396531326231383862646631613061323737
+32346166396139383231663233356233646634633361346564356366343834313835343332363565
+66303933353231386331326462366239336361386638383861326662613732373661306330616334
+6634
@@ -63,6 +63,8 @@
       GITEA__server__SSH_DOMAIN: "{{ full_domain.value }}"
       GITEA__server__DOMAIN: "{{ full_domain.value }}"
       GITEA__server__ROOT_URL: "https://{{ full_domain.value }}/"
+      GITEA__storage__MINIO_ACCESS_KEY_ID: "{{ minio_access_key }}"
+      GITEA__storage__MINIO_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
     labels:
       docker-volume-backup.stop-during-backup: "true"
     volumes:
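The GITEA__section__KEY naming is the environment-to-ini convention used by the Gitea container images, so the two new variables end up as MINIO_ACCESS_KEY_ID and MINIO_SECRET_ACCESS_KEY under [storage]. A standalone sketch of the same idea; the image tag and values here are placeholders, not the playbook's actual inputs:

```sh
docker run -d --name gitea \
  -e GITEA__storage__STORAGE_TYPE=minio \
  -e GITEA__storage__MINIO_ACCESS_KEY_ID=example-key \
  -e GITEA__storage__MINIO_SECRET_ACCESS_KEY=example-secret \
  gitea/gitea:latest-rootless
```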
playbooks/route.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
+- name: "Create route to instance."
+  hosts: router
+  gather_facts: false
+  tasks:
+    - name: Modify base Caddyfile.
+      ansible.builtin.blockinfile:
+        dest: ~/app/caddy/etc/Caddyfile
+        marker: '# GITEA {mark}'
+        content: "{{ lookup('file', '../router/Caddyfile') }}"
+      notify:
+        - Restart Caddy.
+
+  handlers:
+    - name: Restart Caddy.
+      ansible.builtin.systemd_service:
+        name: container-caddy
+        state: restarted
+        enabled: true
+        scope: user
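blockinfile wraps the inserted content in its marker lines ({mark} expands to BEGIN and END), so after a run the router's Caddyfile should contain a managed block along these lines. A sketch only; the body comes from the new router/Caddyfile shown later in this diff:

```sh
# Expected managed block inside ~/app/caddy/etc/Caddyfile after the play runs.
cat <<'EOF'
# GITEA BEGIN
code.maximhutz.com {
	respond "WIP!"
}
# GITEA END
EOF
```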
@@ -2,6 +2,8 @@
   hosts: localhost
   vars_files: ../config/ansible.secret.json
   gather_facts: false
+  vars:
+    image_name: "service/gitea"
   tasks:
     - name: Build image.
       community.docker.docker_image_build:
@@ -11,114 +13,72 @@
         rebuild: always
         pull: true
 
-    - name: Make temp file.
-      ansible.builtin.tempfile:
-        suffix: .tar
-      register: tar_file
+    - name: Create build directory.
+      ansible.builtin.file:
+        path: ../dist
+        state: directory
+        mode: '0777'
 
     - name: Push image to archive.
       community.docker.docker_image:
         name: "{{ image_name }}"
-        archive_path: "{{ tar_file.path }}"
+        archive_path: ../dist/image.tar
         source: local
 
     - name: Compress archive to artifact.
       register: compress_image
       community.general.archive:
-        path: "{{ tar_file.path }}"
-        dest: "{{ tar_file.path }}.xz"
-        format: xz
+        path: ../dist/image.tar
+        dest: ../dist/image.tar.gz
+        format: gz
         mode: "0644"
 
-    - name: Push artifact to S3.
-      amazon.aws.s3_object:
-        bucket: "{{ image_bucket }}"
-        object: "{{ image_key }}"
-        src: "{{ tar_file.path }}.xz"
-        mode: put
-
-        region: "{{ aws_region }}"
-        access_key: "{{ aws_access_key }}"
-        secret_key: "{{ aws_secret_key }}"
-
 - name: Deploy artifact to instance.
-  hosts: localhost
-  become: true
+  hosts: compute
   gather_facts: false
-  vars_files:
-    - ../config/ansible.secret.json
-    - ../config/infrastructure.secret.json
-  vars:
-    ansible_connection: aws_ssm
-    ansible_python_interpreter: /usr/bin/python3
-    ansible_aws_ssm_plugin: "{{ ssm_plugin }}"
-    ansible_aws_ssm_bucket_name: "{{ image_bucket }}"
-    ansible_aws_ssm_instance_id: "{{ instance_id.value }}"
-    ansible_aws_ssm_region: "{{ aws_region }}"
-    ansible_aws_ssm_access_key_id: "{{ aws_access_key }}"
-    ansible_aws_ssm_secret_access_key: "{{ aws_secret_key }}"
   tasks:
-    - name: Fetch image.
-      amazon.aws.s3_object:
-        mode: get
-        bucket: "{{ image_bucket }}"
-        object: "{{ image_key }}"
-        dest: /root/image.tar.gz
-
-        region: "{{ aws_region }}"
-        access_key: "{{ aws_access_key }}"
-        secret_key: "{{ aws_secret_key }}"
-
     - name: Create data directory.
       ansible.builtin.file:
-        path: /home/ssm-user/data
+        path: "{{ item }}"
         state: directory
         mode: '0777'
+      loop:
+        - ~/app
+        - ~/app/gitea
+
+    - name: Pull image to remote.
+      ansible.posix.synchronize:
+        src: ../dist/image.tar.gz
+        dest: ~/app/gitea/image.tar.gz
 
     - name: Load image.
-      community.docker.docker_image_load:
-        path: /root/image.tar.gz
+      containers.podman.podman_load:
+        path: ~/app/gitea/image.tar.gz
       register: image
 
-    - name: Run image.
-      community.docker.docker_container:
-        name: server
-        image: "{{ image.image_names[0] }}"
-        state: started
-        recreate: true
-        restart_policy: unless-stopped
-        memory: 425m
-        memory_swap: 900m
-        ports: [80:80, 2222:2222, 443:443, "22:22"]
-        env:
-          GITEA__security__INTERNAL_TOKEN: "{{ internal_secret }}"
-          GITEA__server__LFS_JWT_SECRET: "{{ lfs_secret }}"
-          GITEA__oauth2__JWT_SECRET: "{{ jwt_secret }}"
-          GITEA__server__ACME_EMAIL: "{{ email }}"
-          GITEA__server__SSH_DOMAIN: "{{ full_domain.value }}"
-          GITEA__server__DOMAIN: "{{ full_domain.value }}"
-          GITEA__server__ROOT_URL: "https://{{ full_domain.value }}/"
-        labels:
-          docker-volume-backup.stop-during-backup: "true"
-        volumes:
-          - /home/ssm-user/data:/var/lib/gitea
-          - /etc/timezone:/etc/timezone:ro
-          - /etc/localtime:/etc/localtime:ro
-
-    - name: Run backup.
-      community.docker.docker_container:
-        name: backup
-        image: offen/docker-volume-backup:v2
-        state: started
-        recreate: true
-        restart_policy: unless-stopped
-        volumes:
-          - /home/ssm-user/data:/backup/my-app-backup:ro
-          - /var/run/docker.sock:/var/run/docker.sock:ro
-        env:
-          AWS_S3_BUCKET_NAME: "{{ boot_bucket }}"
-          AWS_S3_PATH: "{{ boot_key }}"
-          AWS_REGION: "{{ boot_region.value }}"
-          AWS_ACCESS_KEY_ID: "{{ boot_id.value }}"
-          AWS_SECRET_ACCESS_KEY: "{{ boot_secret.value }}"
-          BACKUP_CRON_EXPRESSION: "0 0 * * *"
+    # - name: Run image.
+    #   community.docker.docker_container:
+    #     name: server
+    #     image: "{{ image.image_names[0] }}"
+    #     state: started
+    #     recreate: true
+    #     restart_policy: unless-stopped
+    #     memory: 425m
+    #     memory_swap: 900m
+    #     ports: [80:80, 2222:2222, 443:443, "22:22"]
+    #     env:
+    #       GITEA__security__INTERNAL_TOKEN: "{{ internal_secret }}"
+    #       GITEA__server__LFS_JWT_SECRET: "{{ lfs_secret }}"
+    #       GITEA__oauth2__JWT_SECRET: "{{ jwt_secret }}"
+    #       GITEA__server__ACME_EMAIL: "{{ email }}"
+    #       GITEA__server__SSH_DOMAIN: "{{ full_domain.value }}"
+    #       GITEA__server__DOMAIN: "{{ full_domain.value }}"
+    #       GITEA__server__ROOT_URL: "https://{{ full_domain.value }}/"
+    #       GITEA__storage__MINIO_ACCESS_KEY_ID: "{{ minio_access_key }}"
+    #       GITEA__storage__MINIO_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
+    #     labels:
+    #       docker-volume-backup.stop-during-backup: "true"
+    #     volumes:
+    #       - /home/ssm-user/data:/var/lib/gitea
+    #       - /etc/timezone:/etc/timezone:ro
+    #       - /etc/localtime:/etc/localtime:ro
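On the instance the playbook now leans on Podman instead of Docker: the synced archive is loaded with podman_load, and while the run task is commented out, the equivalent manual steps would look roughly like this. A sketch only; the loaded image name and the ports mirror the commented-out task above and should be checked with `podman images` after loading:

```sh
# Load the synced image archive into Podman's local storage.
podman load -i ~/app/gitea/image.tar.gz

# Roughly what the (currently commented-out) run task would do; the tag is an assumption.
podman run -d --name server --restart unless-stopped \
  -p 80:80 -p 443:443 -p 2222:2222 \
  service/gitea
```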
router/Caddyfile (new file, 3 lines)

@@ -0,0 +1,3 @@
+code.maximhutz.com {
+	respond "WIP!"
+}
terraform/.terraform.lock.hcl (generated, 24 lines removed)

@@ -1,24 +0,0 @@
-# This file is maintained automatically by "terraform init".
-# Manual edits may be lost in future updates.
-
-provider "registry.terraform.io/hashicorp/aws" {
-  version = "5.83.1"
-  hashes = [
-    "h1:Yy3K7R7881H72rQDzG6qjZVkrWA6DGJzfE21TionY7w=",
-    "zh:0313253c78f195973752c4d1f62bfdd345a9c99c1bc7a612a8c1f1e27d51e49e",
-    "zh:108523f3e9ebc93f7d900c51681f6edbd3f3a56b8a62b0afc31d8214892f91e0",
-    "zh:175b9bf2a00bea6ac1c73796ad77b0e00dcbbde166235017c49377d7763861d8",
-    "zh:1c8bf55b8548bbad683cd6d7bdb03e8840a00b2422dc1529ffb9892820657130",
-    "zh:22338f09bae62d5ff646de00182417f992548da534fee7d98c5d0136d4bd5d7a",
-    "zh:92de1107ec43de60612be5f6255616f16a9cf82d88df1af1c0471b81f3a82c16",
-    "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
-    "zh:9c7bfb7afea330e6d90e1466125a8cba3db1ed4043c5da52f737459c89290a6e",
-    "zh:ba59b374d477e5610674b70f5abfe0408e8f809390347372751384151440d3d0",
-    "zh:bd1c433966002f586d63cb1e3e16326991f238bc6beeb2352be36ec651917b0b",
-    "zh:ca2b4d1d02651c15261fffa4b142e45def9a22c6069353f0f663fd2046e268f8",
-    "zh:d8ed98c748f7a3f1a72277cfee9afe346aca39ab319d17402277852551d8f14a",
-    "zh:ed3d8bc89de5f35f3c5f4802ff7c749fda2e2be267f9af4a850694f099960a72",
-    "zh:f698732a4391c3f4d7079b4aaa52389da2a460cac5eed438ed688f147d603689",
-    "zh:f9f51b17f2978394954e9f6ab9ef293b8e11f1443117294ccf87f7f8212b3439",
-  ]
-}
@@ -1,18 +0,0 @@
-version: 3
-silent: true
-
-vars:
-  BACKEND: ../config/backend.secret.json
-  VARIABLES: ../config/variables.secret.json
-  OUTPUT: ../config/infrastructure.secret.json
-
-tasks:
-  init: terraform init -backend-config={{.BACKEND}}
-  plan: terraform plan -var-file={{.VARIABLES}}
-  destroy: terraform destroy
-  format: terraform fmt -recursive
-  out: terraform output -json > {{.OUTPUT}}
-  apply:
-    - terraform apply -var-file={{.VARIABLES}}
-    - task: out
-  import: terraform import -var-file={{.VARIABLES}} {{.CLI_ARGS}}
@@ -1,31 +0,0 @@
-data "aws_s3_bucket" "storage_bucket" {
-  bucket = var.boot_bucket
-}
-
-data "aws_iam_policy_document" "boot" {
-  statement {
-    effect  = "Allow"
-    actions = ["s3:*", "s3-object-lambda:*"]
-    resources = [
-      "${data.aws_s3_bucket.storage_bucket.arn}/${var.boot_key}",
-      "${data.aws_s3_bucket.storage_bucket.arn}/${var.boot_key}/*",
-    ]
-  }
-}
-
-resource "aws_iam_policy" "boot" {
-  name        = "${var.boot_role}Policy"
-  description = "The policy that manages the Gitea Boot."
-
-  policy = data.aws_iam_policy_document.boot.json
-}
-
-module "boot_user" {
-  source  = "terraform-aws-modules/iam/aws//modules/iam-user"
-  version = "5.52.2"
-
-  create_iam_user_login_profile = false
-  name                          = "${var.boot_role}User"
-  password_reset_required       = false
-  policy_arns                   = [aws_iam_policy.boot.arn]
-}
@@ -1,30 +0,0 @@
-#!/bin/sh
-
-rpm --rebuilddb
-amazon-linux-extras install docker ansible2 python3.8 -y
-
-# Make Docker work.
-systemctl enable docker
-systemctl start docker
-
-# Set up the correct version of Python (for Ansible).
-ln -sf /usr/bin/python3.8 /usr/bin/python3
-ln -sf /usr/bin/pip3.8 /usr/bin/pip3
-pip3 install botocore boto3 requests packaging
-python3 -m pip install -U pip
-
-# Add some swap space.
-dd if=/dev/zero of=/swapfile bs=128M count=8
-chmod 600 /swapfile
-mkswap /swapfile
-swapon /swapfile
-
-# Stop SSH (because we have SSM.)
-service sshd stop
-
-# Install Docker Compose.
-curl -L "https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
-chmod +x /usr/local/bin/docker-compose
-
-# ERROR: SSM User not created yet.
-sudo usermod -aG docker ssm-user
@@ -1,43 +0,0 @@
-# An elastic IP, so if the reverse proxy is modified, the route tables won't.
-resource "aws_eip" "public" {
-  instance = aws_instance.this.id
-  domain   = "vpc"
-}
-
-# An instance profile for access via AWS SSM.
-resource "aws_iam_instance_profile" "ssm" {
-  name = "SSMInstanceProfile"
-  role = "AmazonSSMRoleForInstancesQuickSetup"
-}
-
-# The Gitea instance.
-resource "aws_instance" "this" {
-  # ami           = data.aws_ami.amazon-linux-2.id
-  ami           = "ami-0adec96dc0cdc7bca"
-  instance_type = "t4g.nano"
-  subnet_id     = module.vpc.public_subnets[0]
-
-  user_data                   = file("install.sh")
-  user_data_replace_on_change = false
-
-  iam_instance_profile   = aws_iam_instance_profile.ssm.name
-  vpc_security_group_ids = [aws_security_group.public_access.id]
-
-  metadata_options {
-    http_tokens = "required"
-  }
-
-  root_block_device {
-    volume_type = "gp3"
-    volume_size = 8
-  }
-
-  tags = {
-    Name = "Codebase: Gitea"
-  }
-}
-
-resource "aws_ec2_instance_state" "this" {
-  instance_id = aws_instance.this.id
-  state       = "running"
-}
@@ -1,55 +0,0 @@
-locals {
-  # The IP block for the VPC.
-  vpc_cidr = "10.0.0.0/16"
-}
-
-data "aws_availability_zones" "all" {}
-
-# The main VPC.
-module "vpc" {
-  source = "terraform-aws-modules/vpc/aws"
-
-  name = "Main"
-  cidr = local.vpc_cidr
-
-  azs             = [data.aws_availability_zones.all.names[0]]
-  private_subnets = [cidrsubnet(local.vpc_cidr, 8, 0)]
-  public_subnets  = [cidrsubnet(local.vpc_cidr, 8, 4)]
-
-  private_subnet_tags = { SubnetOf = "Main", SubnetType = "Private" }
-  public_subnet_tags  = { SubnetOf = "Main", SubnetType = "Public" }
-
-  map_public_ip_on_launch = true
-  enable_dns_hostnames    = true
-  enable_dns_support      = true
-
-  private_route_table_tags = { TableOf = "Main", TableType = "Public" }
-}
-
-# Only allow HTTP(s) and SSH traffic. Allow full access to internet.
-resource "aws_security_group" "public_access" {
-  vpc_id = module.vpc.vpc_id
-  tags   = { GroupOf = "Main", GroupType = "Public" }
-}
-
-resource "aws_vpc_security_group_ingress_rule" "ingress" {
-  for_each = toset(["80", "443", "22", "2222", "81", "8080", "4321", "1234"])
-
-  security_group_id = aws_security_group.public_access.id
-
-  from_port   = each.value
-  to_port     = each.value
-  ip_protocol = "tcp"
-  cidr_ipv4   = "0.0.0.0/0"
-}
-
-resource "aws_vpc_security_group_egress_rule" "egress" {
-  for_each = toset(["-1"])
-
-  security_group_id = aws_security_group.public_access.id
-
-  from_port   = each.value
-  to_port     = each.value
-  ip_protocol = "-1"
-  cidr_ipv4   = "0.0.0.0/0"
-}
@@ -1,33 +0,0 @@
-output "instance_id" {
-  value       = aws_instance.this.id
-  description = "The instance ID of the Gitea instance."
-}
-
-output "ip_address" {
-  value       = aws_instance.this.private_ip
-  description = "The Gitea IP address."
-}
-
-output "boot_region" {
-  value       = var.aws_region
-  description = "The region to manipulate the codebase repository boot."
-  sensitive   = true
-}
-
-output "boot_id" {
-  value       = module.boot_user.iam_access_key_id
-  description = "The access id to manipulate the codebase repository boot."
-  sensitive   = true
-}
-
-output "boot_secret" {
-  value       = module.boot_user.iam_access_key_secret
-  description = "The access secret to manipulate the codebase repository boot."
-  sensitive   = true
-}
-
-output "full_domain" {
-  value       = "${var.subdomain}.${var.domain}"
-  description = "The domain of the Gitea instance."
-  sensitive   = true
-}
@@ -1,11 +0,0 @@
-terraform {
-  # The backend is stored in an S3 bucket.
-  backend "s3" {}
-}
-
-# Access AWS through the IaC roles.
-provider "aws" {
-  region     = var.aws_region
-  access_key = var.aws_access
-  secret_key = var.aws_secret
-}
@@ -1,13 +0,0 @@
-# The Route53 DNS zone.
-data "aws_route53_zone" "main" {
-  name = var.domain
-}
-
-# Push all domain traffic through the reverse proxy.
-resource "aws_route53_record" "domain" {
-  zone_id = data.aws_route53_zone.main.zone_id
-  name    = "${var.subdomain}.${data.aws_route53_zone.main.name}"
-  type    = "A"
-  ttl     = "60"
-  records = [aws_eip.public.public_ip]
-}
@@ -1,39 +0,0 @@
-variable "aws_region" {
-  type        = string
-  description = "The AWS region things are created in."
-}
-
-variable "aws_access" {
-  type        = string
-  description = "The access key to generate the Gitea instance."
-}
-
-variable "aws_secret" {
-  type        = string
-  description = "The access secret to generate the Gitea instance."
-}
-
-variable "boot_bucket" {
-  type        = string
-  description = "The name of the bucket to store the boot in."
-}
-
-variable "boot_key" {
-  type        = string
-  description = "The path that will hold the boot data."
-}
-
-variable "boot_role" {
-  type        = string
-  description = "The name of the role for boot access."
-}
-
-variable "domain" {
-  type        = string
-  description = "The name of the domain."
-}
-
-variable "subdomain" {
-  type        = string
-  description = "The name of the subdomain."
-}
vault.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
+$ANSIBLE_VAULT;1.1;AES256
+66386561623561353461656662653566353665646466336635626561303031383735636666623063
+3336313938313562323336383765323932666638373232390a653163323131323564306438363864
+31333632326135353966656363633962616165623036373764646433353966616364376162636234
+3837333961383333640a383732346534346465353062653531353638663465306432396266663534
+3731