10 Commits

Author  SHA1        Message                                                     Date
Max     24a1931c47  feat: stuff                                                 2025-12-22 10:14:31 -05:00
Max     2827653bd5  feat: works on production                                   2025-02-19 00:57:32 -05:00
Max     44e1d6d0e6  feat: S3 storage for development environmne                 2025-02-18 23:25:42 -05:00
Max     225489f678  feat: change security group tags, not name                  2025-02-18 15:18:20 -05:00
Max     53ad9c161c  feat: added tags to parts of vpc module, to use with proxy  2025-02-18 15:16:06 -05:00
Max     d961d8ffb6  fix: no ds_store                                            2025-02-13 22:09:15 -05:00
Max     aa7c0d4dee  fix: gitignore is proper                                    2025-02-13 22:08:34 -05:00
Max     093368f6fb  chore: requirements.txt is correct now                      2025-02-13 22:07:37 -05:00
Max     143e6be9a0  style: renaming scheme for config files                     2025-02-13 15:35:00 -05:00
Max     6193d99e1f  chore: cleaned up ansible playbooks                         2025-02-13 15:13:55 -05:00
16 changed files with 313 additions and 104 deletions

.gitignore

@@ -37,7 +37,179 @@ override.tf.json
.terraformrc
terraform.rc
# ---> Ansible
*.retry
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
*secret*
.vscode
.env
.DS_Store
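The *secret* pattern in the trailer is what keeps the config/*.secret.json files used by the Taskfile and playbooks below out of version control.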


@@ -1,25 +1,21 @@
 version: 3
 # silent: true
 includes:
   tf: { taskfile: terraform, dir: terraform }
 tasks:
-  dev: docker compose -f compose.dev.yml up --build --force-recreate --no-deps
+  dev:
+    - docker compose -f compose.dev.yml rm -fsv
+    - docker compose -f compose.dev.yml up --build --force-recreate --no-deps
-  build:slow: ansible-playbook playbooks/build.yml
-  deploy:slow: ansible-playbook playbooks/deploy.yml
-  build:fast: ansible-playbook playbooks/fast-build.yml
-  deploy:fast: ansible-playbook playbooks/fast-deploy.yml
-  restore: ansible-playbook playbooks/restore.yml -e "restore_bucket={{.BUCKET}} restore_key={{.KEY}}"
-  run:slow: [task: build:slow, task: deploy:slow]
-  run:fast: [task: build:fast, task: deploy:fast]
+  deploy:fast: ansible-playbook playbooks/fast.yml
+  deploy:slow: ansible-playbook playbooks/slow.yml
+  deploy:restore: ansible-playbook playbooks/restore.yml -e "restore_bucket={{.BUCKET}} restore_key={{.KEY}}"
   enter:
     cmd: aws ssm start-session --target $INSTANCE_ID
     env:
-      INSTANCE_ID: { sh: jq -r .instance_id.value < config/infrastructure.secret.tf.json }
+      INSTANCE_ID: { sh: jq -r .instance_id.value < config/infrastructure.secret.json }
       AWS_REGION: { sh: jq -r .aws_region < config/ansible.secret.json }
       AWS_ACCESS_KEY_ID: { sh: jq -r .aws_access_key < config/ansible.secret.json }
       AWS_SECRET_ACCESS_KEY: { sh: jq -r .aws_secret_key < config/ansible.secret.json }
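The jq lookups above imply a config/ansible.secret.json of roughly the following shape. The keys are taken from these lookups and from the vars the playbooks below reference; every value here is a placeholder:

  {
    "aws_region": "us-east-1",
    "aws_access_key": "<access key>",
    "aws_secret_key": "<secret key>",
    "ssm_plugin": "<path to session-manager-plugin>",
    "image_name": "<image name>",
    "image_bucket": "<artifact bucket>",
    "image_key": "<artifact key>",
    "username": "<registry user>",
    "api_key": "<registry token>",
    "minio_access_key": "<storage access key>",
    "minio_secret_key": "<storage secret key>"
  }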


@@ -3,8 +3,10 @@ services:
   # Gitea itself.
   gitea:
     container_name: web-git-instance
     restart: unless-stopped
+    depends_on:
+      - backup
+      - bucket-script
     build:
       context: gitea
       dockerfile: Dockerfile.dev
@@ -31,7 +33,7 @@
       - /var/run/docker.sock:/var/run/docker.sock:ro
     environment:
       AWS_ENDPOINT: localstack:4566
-      AWS_S3_BUCKET_NAME: test
+      AWS_S3_BUCKET_NAME: backup
       AWS_ACCESS_KEY_ID: _
       AWS_SECRET_ACCESS_KEY: _
       BACKUP_CRON_EXPRESSION: "* * * * *"
@@ -59,7 +61,11 @@
       AWS_ACCESS_KEY_ID: _
       AWS_SECRET_ACCESS_KEY: _
       AWS_ENDPOINT_URL: http://localstack:4566
-    command: '"aws s3api create-bucket --bucket test"'
+    command: |
+      "
+      aws s3api create-bucket --bucket backup
+      aws s3api create-bucket --bucket storage
+      "
 volumes:
   data:
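Once the bucket-script service has run, the two buckets can be checked against LocalStack with the awscli-local wrapper pinned further down in requirements.txt (assuming LocalStack's default edge port 4566 is reachable from the host):

  # Expect "backup" and "storage" in the output.
  awslocal s3api list-buckets --query 'Buckets[].Name'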


@@ -3,6 +3,8 @@ FROM gitea/gitea:latest-rootless
 ADD --chown=git:git config /etc/gitea
 ADD --chown=git:git custom /etc/gitea-custom
 ENV GITEA_CUSTOM=/etc/gitea-custom
+RUN rm /etc/gitea/app.ini
+RUN mv /etc/gitea/dev.app.ini /etc/gitea/app.ini
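Since mv already overwrites an existing destination, the two new layers could be collapsed into one with the same effect:

  RUN mv /etc/gitea/dev.app.ini /etc/gitea/app.ini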


@@ -94,3 +94,10 @@ DEFAULT_MERGE_STYLE = merge
 [repository.signing]
 DEFAULT_TRUST_MODEL = committer
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.us-east-1.amazonaws.com
+MINIO_BUCKET = myrica-faya
+MINIO_USE_SSL = true
+MINIO_INSECURE_SKIP_VERIFY = false
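Note that the MinIO credentials are deliberately absent from this committed file. Gitea's containers map environment variables of the form GITEA__section__KEY onto app.ini entries, which is how the deploy playbooks below inject them at run time:

  # Equivalent to setting MINIO_ACCESS_KEY_ID / MINIO_SECRET_ACCESS_KEY under [storage]:
  GITEA__storage__MINIO_ACCESS_KEY_ID=<access key>
  GITEA__storage__MINIO_SECRET_ACCESS_KEY=<secret key>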


@@ -32,8 +32,8 @@ PROTOCOL = https
 ROOT_URL = https://localhost:443/
 DOMAIN = localhost
 HTTP_PORT = 443
-CERT_FILE = cert.pem
-KEY_FILE = key.pem
+CERT_FILE = /etc/gitea-custom/cert.pem
+KEY_FILE = /etc/gitea-custom/key.pem
 [database]
 DB_TYPE = sqlite3
@@ -95,3 +95,12 @@ DEFAULT_TRUST_MODEL = committer
 [oauth2]
 JWT_SECRET = x-----------------------------------------x
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = localstack:4566
+MINIO_ACCESS_KEY_ID = test
+MINIO_SECRET_ACCESS_KEY = test
+MINIO_BUCKET = storage
+MINIO_USE_SSL = false
+MINIO_INSECURE_SKIP_VERIFY = true


@@ -1,42 +0,0 @@
-- name: Make build artifact.
-  hosts: localhost
-  vars_files: ../config/ansible.secret.json
-  gather_facts: false
-  tasks:
-    - name: Build image.
-      community.docker.docker_image_build:
-        name: "{{ image_name }}"
-        path: ../gitea
-        nocache: true
-        rebuild: always
-        pull: true
-    - name: Make temp file.
-      ansible.builtin.tempfile:
-        suffix: .tar
-      register: tar_file
-    - name: Push image to archive.
-      community.docker.docker_image:
-        name: "{{ image_name }}"
-        archive_path: "{{ tar_file.path }}"
-        source: local
-    - name: Compress archive to artifact.
-      register: compress_image
-      community.general.archive:
-        path: "{{ tar_file.path }}"
-        dest: "{{ tar_file.path }}.xz"
-        format: xz
-        mode: "0644"
-    - name: Push artifact to S3.
-      amazon.aws.s3_object:
-        bucket: "{{ image_bucket }}"
-        object: "{{ image_key }}"
-        src: "{{ tar_file.path }}.xz"
-        mode: put
-        region: "{{ aws_region }}"
-        access_key: "{{ aws_access_key }}"
-        secret_key: "{{ aws_secret_key }}"


@@ -1,29 +0,0 @@
-- name: Make build artifact.
-  hosts: localhost
-  vars_files:
-    - ../config/ansible.secret.json
-    - ../config/infrastructure.secret.tf.json
-  gather_facts: false
-  tasks:
-    - name: Log into Docker.
-      community.docker.docker_login:
-        registry_url: '{{ full_domain.value }}'
-        username: '{{ username }}'
-        password: '{{ api_key }}'
-        reauthorize: true
-    - name: Build image.
-      community.docker.docker_image_build:
-        name: "{{ full_domain.value }}/{{ image_name }}:latest"
-        path: ../gitea
-        nocache: true
-        rebuild: always
-        pull: true
-        outputs: [{ type: image, push: true }]
-        platform:
-          - linux/amd64
-          - linux/arm64/v8
-    - name: Log out of Docker.
-      community.docker.docker_login:
-        state: absent


@@ -1,17 +1,46 @@
+- name: Make build artifact.
+  hosts: localhost
+  vars_files:
+    - ../config/ansible.secret.json
+    - ../config/infrastructure.secret.json
+  gather_facts: false
+  tasks:
+    - name: Log into Docker.
+      community.docker.docker_login:
+        registry_url: '{{ full_domain.value }}'
+        username: '{{ username }}'
+        password: '{{ api_key }}'
+        reauthorize: true
+    - name: Build image.
+      community.docker.docker_image_build:
+        name: "{{ full_domain.value }}/{{ image_name }}:latest"
+        path: ../gitea
+        nocache: true
+        rebuild: always
+        pull: true
+        outputs: [{ type: image, push: true }]
+        platform:
+          - linux/amd64
+          - linux/arm64/v8
+    - name: Log out of Docker.
+      community.docker.docker_login:
+        state: absent
 - name: Deploy artifact to instance.
   hosts: localhost
   become: true
   gather_facts: false
   vars_files:
     - ../config/ansible.secret.json
-    - ../config/infrastructure.secret.tf.json
+    - ../config/infrastructure.secret.json
   vars:
     ansible_connection: aws_ssm
-    ansible_python_interpreter: /usr/bin/python3
     ansible_aws_ssm_plugin: "{{ ssm_plugin }}"
     ansible_aws_ssm_bucket_name: "{{ image_bucket }}"
     ansible_aws_ssm_instance_id: "{{ instance_id.value }}"
+    ansible_python_interpreter: /usr/bin/python3
    ansible_aws_ssm_region: "{{ aws_region }}"
     ansible_aws_ssm_access_key_id: "{{ aws_access_key }}"
     ansible_aws_ssm_secret_access_key: "{{ aws_secret_key }}"
@@ -23,7 +52,7 @@
         state: started
         recreate: true
         restart_policy: unless-stopped
-        memory: 425m
+        memory: 300m
         memory_swap: 900m
         ports: [80:80, 2222:2222, 443:443, "22:22"]
         env:
@@ -34,6 +63,8 @@
           GITEA__server__SSH_DOMAIN: "{{ full_domain.value }}"
           GITEA__server__DOMAIN: "{{ full_domain.value }}"
           GITEA__server__ROOT_URL: "https://{{ full_domain.value }}/"
+          GITEA__storage__MINIO_ACCESS_KEY_ID: "{{ minio_access_key }}"
+          GITEA__storage__MINIO_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
         labels:
           docker-volume-backup.stop-during-backup: "true"
         volumes:
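With the Taskfile rename above, this combined build-push-deploy playbook now backs the fast path, invoked as:

  task deploy:fast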


@@ -4,14 +4,13 @@
   gather_facts: false
   vars_files:
     - ../config/ansible.secret.json
-    - ../config/infrastructure.secret.tf.json
+    - ../config/infrastructure.secret.json
   vars:
     ansible_connection: aws_ssm
     ansible_python_interpreter: /usr/bin/python3
     ansible_aws_ssm_plugin: "{{ ssm_plugin }}"
     ansible_aws_ssm_bucket_name: "{{ image_bucket }}"
     ansible_aws_ssm_instance_id: "{{ instance_id.value }}"
     ansible_aws_ssm_region: "{{ aws_region }}"
     ansible_aws_ssm_access_key_id: "{{ aws_access_key }}"
     ansible_aws_ssm_secret_access_key: "{{ aws_secret_key }}"
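The BUCKET and KEY values referenced in the Taskfile's deploy:restore entry are passed as Task variables on the command line, e.g.:

  task deploy:restore BUCKET=<backup bucket> KEY=<backup key>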


@@ -1,17 +1,54 @@
+- name: Make build artifact.
+  hosts: localhost
+  vars_files: ../config/ansible.secret.json
+  gather_facts: false
+  tasks:
+    - name: Build image.
+      community.docker.docker_image_build:
+        name: "{{ image_name }}"
+        path: ../gitea
+        nocache: true
+        rebuild: always
+        pull: true
+    - name: Push image to archive.
+      community.docker.docker_image:
+        name: "{{ image_name }}"
+        archive_path: ../dist/image.tar
+        source: local
+    - name: Compress archive to artifact.
+      register: compress_image
+      community.general.archive:
+        path: ../dist/image.tar
+        dest: ../dist/image.tar.xz
+        format: xz
+        mode: "0644"
+    - name: Push artifact to S3.
+      amazon.aws.s3_object:
+        bucket: "{{ image_bucket }}"
+        object: "{{ image_key }}"
+        src: ../dist/image.tar.xz
+        mode: put
+        region: "{{ aws_region }}"
+        access_key: "{{ aws_access_key }}"
+        secret_key: "{{ aws_secret_key }}"
 - name: Deploy artifact to instance.
   hosts: localhost
   become: true
   gather_facts: false
   vars_files:
     - ../config/ansible.secret.json
-    - ../config/infrastructure.secret.tf.json
+    - ../config/infrastructure.secret.json
   vars:
     ansible_connection: aws_ssm
     ansible_python_interpreter: /usr/bin/python3
     ansible_aws_ssm_plugin: "{{ ssm_plugin }}"
     ansible_aws_ssm_bucket_name: "{{ image_bucket }}"
     ansible_aws_ssm_instance_id: "{{ instance_id.value }}"
     ansible_aws_ssm_region: "{{ aws_region }}"
     ansible_aws_ssm_access_key_id: "{{ aws_access_key }}"
     ansible_aws_ssm_secret_access_key: "{{ aws_secret_key }}"
@@ -45,7 +82,7 @@
         state: started
         recreate: true
         restart_policy: unless-stopped
-        memory: 425m
+        memory: 300m
         memory_swap: 900m
         ports: [80:80, 2222:2222, 443:443, "22:22"]
         env:
@@ -56,6 +93,8 @@
           GITEA__server__SSH_DOMAIN: "{{ full_domain.value }}"
           GITEA__server__DOMAIN: "{{ full_domain.value }}"
           GITEA__server__ROOT_URL: "https://{{ full_domain.value }}/"
+          GITEA__storage__MINIO_ACCESS_KEY_ID: "{{ minio_access_key }}"
+          GITEA__storage__MINIO_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
         labels:
           docker-volume-backup.stop-during-backup: "true"
         volumes:
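Unlike the deleted tempfile-based build playbook, this slow path writes its archive to a fixed ../dist/image.tar, so a dist directory has to exist alongside the playbooks directory before task deploy:slow can get past the archive step.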


@@ -3,6 +3,7 @@ ansible-compat==24.10.0
 ansible-core==2.18.1
 ansible-lint==24.12.2
 attrs==24.3.0
+awscli-local==0.22.0
 black==24.10.0
 boto3==1.35.95
 botocore==1.35.95
@@ -19,6 +20,7 @@ Jinja2==3.1.5
 jmespath==1.0.1
 jsonschema==4.23.0
 jsonschema-specifications==2024.10.1
+localstack-client==2.7
 MarkupSafe==3.0.2
 mypy-extensions==1.0.0
 packaging==24.2


@@ -2,9 +2,9 @@ version: 3
 silent: true
 vars:
-  BACKEND: ../config/backend.secret.tf.json
-  VARIABLES: ../config/variables.secret.tf.json
-  OUTPUT: ../config/infrastructure.secret.tf.json
+  BACKEND: ../config/backend.secret.json
+  VARIABLES: ../config/variables.secret.json
+  OUTPUT: ../config/infrastructure.secret.json
 tasks:
   init: terraform init -backend-config={{.BACKEND}}

@@ -1,5 +1,6 @@
 #!/bin/sh
 ## Install extras.
+rpm --rebuilddb
 amazon-linux-extras install docker ansible2 python3.8 -y
@@ -10,7 +11,7 @@ systemctl start docker
 # Set up the correct version of Python (for Ansible).
 ln -sf /usr/bin/python3.8 /usr/bin/python3
 ln -sf /usr/bin/pip3.8 /usr/bin/pip3
-pip3 install botocore boto3 requests packaging
+pip3 install botocore boto3 requests packaging --user ssm-user
 python3 -m pip install -U pip
 # Add some swap space.
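As committed, the new pip3 line will try to install a package literally named ssm-user, since --user is a flag that takes no argument. If the intent was to make the libraries available to the ssm-user account that the SSM connection plugin runs under, something like the following would be needed instead:

  # Install the Ansible dependencies into ssm-user's own site-packages.
  sudo -u ssm-user pip3 install --user botocore boto3 requests packaging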


@@ -4,8 +4,10 @@ resource "aws_eip" "public" {
   domain = "vpc"
 }
-data "aws_iam_instance_profile" "ssm" {
+# An instance profile for access via AWS SSM.
+resource "aws_iam_instance_profile" "ssm" {
   name = "SSMInstanceProfile"
+  role = "AmazonSSMRoleForInstancesQuickSetup"
 }
 # The Gitea instance.
@@ -18,7 +20,7 @@ resource "aws_instance" "this" {
   user_data = file("install.sh")
   user_data_replace_on_change = false
-  iam_instance_profile = data.aws_iam_instance_profile.ssm.name
+  iam_instance_profile = aws_iam_instance_profile.ssm.name
   vpc_security_group_ids = [aws_security_group.public_access.id]
   metadata_options {
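Promoting the instance profile from a data source to a resource means Terraform now creates SSMInstanceProfile itself rather than requiring it to pre-exist in the account; the AmazonSSMRoleForInstancesQuickSetup role it attaches is still assumed to already be there.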


@@ -22,15 +22,18 @@ module "vpc" {
   map_public_ip_on_launch = true
   enable_dns_hostnames = true
   enable_dns_support = true
+  private_route_table_tags = { TableOf = "Main", TableType = "Public" }
 }
 # Only allow HTTP(s) and SSH traffic. Allow full access to internet.
 resource "aws_security_group" "public_access" {
   vpc_id = module.vpc.vpc_id
+  tags = { GroupOf = "Main", GroupType = "Public" }
 }
-resource "aws_vpc_security_group_ingress_rule" "ingress" {
-  for_each = toset(["80", "443", "22", "2222", "81", "8080", "4321", "1234"])
+resource "aws_vpc_security_group_ingress_rule" "tcp" {
+  for_each = toset(["80", "443", "22", "51821"])
   security_group_id = aws_security_group.public_access.id
@@ -40,6 +43,17 @@ resource "aws_vpc_security_group_ingress_rule" "ingress" {
   cidr_ipv4 = "0.0.0.0/0"
 }
+resource "aws_vpc_security_group_ingress_rule" "udp" {
+  for_each = toset(["51820", "53"])
+  security_group_id = aws_security_group.public_access.id
+  from_port = each.value
+  to_port = each.value
+  ip_protocol = "udp"
+  cidr_ipv4 = "0.0.0.0/0"
+}
 resource "aws_vpc_security_group_egress_rule" "egress" {
   for_each = toset(["-1"])