diff --git a/.gitignore b/.gitignore
index e69de29..fe7d8e3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1,10 @@
+.*.sw*
+.terraform*
+terraform.tfstate*
+privkey.pem
+inventory.yaml
+config.mk
+terraform.mk
+config.tf
+privkey
+pubkey
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..b4cb3cc
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,36 @@
+
+
+define CONFIG_MSG =
+
+You do not have a config.mk file.
+
+Please run "./configure" or copy `config.mk.in` to `config.mk` and edit its settings.
+
+endef
+
+
+
+default: terraform ansible
+
+terraform ansible: config.mk
+	@$(MAKE) -C $@
+
+ssh: config.mk
+	$(MAKE) -C ansible ssh
+
+#ansible:
+#	@$(MAKE) -C ansible
+
+config.mk:
+	@ $(info $(CONFIG_MSG))
+	@ exit 1
+
+clean:
+	rm -f config.mk
+	@$(MAKE) -C terraform clean
+	@$(MAKE) -C ansible clean
+
+
+.PHONY: default terraform ansible ssh clean
+
+
diff --git a/README.md b/README.md
index e69de29..29f66d3 100644
--- a/README.md
+++ b/README.md
@@ -0,0 +1,18 @@
+The intent here is to create an all-in-one social (Mastodon) server build on AWS.
+
+## Requirements
+
+* GNU Make
+* Ansible
+* Terraform
+* AWS CLI
+* AWS SessionManager plugin (http://docs.aws.amazon.com/console/systems-manager/session-manager-plugin-not-found)
+
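+## Usage
+
+A typical run, assuming AWS credentials are already configured locally, looks like:
+
+    ./configure   # interactive; writes config.mk
+    make          # runs terraform, then ansible
+    make ssh      # SSM-tunneled shell on the instance
+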
+ @ FAIL=""; \ + for TOOL in $(CHECK_TOOLS); do \ + which $${TOOL} >/dev/null || FAIL="$${FAIL} $${TOOL}"; \ + done; \ + if test -n "$${FAIL}"; then \ + echo "ERROR: You are missing the following:$${FAIL}"; \ + echo "Please make sure all necessary tools are installed and available in your path"; \ + echo; \ + exit 1; \ + fi + + @echo + + diff --git a/ansible/inventory.tmpl.yaml b/ansible/inventory.tmpl.yaml new file mode 100644 index 0000000..148bef2 --- /dev/null +++ b/ansible/inventory.tmpl.yaml @@ -0,0 +1,13 @@ +social: + hosts: + social: + ansible_host: "{{INSTANCE_ID}}" + ansible_ssh_common_args: -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ProxyCommand="sh -c \"aws --region {{AWS_REGION}} ssm start-session --target %h --document-name AWS-StartSSHSession --parameters portNumber=%p\"" + ansible_user: ubuntu + hostname: social + vars: + ansible_ssh_common_args: -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ProxyCommand="sh -c \"aws --region {{AWS_REGION}} ssm start-session --target %h --document-name AWS-StartSSHSession --parameters portNumber=%p\"" + public_ip: "{{PUBLIC_IP}}" + mastodon_sidekiq_count: {{MASTODON_SIDEKIQ_COUNT}} + mastodon_sidekiq_threads: {{MASTODON_SIDEKIQ_THREADS}} + diff --git a/ansible/roles/common/handlers/main.yml b/ansible/roles/common/handlers/main.yml new file mode 100644 index 0000000..57b6b31 --- /dev/null +++ b/ansible/roles/common/handlers/main.yml @@ -0,0 +1,6 @@ +--- + +- name: reboot + reboot: + reboot_timeout: 3600 + diff --git a/ansible/roles/common/tasks/main.yaml b/ansible/roles/common/tasks/main.yaml new file mode 100644 index 0000000..dea1a3b --- /dev/null +++ b/ansible/roles/common/tasks/main.yaml @@ -0,0 +1,67 @@ +--- + +# configure system + +- name: Configure hostname + copy: + content: "{{ hostname }}" + dest: /etc/hostname + notify: reboot +# yep we reboot for this + +- name: hostname in hosts + lineinfile: + path: /etc/hosts + regexp: "^127.0.0.1" + line: "127.0.0.1 {{ hostname }} localhost" + +- name: Set timezone + file: + src: /usr/share/zoneinfo/America/Los_Angeles + dest: /etc/localtime + state: link + notify: reboot + +- name: Set keyboard + lineinfile: + path: /etc/default/keyboard + regexp: '^XKBLAYOUT=' + line: 'XKBLAYOUT="us"' + notify: reboot + +- name: Shaboom!!! 
+__sed_%:
+	$(eval SEDLINE := $$(SEDLINE) -e 's/{{$*}}/$($*)/')
+
+
+
+#
+#TF_OUTPUTS =
+#
+#tf_outputs: ../terraform/terraform.tfstate
+#
+
+# FIXME: DRY this target
+
+CHECK_TOOLS = ansible
+
+toolcheck:
+	@echo
+	@echo "Checking applications..."
+	@ FAIL=""; \
+	for TOOL in $(CHECK_TOOLS); do \
+		which $${TOOL} >/dev/null || FAIL="$${FAIL} $${TOOL}"; \
+	done; \
+	if test -n "$${FAIL}"; then \
+		echo "ERROR: You are missing the following:$${FAIL}"; \
+		echo "Please make sure all necessary tools are installed and available in your path"; \
+		echo; \
+		exit 1; \
+	fi
+
+	@echo
+
+
diff --git a/ansible/inventory.tmpl.yaml b/ansible/inventory.tmpl.yaml
new file mode 100644
index 0000000..148bef2
--- /dev/null
+++ b/ansible/inventory.tmpl.yaml
@@ -0,0 +1,12 @@
+social:
+  hosts:
+    social:
+      ansible_host: "{{INSTANCE_ID}}"
+      ansible_user: ubuntu
+      hostname: social
+  vars:
+    ansible_ssh_common_args: -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ProxyCommand="sh -c \"aws --region {{AWS_REGION}} ssm start-session --target %h --document-name AWS-StartSSHSession --parameters portNumber=%p\""
+    public_ip: "{{PUBLIC_IP}}"
+    mastodon_sidekiq_count: {{MASTODON_SIDEKIQ_COUNT}}
+    mastodon_sidekiq_threads: {{MASTODON_SIDEKIQ_THREADS}}
+
diff --git a/ansible/roles/common/handlers/main.yml b/ansible/roles/common/handlers/main.yml
new file mode 100644
index 0000000..57b6b31
--- /dev/null
+++ b/ansible/roles/common/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+
+- name: reboot
+  reboot:
+    reboot_timeout: 3600
+
diff --git a/ansible/roles/common/tasks/main.yaml b/ansible/roles/common/tasks/main.yaml
new file mode 100644
index 0000000..dea1a3b
--- /dev/null
+++ b/ansible/roles/common/tasks/main.yaml
@@ -0,0 +1,69 @@
+---
+
+# configure system
+
+- name: Configure hostname
+  copy:
+    content: "{{ hostname }}"
+    dest: /etc/hostname
+  notify: reboot
+# yep we reboot for this
+
+- name: hostname in hosts
+  lineinfile:
+    path: /etc/hosts
+    regexp: "^127.0.0.1"
+    line: "127.0.0.1 {{ hostname }} localhost"
+
+- name: Set timezone
+  file:
+    src: /usr/share/zoneinfo/America/Los_Angeles
+    dest: /etc/localtime
+    state: link
+  notify: reboot
+
+- name: Set keyboard
+  lineinfile:
+    path: /etc/default/keyboard
+    regexp: '^XKBLAYOUT='
+    line: 'XKBLAYOUT="us"'
+  notify: reboot
+
+- name: Dist-upgrade all packages
+  apt:
+    update_cache: yes
+    upgrade: dist
+    force_apt_get: yes
+  register: upgrade_result
+  until: upgrade_result is succeeded
+  retries: 2
+  delay: 10
+
+- name: install base apps
+  apt:
+    force_apt_get: yes
+    name:
+      - vim
+      - less
+      - tmux
+      - telnet
+      - ntp
+      - lsof
+
+- name: edit bashrc
+  blockinfile:
+    path: /etc/bash.bashrc
+    marker: "### {mark} ANSIBLE MANAGED BLOCK {{ item.name }} ###"
+    block: "{{ item.block }}"
+  with_items:
+    - name: prompt
+      block: |
+        if [[ $USER == 'root' ]]; then
+          PS1='${debian_chroot:+($debian_chroot)}\[\033[01;31m\]\u@\h\[\033[00m\]:\[\033[01;33m\]\w\[\033[00m\]# '
+        else
+          PS1='${debian_chroot:+($debian_chroot)}\[\033[01;36m\]\u@\h\[\033[00m\]:\[\033[01;32m\]\w\[\033[00m\]\$ '
+        fi
+    - name: lscolor
+      block: |
+        alias ls='ls --color=auto'
+
diff --git a/ansible/roles/mastodon/tasks/main.yaml b/ansible/roles/mastodon/tasks/main.yaml
new file mode 100644
index 0000000..d32bff4
--- /dev/null
+++ b/ansible/roles/mastodon/tasks/main.yaml
@@ -0,0 +1,28 @@
+---
+
+- name: install base apps
+  apt:
+    force_apt_get: yes
+    name:
+      - docker-compose-v2
+      - git
+
+- name: Mastodon path
+  file:
+    path: "/srv/mastodon"
+    state: directory
+    recurse: true
+
+- name: mastodon source
+  git:
+    repo: "https://tea.entar.net/teh/mastodon.git"
+    dest: /srv/mastodon/src
+
+- name: mastodon docker-compose
+  template:
+    src: templates/docker-compose.mastodon.yaml
+    dest: /srv/mastodon/docker-compose.yaml
+
+# TODO: add the .env.production file the compose services expect
+
+
diff --git a/ansible/roles/mastodon/templates/docker-compose.mastodon.yaml b/ansible/roles/mastodon/templates/docker-compose.mastodon.yaml
new file mode 100644
index 0000000..7ddbc58
--- /dev/null
+++ b/ansible/roles/mastodon/templates/docker-compose.mastodon.yaml
@@ -0,0 +1,170 @@
+version: '3'
+services:
+  mastodon_db:
+    container_name: mastodon_db
+    restart: always
+    image: postgres:14-alpine
+    shm_size: 256mb
+    networks:
+      - mastodon
+    healthcheck:
+      test: ['CMD', 'pg_isready', '-U', 'postgres']
+    volumes:
+      - ./postgres14:/var/lib/postgresql/data
+    env_file: .env.production
+    environment:
+      - 'POSTGRES_HOST_AUTH_METHOD=trust'
+    command: postgres -c 'max_connections={{mastodon_sidekiq_count * mastodon_sidekiq_threads + 50}}'
+
+  mastodon_redis:
+    container_name: mastodon_redis
+    restart: always
+    image: redis:7-alpine
+    networks:
+      - mastodon
+    healthcheck:
+      test: ['CMD', 'redis-cli', 'ping']
+    volumes:
+      - ./redis:/data
+
+  # es:
+  #   restart: always
+  #   image: docker.elastic.co/elasticsearch/elasticsearch:7.17.4
+  #   environment:
+  #     - "ES_JAVA_OPTS=-Xms512m -Xmx512m -Des.enforce.bootstrap.checks=true"
+  #     - "xpack.license.self_generated.type=basic"
+  #     - "xpack.security.enabled=false"
+  #     - "xpack.watcher.enabled=false"
+  #     - "xpack.graph.enabled=false"
+  #     - "xpack.ml.enabled=false"
+  #     - "bootstrap.memory_lock=true"
+  #     - "cluster.name=es-mastodon"
+  #     - "discovery.type=single-node"
+  #     - "thread_pool.write.queue_size=1000"
+  #   networks:
+  #     - mastodon
+  #     - nginx
+  #   healthcheck:
+  #     test: ["CMD-SHELL", "curl --silent --fail localhost:9200/_cluster/health || exit 1"]
+  #   volumes:
+  #     - ./elasticsearch:/usr/share/elasticsearch/data
+  #   ulimits:
+  #     memlock:
+  #       soft: -1
+  #       hard: -1
+  #     nofile:
+  #       soft: 65536
+  #       hard: 65536
+  #   ports:
+  #     - '127.0.0.1:9200:9200'
+
+  mastodon_web:
+    container_name: mastodon_web
+    build: src
+    image: ghcr.io/mastodon/mastodon:v4.2.1
+    restart: always
+    env_file: .env.production
+    command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
+    networks:
+      - mastodon
+      - nginx
+    healthcheck:
+      # prettier-ignore
+      test: ['CMD-SHELL', 'wget -q --spider --proxy=off localhost:3000/health || exit 1']
+    ports:
+      - '127.0.0.1:3000:3000'
+    depends_on:
+      - mastodon_db
+      - mastodon_redis
+      # - es
+    volumes:
+      - ./public/system:/mastodon/public/system
+
+  mastodon_streaming:
+    container_name: mastodon_streaming
+    build: src
+    image: ghcr.io/mastodon/mastodon:v4.2.1
+    restart: always
+    env_file: .env.production
+    environment:
+      - PORT=5000
+    command: node ./streaming
+    networks:
+      - mastodon
+      - nginx
+    healthcheck:
+      # prettier-ignore
+      test: ['CMD-SHELL', 'wget -q --spider --proxy=off localhost:5000/api/v1/streaming/health || exit 1']
+    ports:
+      - '127.0.0.1:5000:5000'
+    depends_on:
+      - mastodon_db
+      - mastodon_redis
+
+  {% for i in range(mastodon_sidekiq_count) %}
+  mastodon_sidekiq_{{i}}:
+    container_name: mastodon_sidekiq_{{i}}
+    build: src
+    image: ghcr.io/mastodon/mastodon:v4.2.1
+    restart: always
+    env_file: .env.production
+    environment:
+      - DB_POOL={{ mastodon_sidekiq_threads }}
+    command: bundle exec sidekiq -c {{ mastodon_sidekiq_threads }}
+    depends_on:
+      - mastodon_db
+      - mastodon_redis
+    networks:
+      - mastodon
+      - nginx
+    volumes:
+      - ./public/system:/mastodon/public/system
+    healthcheck:
+      test: ['CMD-SHELL', "ps aux | grep '[s]idekiq\ 6' || false"]
+
+  {% endfor %}
+
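+  ## With the defaults from config.mk.in (MASTODON_SIDEKIQ_COUNT = 2), the loop
+  ## above renders two services, mastodon_sidekiq_0 and mastodon_sidekiq_1,
+  ## each with DB_POOL sized to match its sidekiq concurrency.
+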
+  ## Uncomment to enable federation with tor instances along with adding the following ENV variables
+  ## http_proxy=http://privoxy:8118
+  ## ALLOW_ACCESS_TO_HIDDEN_SERVICE=true
+  # tor:
+  #   image: sirboops/tor
+  #   networks:
+  #     - external_network
+  #     - internal_network
+  #
+  # privoxy:
+  #   image: sirboops/privoxy
+  #   volumes:
+  #     - ./priv-config:/opt/config
+  #   networks:
+  #     - external_network
+  #     - internal_network
+
+  statsd:
+    image: prom/statsd-exporter
+    container_name: mastodon_statsd
+    restart: always
+    ports:
+      - '0.0.0.0:9102:9102'
+    command:
+      "--statsd.mapping-config=/statsd-mapping.yaml"
+    volumes:
+      - ./statsd-mapping.yaml:/statsd-mapping.yaml
+    networks:
+      - mastodon
+
+
+networks:
+  mastodon:
+    ipam:
+      driver: default
+      config:
+        - subnet: 172.28.0.0/16
+  nginx:
+    external: true
+
diff --git a/ansible/site.yaml b/ansible/site.yaml
new file mode 100644
index 0000000..a38c7aa
--- /dev/null
+++ b/ansible/site.yaml
@@ -0,0 +1,12 @@
+---
+
+- name: apply common config
+  hosts: all
+  roles:
+    - { role: common, become: yes }
+
+- name: mastodon instance
+  hosts: social
+  roles:
+    - { role: mastodon, become: yes }
+
diff --git a/config.mk.in b/config.mk.in
new file mode 100644
index 0000000..2d84ae5
--- /dev/null
+++ b/config.mk.in
@@ -0,0 +1,42 @@
+
+## config.mk.in
+##
+## Template for configuration.
+## The comment preceding any variable is printed as its prompt.
+
+# Right now AWS is the only option. This is only here for future use.
+
+# AWS: Are we using AWS? 1 means yes, 0 means no (only 1 works right now!)
+#AWS = 1
+
+
+# AWS_REGION: what region is all this stuff going in?
+AWS_REGION = us-west-2
+
+# Instance type is configured for a single instance only
+
+# AWS_SINGLE_INSTANCE_TYPE: What size instance should we be using?
+AWS_SINGLE_INSTANCE_TYPE = t4g.small
+
+# Paste (one-line!) the root SSH public key for the AWS instance, or leave blank to generate new private/public keys
+AWS_INSTANCE_PUBLIC_KEY =
+
+# What is the DNS subdomain to be delegated for these services? Leave blank to skip.
+AWS_ROUTE53_ZONE =
+
+
+
+# TODO: more detailed sidekiq tuning per https://thomas-leister.de/en/scaling-up-mastodon/
+
+# How many sidekiq containers should Mastodon have?
+MASTODON_SIDEKIQ_COUNT = 2
+
+# How many threads in each sidekiq container?
+MASTODON_SIDEKIQ_THREADS = 100
+
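+# Example: for the AWS_REGION entry above, ./configure prompts roughly like:
+#
+#   AWS_REGION: what region is all this stuff going in?
+#   (default: us-west-2)
+#   >
+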
diff --git a/configure b/configure
new file mode 100755
index 0000000..f24747a
--- /dev/null
+++ b/configure
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+
+# I acknowledge that this is weird. Autoconf/automake are just too heavy for this task.
+
+echo "Configuring. Please answer the following questions:"
+
+# Read through config.mk.in. Every time a variable pops up, emit the comment
+# before it and prompt the user for a value, using the setting in config.mk.in as a default.
+
+TEMPFILE=.config.temp
+declare -A CONFIG
+
+COMMENT=""
+
+echo -n > "$TEMPFILE"
+
+while read -r LINE <&4; do
+  if echo "$LINE" | grep -q '^#'; then
+    COMMENT=$(echo "$LINE" | sed -e 's/^#\+ *//')
+  elif echo "$LINE" | grep -q '^[0-9A-Z_]\+ *= *'; then
+
+    VARNAME=$(echo "$LINE" | sed -e 's/ *=.*//')
+    DEFAULT=$(echo "$LINE" | sed -e 's/^[0-9A-Z_]\+ *= *//')
+
+    # if there are prefix vars that are false, we need to skip
+    SKIP=0
+    for K in "${!CONFIG[@]}"; do
+      if echo "$VARNAME" | grep -q "^${K}"; then
+        if [[ ${CONFIG[$K]} == "0" ]]; then
+          SKIP=1
+          break
+        fi
+      fi
+    done
+
+    if [[ $SKIP -eq 0 ]]; then
+      echo
+      echo "$COMMENT"
+      #echo "$LINE"
+      echo "(default: ${DEFAULT})"
+      echo -n "> "
+      read -r VALUE
+
+      if [[ -z $VALUE ]]; then
+        VALUE="${DEFAULT}"
+      fi
+
+      CONFIG[${VARNAME}]="${VALUE}"
+
+      echo "# ${COMMENT}" >> "$TEMPFILE"
+      echo "${VARNAME} = ${VALUE}" >> "$TEMPFILE"
+      echo >> "$TEMPFILE"
+    fi
+  fi
+done 4< config.mk.in
+
+echo
+echo "All done! Putting your configuration into config.mk"
+
+mv "$TEMPFILE" config.mk
+
diff --git a/terraform/Makefile b/terraform/Makefile
new file mode 100644
index 0000000..8f046d0
--- /dev/null
+++ b/terraform/Makefile
@@ -0,0 +1,75 @@
+
+include ../config.mk
+
+default: terraform
+
+# I hate sed too and I am so sorry for what I'm about to do
+terraform: terraform-check config.tf *.tf
+	terraform init
+	terraform apply
+	terraform output | sed \
+		-e 's/\(.*\) = /\U\1 = /' \
+		-e 's/"//g' \
+		-e 'h;s/.* = //;s/\([.]\)/\\\1/g;x;s/ = .*//;G;s/\n/ = /' \
+		-e 's/ \+$$//' \
+		> terraform.mk
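+# The pipeline above turns "terraform output" lines such as
+#   instance_id = "i-0abc123"          <- illustrative value
+# into make assignments such as
+#   INSTANCE_ID = i-0abc123
+# upper-casing the name, stripping quotes, and backslash-escaping dots so the
+# values survive the later sed substitutions in ansible/Makefile.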
+
+terraform-check: toolcheck pubkey
+	$(eval AWS_INSTANCE_PUBLIC_KEY := $(shell sed -e 's/\//\\\//g' pubkey))
+
+
+#CONFIGVARS = AWS_INSTANCE_PUBLIC_KEY AWS_SINGLE_INSTANCE_TYPE
+SEDLINE =
+
+config.tf: sedline config.tf.in ../config.mk
+	sed $(SEDLINE) config.tf.in > config.tf
+
+#configvars: ../config.mk
+#	$(eval CONFIGVARS := $(shell grep '^[0-9A-Z]' ../config.mk | awk '{print $1}'))
+#	echo $(CONFIGVARS)
+
+#sedline: configvars $(addprefix __sed_,$(CONFIGVARS))
+sedline: $(addprefix __sed_,$(shell grep '^[0-9A-Z_]' ../config.mk | awk '{print $$1}'))
+
+__sed_%:
+	$(eval SEDLINE := $$(SEDLINE) -e 's/{{$*}}/$($*)/')
+
+
+CHECK_TOOLS = terraform aws
+
+toolcheck:
+	@echo
+	@echo "Checking applications..."
+	@ FAIL=""; \
+	for TOOL in $(CHECK_TOOLS); do \
+		which $${TOOL} >/dev/null || FAIL="$${FAIL} $${TOOL}"; \
+	done; \
+	if test -n "$${FAIL}"; then \
+		echo "ERROR: You are missing the following:$${FAIL}"; \
+		echo "Please make sure all necessary tools are installed and available in your path"; \
+		echo; \
+		exit 1; \
+	fi
+
+	@echo
+	@echo "Checking AWS configuration..."
+	aws iam get-user
+
+pubkey:
+	if test -n "$(AWS_INSTANCE_PUBLIC_KEY)"; then \
+		echo "$(AWS_INSTANCE_PUBLIC_KEY)" > pubkey; \
+	else \
+		ssh-keygen -t rsa -N "" -f privkey && mv privkey.pub pubkey; \
+	fi
+
+# clean doesn't touch tfstate because we're not insane
+clean:
+	rm -f privkey pubkey
+	rm -rf .terraform*
+
diff --git a/terraform/config.tf.in b/terraform/config.tf.in
new file mode 100644
index 0000000..8f67d11
--- /dev/null
+++ b/terraform/config.tf.in
@@ -0,0 +1,8 @@
+
+locals {
+  public_key    = "{{AWS_INSTANCE_PUBLIC_KEY}}"
+  instance_type = "{{AWS_SINGLE_INSTANCE_TYPE}}"
+  route53_zone  = "{{AWS_ROUTE53_ZONE}}"
+  aws_region    = "{{AWS_REGION}}"
+}
+
diff --git a/terraform/main.tf b/terraform/main.tf
new file mode 100644
index 0000000..f5b13d9
--- /dev/null
+++ b/terraform/main.tf
@@ -0,0 +1,118 @@
+
+
+# [X] aws provider
+# [/] random pet
+#     not needed w/o s3 bucket
+# [/] s3 bucket
+# [/] use pet name!
+#     n/a
+# [X] vpc
+# [/] tls private key
+# [X] aws key pair
+# [/] aws key local file
+# [X] instance
+# [ ] "myip"
+# [X] sg
+# [X] EIP
+# [X] iam_instance_profile
+# [X] iam_role
+# [X] policydoc
+# [X] policy
+# [X] policy attachment
+# [X] iam policy data
+# [ ] route53 records
+# [/] adminpass for nextcloud
+# [ ] outputs:
+# [ ] instance ID
+# [ ] public IP
+# [ ] name servers
+# [ ] bucket
+# [ ] myip
+
+
+provider "aws" {
+  region = local.aws_region
+}
+
+#resource "random_pet" "name" ()
+
+module "vpc" {
+  source = "terraform-aws-modules/vpc/aws"
+
+  name = "social-vpc"
+  cidr = "10.42.0.0/16"
+
+  azs             = [ "${local.aws_region}a" ]  # XXX probably a better way to pick AZs
+  private_subnets = [ "10.42.0.0/20" ]
+  public_subnets  = [ "10.42.16.0/20" ]
+
+  enable_nat_gateway = false  # nat gateways cost money and who has any of that?
+  enable_vpn_gateway = false
+}
+
+resource "aws_instance" "social" {
+  ami                  = data.aws_ami.ubuntu.id
+  instance_type        = local.instance_type
+  subnet_id            = module.vpc.public_subnets[0]
+  key_name             = aws_key_pair.key.key_name
+  iam_instance_profile = aws_iam_instance_profile.ssm.name
+
+  vpc_security_group_ids = [ module.sg.security_group_id ]
+
+  user_data = <