Compare commits


32 commits
mail ... master

Author SHA1 Message Date
wizzdom
7b769cebfe
wiki: use utf8 encoding, improve db performance, more backups (#100)
* wiki: use utf8 encoding, improve db performance, more backups

* add medik skin colour, logo
2025-03-01 00:51:17 +00:00
Ayden
07f1f032b7
Esports: update discord bot job and add minecraft server (#101)
* socs: update esports discord bot

* esports: add minecraft server job
2025-02-28 19:58:56 +00:00
wizzdom
44ac151512
add uptime kuma (#99)
* add uptime kuma

* uptime-kuma: move to monitoring/
2025-02-28 14:34:00 +00:00
wizzdom
12278b1b44
bastion-vm-backup: remove unreliable backups over scp (#98) 2025-02-28 03:03:06 +00:00
wizzdom
737dd00e06
hedgedoc: bump image version 2025-02-28 01:01:16 +00:00
wizzdom
cfaf7a4309
mediawiki: bump db RAM 2025-02-28 00:58:55 +00:00
wizzdom
b58c812a3e
Use separate DB for all services (#95)
* migrate vaultwarden to separate db

* plausible: add separate db, move click mount

* privatebin: separate db, cleanup

* add privatebin backup job

* remove postgres job
2025-02-09 19:54:17 +00:00
wizzdom
fc337777cb
add C&S room bookings job (#93) 2025-02-06 12:47:36 +00:00
wizzdom
24911a2907
add redbrick subdomain for style (#94) 2025-02-06 12:44:47 +00:00
wizzdom
8819180c25
add uri-meetups job (#92) 2025-02-05 18:18:04 +00:00
wizzdom
ad4cfbbaf6
bump version, players, view distance (#89)
we have the RAM for it anyways ;)
2025-01-13 18:42:53 +00:00
wizzdom
d0b3c14a85
atlas: add www & www redirects (#88)
make more old links work again ;)
2025-01-04 01:03:16 +00:00
Gavin Holahan
2263558f4a
Moved The Look Online to aperture (#87)
Co-authored-by: Wizzdom <wizzdom@redbrick.dcu.ie>
2025-01-03 00:38:23 +00:00
wizzdom
14e0b7eae3
traefik: add dynamic redirects via consul kv (#85) 2024-12-29 17:59:30 +00:00
wizzdom
e951e1ba17
thecollegeview: much caches, permissions (#84)
- add `redis` object cache
- add `nginx` static page cache with WP Super Cache
- add `nginx` to `www-data` group to avoid permissions conflicts
- increase PHP `max_children`, `upload_max_filesize`, `post_max_size`
  configs
2024-12-15 18:33:44 +00:00
wizzdom
c169d75001
traefik: add ssh, voice, tracing, access log (#83)
mumble voice configs taken from here: https://github.com/DistroByte/nomad/blob/master/jobs/traefik.hcl

Co-authored-by: DistroByte <james@distrobyte.io>
2024-12-15 18:12:06 +00:00
wizzdom
b22f9d8b75
minecraft/vanilla: increase memory (#82) 2024-12-12 01:50:19 +00:00
wizzdom
9f16d94cbb
add thecollegeview.ie (#80)
* add thecollegeview.ie

* thecollegeview: migrate to phpfpm + nginx

* thecollegeview: pass rest api to phpfpm

* mps-site: remove hacky workaround for tcv

* thecollegeview: pass all dirs to phpfpm

* backup the correct db
2024-12-11 14:35:46 +00:00
wizzdom
37e6facab6
amikon: update to support node docker image (#81) 2024-12-10 23:27:40 +00:00
Ayden
f3e5ae5e2b
update db job to be tied to postgres alpine 16 (#78)
Co-authored-by: wizzdom <dom@wizzdom.xyz>
2024-12-02 22:55:48 +00:00
wizzdom
a464a915f0
hedgedoc: add mount for banner, set default permission (#76) 2024-11-24 09:31:42 +00:00
wizzdom
d38f434a13
postgres-backup: delete old backups from the correct location (#77) 2024-11-24 09:31:28 +00:00
Ayden
6ae4ea0c8f
update env variables (#75) 2024-11-18 15:30:13 +00:00
Ayden
7ae45f6cd9
update env vars for solar racing website (#74) 2024-11-18 00:10:11 +00:00
Ayden
2b1a8e68dc
add esports bot job (#72)
Co-authored-by: wizzdom <wizzdom@redbrick.dcu.ie>
2024-11-01 23:10:33 +00:00
wizzdom
198d269d37
add github actions runner for CI/CD deployments with Nomad (#71) 2024-10-24 16:01:33 +01:00
wizzdom
7dc24a13bd
add paperless for document tracking and indexing (#70) 2024-10-22 16:13:29 +01:00
wizzdom
4b64070d7c games/mc/vanilla: update plugins, increase RAM 2024-10-22 02:25:44 +01:00
Ayden
55926dd4e0
add dcusr listmonk job (#69)
Co-authored-by: wizzdom <wizzdom@redbrick.dcu.ie>
2024-10-21 13:51:03 +01:00
wizzdom
d251d0e154
ansible/nomad: enable bridge hairpin mode (#68) 2024-10-17 18:16:35 +01:00
wizzdom
c993ceb6ed
games/minecraft: add plugins, bluemap config (#64) 2024-10-15 13:35:08 +01:00
wizzdom
29d57b8081
hedgedoc: update to latest, move db, fix backups (#66) 2024-10-15 12:52:46 +01:00
34 changed files with 1632 additions and 242 deletions

View file

@@ -1,5 +1,11 @@
client {
enabled = true
+ # for minecraft modpack zip bombing allowance
+ artifact {
+ decompression_size_limit = "0"
+ decompression_file_count_limit = 12000
+ }
+ bridge_network_hairpin_mode = true
}
plugin "raw_exec" {
@@ -15,4 +21,4 @@ plugin "docker" {
enabled = true
}
}
}

View file

@ -0,0 +1,64 @@
job "esports-minecraft" {
datacenters = ["aperture"]
type = "service"
group "esports-mc" {
count = 1
network {
port "mc" {
to = 25565
}
port "rcon" {
to = 25575
}
}
service {
name = "esports-mc"
port = "mc"
}
service {
name = "esports-mc-rcon"
port = "rcon"
}
task "esports-minecraft" {
driver = "docker"
config {
image = "itzg/minecraft-server"
ports = ["mc", "rcon"]
volumes = [
"/storage/nomad/${NOMAD_TASK_NAME}:/data"
]
}
resources {
cpu = 5000 # 5000 MHz
memory = 20480 # 20 GB
}
template {
data = <<EOF
EULA = "TRUE"
TYPE = "PAPER"
VERSION = "1.21.4"
ICON = "https://liquipedia.net/commons/images/thumb/5/53/DCU_Esports_allmode.png/37px-DCU_Esports_allmode.png"
USE_AIKAR_FLAGS = true
MAX_MEMORY = 18G
MOTD = "Powered by Redbrick"
MAX_PLAYERS = "32"
VIEW_DISTANCE = "32"
ENABLE_RCON = true
RCON_PASSWORD = {{ key "games/mc/esports-mc/rcon/password" }}
# Auto-download plugins
SPIGET_RESOURCES=83581,62325,118271,28140,102931 # RHLeafDecay, GSit, GravesX, Luckperms, NoChatReport
MODRINTH_PROJECTS=datapack:no-enderman-grief,thizzyz-tree-feller,imageframe,bmarker,datapack:players-drop-heads,viaversion,viabackwards
EOF
destination = "local/.env"
env = true
}
}
}
}

View file

@@ -12,6 +12,9 @@ job "minecraft-vanilla" {
port "rcon" {
to = 25575
}
+ port "bluemap" {
+ to = 8100
+ }
}
service {
@@ -24,32 +27,48 @@ job "minecraft-vanilla" {
port = "rcon"
}
+ service {
+ name = "vanilla-mc-bluemap"
+ port = "bluemap"
+ tags = [
+ "traefik.enable=true",
+ "traefik.http.routers.vanilla-mc-bluemap.rule=Host(`vanilla-mc.rb.dcu.ie`)",
+ "traefik.http.routers.vanilla-mc-bluemap.entrypoints=web,websecure",
+ "traefik.http.routers.vanilla-mc-bluemap.tls.certresolver=lets-encrypt",
+ ]
+ }
task "minecraft-vanilla" {
driver = "docker"
config {
image = "itzg/minecraft-server"
- ports = ["mc", "rcon"]
+ ports = ["mc", "rcon", "bluemap"]
volumes = [
"/storage/nomad/${NOMAD_TASK_NAME}:/data"
]
}
resources {
- cpu = 3000 # 3000 MHz
- memory = 8192 # 8GB
+ cpu = 5000 # 5000 MHz
+ memory = 20480 # 20 GB
}
template {
data = <<EOF
EULA = "TRUE"
TYPE = "PAPER"
- VERSION = "1.21.1"
+ VERSION = "1.21.3"
ICON = "https://docs.redbrick.dcu.ie/assets/logo.png"
USE_AIKAR_FLAGS = true
+ MAX_MEMORY = 18G
MOTD = "LONG LIVE THE REDBRICK"
- MAX_PLAYERS = "20"
+ MAX_PLAYERS = "32"
+ VIEW_DISTANCE = "32"
ENABLE_RCON = true
RCON_PASSWORD = {{ key "games/mc/vanilla-mc/rcon/password" }}
+ # Auto-download plugins
+ SPIGET_RESOURCES=83581,62325,118271,28140,102931 # RHLeafDecay, GSit, GravesX, Luckperms, NoChatReport
+ MODRINTH_PROJECTS=datapack:no-enderman-grief,thizzyz-tree-feller,imageframe,bluemap,bmarker,datapack:players-drop-heads,viaversion,viabackwards
EOF
destination = "local/.env"
env = true

View file

@@ -14,6 +14,9 @@ job "traefik" {
port "admin" {
static = 8080
}
+ port "ssh" {
+ static = 22
+ }
port "smtp" {
static = 25
}
@@ -38,6 +41,12 @@ job "traefik" {
port "managesieve" {
static = 4190
}
+ port "voice-tcp" {
+ static = 4502
+ }
+ port "voice-udp" {
+ static = 4503
+ }
}
service {
@@ -55,6 +64,7 @@ job "traefik" {
volumes = [
"local/traefik.toml:/etc/traefik/traefik.toml",
"/storage/nomad/traefik/acme/acme.json:/acme.json",
+ "/storage/nomad/traefik/access.log:/access.log",
]
}
@@ -73,6 +83,9 @@ job "traefik" {
[entryPoints.traefik]
address = ":8080"
+ [entryPoints.ssh]
+ address = ":22"
[entryPoints.smtp]
address = ":25"
@@ -97,6 +110,14 @@ job "traefik" {
[entryPoints.managesieve]
address = ":4190"
+ [entryPoints.voice-tcp]
+ address = ":4502"
+ [entryPoints.voice-udp]
+ address = ":4503/udp"
+ [entryPoints.voice-udp.udp]
+ timeout = "15s" # this will help reduce random dropouts in audio https://github.com/mumble-voip/mumble/issues/3550#issuecomment-441495977
[tls.options]
[tls.options.default]
minVersion = "VersionTLS12"
@@ -122,6 +143,10 @@ job "traefik" {
address = "127.0.0.1:8500"
scheme = "http"
+ # Enable the file provider for dynamic configuration.
+ [providers.file]
+ filename = "/local/dynamic.toml"
#[providers.nomad]
# [providers.nomad.endpoint]
# address = "127.0.0.1:4646"
@@ -131,9 +156,50 @@ job "traefik" {
email = "elected-admins@redbrick.dcu.ie"
storage = "acme.json"
[certificatesResolvers.lets-encrypt.acme.tlsChallenge]
+ [tracing]
+ [accessLog]
+ filePath = "/access.log"
EOF
destination = "/local/traefik.toml"
}
template {
data = <<EOF
[http]
[http.middlewares]
# handle redirects for short links
# NOTE: this is a consul template, add entries via consul kv
# create the middlewares with replacements for each redirect
{{ range $pair := tree "redirect/redbrick" }}
[http.middlewares.redirect-{{ trimPrefix "redirect/redbrick/" $pair.Key }}.redirectRegex]
regex = ".*" # match everything - hosts are handled by the router
replacement = "{{ $pair.Value }}"
permanent = true
{{- end }}
[http.routers]
# create routers with middlewares for each redirect
{{ range $pair := tree "redirect/redbrick" }}
[http.routers.{{ trimPrefix "redirect/redbrick/" $pair.Key }}-redirect]
rule = "Host(`{{ trimPrefix "redirect/redbrick/" $pair.Key }}.redbrick.dcu.ie`)"
entryPoints = ["web", "websecure"]
middlewares = ["redirect-{{ trimPrefix "redirect/redbrick/" $pair.Key }}"]
service = "dummy-service" # all routers need a service, this isn't used
[http.routers.{{ trimPrefix "redirect/redbrick/" $pair.Key }}-redirect.tls]
{{- end }}
[http.services]
[http.services.dummy-service.loadBalancer]
[[http.services.dummy-service.loadBalancer.servers]]
url = "http://127.0.0.1" # Dummy service - not used
EOF
destination = "local/dynamic.toml"
change_mode = "noop"
}
}
}
}
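The dynamic redirect template above is driven entirely by Consul KV under redirect/redbrick/, so adding or changing a short link should only need a KV write rather than an edit to this job. A minimal sketch of that workflow, assuming access to the cluster's Consul agent (the key name "example" and the target URL below are placeholders, not real entries):

# create example.redbrick.dcu.ie -> https://wiki.redbrick.dcu.ie/example
consul kv put redirect/redbrick/example https://wiki.redbrick.dcu.ie/example

# list every configured redirect
consul kv get -recurse redirect/redbrick/

# remove it again
consul kv delete redirect/redbrick/example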

View file

@ -0,0 +1,44 @@
job "uptime-kuma" {
datacenters = ["aperture"]
type = "service"
group "web" {
count = 1
network {
port "http" {
to = 3001
}
}
service {
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "2s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.uptime-kuma.rule=Host(`status.redbrick.dcu.ie`)",
"traefik.http.routers.uptime-kuma.entrypoints=web,websecure",
"traefik.http.routers.uptime-kuma.tls.certresolver=lets-encrypt",
]
}
task "web" {
driver = "docker"
config {
image = "louislam/uptime-kuma:1"
ports = ["http"]
volumes = [
"/storage/nomad/uptime-kuma/data:/app/data"
]
}
}
}
}

View file

@@ -27,12 +27,18 @@ job "atlas" {
tags = [
"traefik.enable=true",
- "traefik.http.routers.nginx-atlas.rule=Host(`redbrick.dcu.ie`) || Host(`rb.dcu.ie`)",
+ "traefik.http.routers.nginx-atlas.rule=Host(`redbrick.dcu.ie`) || Host(`www.redbrick.dcu.ie`) || Host(`www.rb.dcu.ie`) || Host(`rb.dcu.ie`)",
"traefik.http.routers.nginx-atlas.entrypoints=web,websecure",
"traefik.http.routers.nginx-atlas.tls.certresolver=lets-encrypt",
- "traefik.http.routers.nginx-atlas.middlewares=redirect-user-web",
+ "traefik.http.routers.nginx-atlas.middlewares=atlas-www-redirect,redirect-user-web",
+ # redirect redbrick.dcu.ie/~user to user.redbrick.dcu.ie
"traefik.http.middlewares.redirect-user-web.redirectregex.regex=https://redbrick\\.dcu\\.ie/~([^/]*)/?([^/].*)?",
"traefik.http.middlewares.redirect-user-web.redirectregex.replacement=https://$1.redbrick.dcu.ie/$2",
+ "traefik.http.middlewares.redirect-user-web.redirectregex.permanent=true",
+ # redirect www.redbrick.dcu.ie to redbrick.dcu.ie
+ "traefik.http.middlewares.atlas-www-redirect.redirectregex.regex=^https?://www.redbrick.dcu.ie/(.*)",
+ "traefik.http.middlewares.atlas-www-redirect.redirectregex.replacement=https://redbrick.dcu.ie/$${1}",
+ "traefik.http.middlewares.atlas-www-redirect.redirectregex.permanent=true",
]
}

View file

@ -0,0 +1,59 @@
job "github-actions-runner" {
datacenters = ["aperture"]
type = "service"
meta {
version = "2.320.0"
sha256 = "93ac1b7ce743ee85b5d386f5c1787385ef07b3d7c728ff66ce0d3813d5f46900"
}
group "github-actions" {
count = 3
spread {
attribute = "${node.unique.id}"
weight = 100
}
task "actions-runner" {
driver = "raw_exec"
# user = "nomad"
config {
command = "/bin/bash"
args = ["${NOMAD_TASK_DIR}/bootstrap.sh"]
}
template {
data = <<EOF
#!/bin/bash
export RUNNER_ALLOW_RUNASROOT=1
echo "Querying API for registration token..."
reg_token=$(curl -L \
-X POST \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer {{ key "github/actions-runner/token" }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/orgs/redbrick/actions/runners/registration-token | jq -r '.token')
echo "Configuring runner..."
bash -c "${NOMAD_TASK_DIR}/config.sh --unattended --url https://github.com/redbrick --token ${reg_token} --name $(hostname) --labels aperture,deployment-runner --replace"
echo "Running actions runner..."
bash "${NOMAD_TASK_DIR}/run.sh"
EOF
destination = "local/bootstrap.sh"
}
artifact {
source = "https://github.com/actions/runner/releases/download/v2.320.0/actions-runner-linux-x64-2.320.0.tar.gz"
options {
checksum = "sha256:93ac1b7ce743ee85b5d386f5c1787385ef07b3d7c728ff66ce0d3813d5f46900"
}
}
}
}
}
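Each runner registers itself against the redbrick org with a short-lived registration token, so a quick way to confirm the three allocations came up is to list the org's runners through the same GitHub API the bootstrap script calls. A sketch only; GH_TOKEN here stands in for the org-admin token stored in Consul at github/actions-runner/token:

curl -s \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer $GH_TOKEN" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  https://api.github.com/orgs/redbrick/actions/runners \
  | jq '.runners[] | {name, status, labels: [.labels[].name]}'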

View file

@@ -20,9 +20,9 @@ job "hedgedoc-backup" {
data = <<EOH
#!/bin/bash
- file=/storage/backups/nomad/postgres/hedgedoc/postgresql-hedgedoc-$(date +%Y-%m-%d_%H-%M-%S).sql
+ file=/storage/backups/nomad/hedgedoc/postgresql-hedgedoc-$(date +%Y-%m-%d_%H-%M-%S).sql
- mkdir -p /storage/backups/nomad/postgres/hedgedoc
+ mkdir -p /storage/backups/nomad/hedgedoc
alloc_id=$(nomad job status hedgedoc | grep running | tail -n 1 | cut -d " " -f 1)
@@ -30,7 +30,7 @@ job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
nomad alloc exec -task hedgedoc-db $alloc_id pg_dumpall -U {{ key "hedgedoc/db/user" }} > "${file}"
- find /storage/backups/nomad/postgres/hedgedoc/postgresql-hedgedoc* -ctime +3 -exec rm {} \; || true
+ find /storage/backups/nomad/hedgedoc/postgresql-hedgedoc* -ctime +3 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"

View file

@@ -21,7 +21,7 @@ job "hedgedoc" {
check {
type = "http"
- path = "/"
+ path = "/_health"
interval = "10s"
timeout = "2s"
}
@@ -41,42 +41,54 @@ job "hedgedoc" {
task "app" {
driver = "docker"
- constraint {
- attribute = "${attr.unique.hostname}"
- value = "chell"
- }
config {
- image = "quay.io/hedgedoc/hedgedoc:1.6.0"
+ image = "quay.io/hedgedoc/hedgedoc:1.10.2"
ports = ["http"]
+ volumes = [
+ "/storage/nomad/hedgedoc/banner:/hedgedoc/public/banner",
+ ]
}
template {
data = <<EOH
- CMD_IMAGE_UPLOAD_TYPE = "imgur"
- CMD_IMGUR_CLIENTID = "{{ key "hedgedoc/imgur/clientid" }}"
- CMD_IMGUR_CLIENTSECRET = "{{ key "hedgedoc/imgur/clientsecret" }}"
- CMD_DB_URL = "postgres://{{ key "hedgedoc/db/user" }}:{{ key "hedgedoc/db/password" }}@{{ env "NOMAD_ADDR_db" }}/{{ key "hedgedoc/db/name" }}"
- CMD_ALLOW_FREEURL = "false"
- CMD_DEFAULT_PERMISSION = "private"
- CMD_DOMAIN = "md.redbrick.dcu.ie"
- CMD_ALLOW_ORIGIN = ["md.redbrick.dcu.ie", "md.rb.dcu.ie"]
- CMD_HSTS_PRELOAD = "true"
- CMD_USE_CDN = "true"
- CMD_PROTOCOL_USESSL = "true"
- CMD_URL_ADDPORT = "false"
- CMD_ALLOW_EMAIL_REGISTER = "false"
- CMD_ALLOW_ANONYMOUS = "false"
- CMD_EMAIL = "false"
- CMD_LDAP_URL = "{{ key "hedgedoc/ldap/url" }}"
- CMD_LDAP_SEARCHBASE = "ou=accounts,o=redbrick"
- CMD_LDAP_SEARCHFILTER = "{{`(uid={{username}})`}}"
- CMD_LDAP_PROVIDERNAME = "Redbrick"
- CMD_LDAP_USERIDFIELD = "uidNumber"
- CMD_LDAP_USERNAMEFIELD = "uid"
- CMD_ALLOW_GRAVATAR = "true"
- CMD_SESSION_SECRET = "{{ key "hedgedoc/session/secret" }}"
- CMD_LOG_LEVEL = "debug"
+ CMD_DB_URL = "postgres://{{ key "hedgedoc/db/user" }}:{{ key "hedgedoc/db/password" }}@{{ env "NOMAD_ADDR_db" }}/{{ key "hedgedoc/db/name" }}"
+ CMD_ALLOW_FREEURL = "false"
+ CMD_FORBIDDEN_NOTE_IDS = ['robots.txt', 'favicon.ico', 'api', 'build', 'css', 'docs', 'fonts', 'js', 'uploads', 'vendor', 'views', 'auth']
+ CMD_DOMAIN = "md.redbrick.dcu.ie"
+ CMD_ALLOW_ORIGIN = ["redbrick.dcu.ie", "rb.dcu.ie"]
+ CMD_USE_CDN = "true"
+ CMD_PROTOCOL_USESSL = "true"
+ CMD_URL_ADDPORT = "false"
+ CMD_LOG_LEVEL = "debug"
+ CMD_ENABLE_STATS_API = "true"
+ # Accounts
+ CMD_ALLOW_EMAIL_REGISTER = "false"
+ CMD_ALLOW_ANONYMOUS = "false"
+ CMD_ALLOW_ANONYMOUS_EDITS = "false"
+ CMD_EMAIL = "false"
+ CMD_LDAP_URL = "{{ key "hedgedoc/ldap/url" }}"
+ CMD_LDAP_SEARCHBASE = "ou=accounts,o=redbrick"
+ CMD_LDAP_SEARCHFILTER = "{{`(uid={{username}})`}}"
+ CMD_LDAP_PROVIDERNAME = "Redbrick"
+ CMD_LDAP_USERIDFIELD = "uidNumber"
+ CMD_LDAP_USERNAMEFIELD = "uid"
+ CMD_SESSION_SECRET = "{{ key "hedgedoc/session/secret" }}"
+ CMD_DEFAULT_PERMISSION = "limited"
+ # Security/Privacy
+ CMD_HSTS_PRELOAD = "true"
+ CMD_CSP_ENABLE = "true"
+ CMD_HSTS_INCLUDE_SUBDOMAINS = "true"
+ CMD_CSP_ADD_DISQUS = "false"
+ CMD_CSP_ADD_GOOGLE_ANALYTICS= "false"
+ CMD_CSP_ALLOW_PDF_EMBED = "true"
+ CMD_ALLOW_GRAVATAR = "true"
+ # Uploads
+ CMD_IMAGE_UPLOAD_TYPE = "imgur"
+ CMD_IMGUR_CLIENTID = "{{ key "hedgedoc/imgur/clientid" }}"
+ CMD_IMGUR_CLIENTSECRET = "{{ key "hedgedoc/imgur/clientsecret" }}"
EOH
destination = "local/.env"
env = true
@@ -86,17 +98,12 @@ EOH
task "hedgedoc-db" {
driver = "docker"
- constraint {
- attribute = "${attr.unique.hostname}"
- value = "chell"
- }
config {
- image = "postgres:9.6-alpine"
+ image = "postgres:13.4-alpine"
ports = ["db"]
volumes = [
- "/opt/postgres/hedgedoc:/var/lib/postgresql/data"
+ "/storage/nomad/hedgedoc:/var/lib/postgresql/data",
]
}

View file

@@ -1,4 +1,4 @@
- job "postgres-backup" {
+ job "paperless-backup" {
datacenters = ["aperture"]
type = "batch"
@@ -20,17 +20,17 @@ job "postgres-backup" {
data = <<EOH
#!/bin/bash
- file=/storage/backups/nomad/postgres/postgres-$(date +%Y-%m-%d_%H-%M-%S).sql
+ file=/storage/backups/nomad/paperless/postgresql-paperless-$(date +%Y-%m-%d_%H-%M-%S).sql
- mkdir -p /storage/backups/nomad/postgres
+ mkdir -p /storage/backups/nomad/paperless
- alloc_id=$(nomad job status postgres | grep running | tail -n 1 | cut -d " " -f 1)
+ alloc_id=$(nomad job status paperless | grep running | tail -n 1 | cut -d " " -f 1)
job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
- nomad alloc exec $alloc_id pg_dumpall -U {{ key "postgres/username/root" }} > "${file}"
+ nomad alloc exec -task db $alloc_id pg_dumpall -U {{ key "paperless/db/user" }} > "${file}"
- find /storage/backups/nomad/postgres/hedgedoc/postgres* -ctime +3 -exec rm {} \; || true
+ find /storage/backups/nomad/paperless/postgresql-paperless* -ctime +3 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"

jobs/services/paperless.hcl (new file)
View file

@ -0,0 +1,118 @@
job "paperless" {
datacenters = ["aperture"]
type = "service"
group "paperless-web" {
network {
port "http" {
to = 8000
}
port "redis" {
to = 6379
}
port "db" {
to = 5432
}
}
service {
name = "paperless"
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "2s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.paperless.rule=Host(`paperless.redbrick.dcu.ie`) || Host(`paperless.rb.dcu.ie`)",
"traefik.http.routers.paperless.entrypoints=websecure",
"traefik.http.routers.paperless.tls=true",
"traefik.http.routers.paperless.tls.certresolver=lets-encrypt",
"traefik.http.middlewares.paperless.headers.contentSecurityPolicy=default-src 'self'; img-src 'self' data:"
]
}
task "web" {
driver = "docker"
config {
image = "ghcr.io/paperless-ngx/paperless-ngx:latest"
ports = ["http"]
volumes = [
"/storage/nomad/paperless/consume:/usr/src/paperless/consume",
"/storage/nomad/paperless/data:/usr/src/paperless/data",
"/storage/nomad/paperless/media:/usr/src/paperless/media",
"/storage/nomad/paperless/export:/usr/src/paperless/export",
"/storage/nomad/paperless/preconsume:/usr/src/paperless/preconsume",
]
}
template {
data = <<EOH
PAPERLESS_REDIS = "redis://{{ env "NOMAD_ADDR_redis" }}"
PAPERLESS_DBHOST = "{{ env "NOMAD_IP_db" }}"
PAPERLESS_DBPORT = "{{ env "NOMAD_HOST_PORT_db" }}"
PAPERLESS_DBPASS={{ key "paperless/db/password" }}
PAPERLESS_DBUSER={{ key "paperless/db/user" }}
PAPERLESS_DBNAME={{ key "paperless/db/name" }}
PAPERLESS_SECRETKEY={{ key "paperless/secret_key" }}
PAPERLESS_URL=https://paperless.redbrick.dcu.ie
PAPERLESS_ADMIN_USER={{ key "paperless/admin/user" }}
PAPERLESS_ADMIN_PASSWORD={{ key "paperless/admin/password" }}
PAPERLESS_ALLOWED_HOSTS="paperless.redbrick.dcu.ie,paperless.rb.dcu.ie,10.10.0.4,10.10.0.5,10.10.0.6" # allow internal aperture IPs for health check
PAPERLESS_CONSUMER_POLLING=1
EOH
destination = "local/.env"
env = true
}
# PAPERLESS_PRE_CONSUME_SCRIPT={{ key "paperless/env/preconsume-script" }}
resources {
cpu = 800
memory = 1000
}
}
task "broker" {
driver = "docker"
config {
image = "docker.io/library/redis:7"
ports = ["redis"]
}
resources {
cpu = 300
memory = 50
}
}
task "db" {
driver = "docker"
config {
image = "postgres:16-alpine"
ports = ["db"]
volumes = [
"/storage/nomad/paperless/db:/var/lib/postgresql/data"
]
}
template {
data = <<EOH
POSTGRES_PASSWORD={{ key "paperless/db/password" }}
POSTGRES_USER={{ key "paperless/db/user" }}
POSTGRES_NAME={{ key "paperless/db/name" }}
EOH
destination = "local/db.env"
env = true
}
}
}
}

View file

@@ -7,12 +7,15 @@ job "plausible" {
port "http" {
to = 8000
}
- port "db" {
+ port "clickhouse" {
static = 8123
}
+ port "db" {
+ static = 5432
+ }
}
- task "plausible" {
+ task "app" {
service {
name = "plausible"
port = "http"
@@ -35,8 +38,11 @@ job "plausible" {
driver = "docker"
config {
- image = "ghcr.io/plausible/community-edition:v2.1.1"
+ image = "ghcr.io/plausible/community-edition:v2.1"
ports = ["http"]
+ volumes = [
+ "/storage/nomad/${NOMAD_JOB_NAME}/${NOMAD_TASK_NAME}:/var/lib/plausible"
+ ]
command = "/bin/sh"
args = ["-c", "sleep 10 && /entrypoint.sh db migrate && /entrypoint.sh run"]
@@ -44,6 +50,8 @@ job "plausible" {
template {
data = <<EOH
+ TMPDIR=/var/lib/plausible/tmp
BASE_URL=https://plausible.redbrick.dcu.ie
SECRET_KEY_BASE={{ key "plausible/secret" }}
TOTP_VAULT_KEY={{ key "plausible/totp/key" }}
@@ -57,8 +65,8 @@ GOOGLE_CLIENT_ID={{ key "plausible/google/client_id" }}
GOOGLE_CLIENT_SECRET={{ key "plausible/google/client_secret" }}
# Database settings
- DATABASE_URL=postgres://{{ key "plausible/db/user" }}:{{ key "plausible/db/password" }}@postgres.service.consul:5432/{{ key "plausible/db/name" }}
+ DATABASE_URL=postgres://{{ key "plausible/db/user" }}:{{ key "plausible/db/password" }}@{{ env "NOMAD_ADDR_db" }}/{{ key "plausible/db/name" }}
- CLICKHOUSE_DATABASE_URL=http://{{ env "NOMAD_ADDR_db" }}/plausible_events_db
+ CLICKHOUSE_DATABASE_URL=http://{{ env "NOMAD_ADDR_clickhouse" }}/plausible_events_db
# Email settings
MAILER_NAME="Redbrick Plausible"
@@ -80,24 +88,43 @@ EOH
}
}
- task "clickhouse" {
- constraint {
- attribute = "${attr.unique.hostname}"
- value = "chell"
- }
+ task "db" {
+ driver = "docker"
+ config {
+ image = "postgres:17-alpine"
+ ports = ["db"]
+ volumes = [
+ "/storage/nomad/${NOMAD_JOB_NAME}/${NOMAD_TASK_NAME}:/var/lib/postgresql/data",
+ ]
+ }
+ template {
+ data = <<EOH
+ POSTGRES_PASSWORD={{ key "plausible/db/password" }}
+ POSTGRES_USER={{ key "plausible/db/user" }}
+ POSTGRES_NAME={{ key "plausible/db/name" }}
+ EOH
+ destination = "local/db.env"
+ env = true
+ }
+ }
+ task "clickhouse" {
service {
name = "plausible-clickhouse"
- port = "db"
+ port = "clickhouse"
}
driver = "docker"
config {
image = "clickhouse/clickhouse-server:24.3.3.102-alpine"
- ports = ["db"]
+ ports = ["clickhouse"]
volumes = [
- "/opt/plausible/clickhouse:/var/lib/clickhouse",
+ "/storage/nomad/${NOMAD_JOB_NAME}/${NOMAD_TASK_NAME}:/var/lib/clickhouse",
"local/clickhouse.xml:/etc/clickhouse-server/config.d/logging.xml:ro",
"local/clickhouse-user-config.xml:/etc/clickhouse-server/users.d/logging.xml:ro"
]
@@ -140,7 +167,7 @@ EOH
}
resources {
- memory = 800
+ memory = 1000
}
}
}

View file

@ -1,96 +0,0 @@
job "postgres" {
datacenters = ["aperture"]
constraint {
attribute = "${attr.unique.hostname}"
value = "wheatley"
}
group "db" {
network {
port "db" {
static = 5432
}
}
task "postgres-db" {
driver = "docker"
template {
data = <<EOH
POSTGRES_PASSWORD="{{ key "postgres/password/root" }}"
POSTGRES_USER="{{ key "postgres/username/root" }}"
EOH
destination = "local/file.env"
env = true
}
config {
image = "postgres:latest"
ports = ["db"]
volumes = [
"/opt/postgres:/var/lib/postgresql/data",
"local/postgresql.conf:/etc/postgres/postgresql.conf",
"local/pg_hba.conf:/pg_hba.conf",
]
}
template {
data = <<EOH
max_connections = 100
shared_buffers = 2GB
effective_cache_size = 6GB
maintenance_work_mem = 512MB
checkpoint_completion_target = 0.9
wal_buffers = 16MB
default_statistics_target = 100
random_page_cost = 1.1
effective_io_concurrency = 200
work_mem = 5242kB
min_wal_size = 1GB
max_wal_size = 4GB
max_worker_processes = 4
max_parallel_workers_per_gather = 2
max_parallel_workers = 4
max_parallel_maintenance_workers = 2
hba_file = "/pg_hba.conf"
EOH
destination = "local/postgresql.conf"
}
template {
data = <<EOH
local all all trust
host all all 127.0.0.1/32 trust
host all all ::1/128 trust
local replication all trust
host replication all 127.0.0.1/32 trust
host replication all ::1/128 trust
host all all all scram-sha-256
EOH
destination = "local/pg_hba.conf"
}
resources {
cpu = 400
memory = 800
}
service {
name = "postgres"
port = "db"
check {
type = "tcp"
interval = "2s"
timeout = "2s"
}
}
}
}
}

View file

@ -0,0 +1,50 @@
job "privatebin-backup" {
datacenters = ["aperture"]
type = "batch"
periodic {
crons = ["0 */3 * * * *"]
prohibit_overlap = true
}
group "db-backup" {
task "postgres-backup" {
driver = "raw_exec"
config {
command = "/bin/bash"
args = ["local/script.sh"]
}
template {
data = <<EOH
#!/bin/bash
file=/storage/backups/nomad/privatebin/postgresql-privatebin-$(date +%Y-%m-%d_%H-%M-%S).sql
mkdir -p /storage/backups/nomad/privatebin
alloc_id=$(nomad job status privatebin | grep running | tail -n 1 | cut -d " " -f 1)
job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
nomad alloc exec -task db $alloc_id pg_dumpall -U {{ key "privatebin/db/user" }} > "${file}"
find /storage/backups/nomad/privatebin/postgresql-privatebin* -ctime +3 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"
exit 0
else
rm $file
curl -H "Content-Type: application/json" -d \
'{"content": "<@&585512338728419341> `PostgreSQL` backup for **'"${job_name}"'** has just **FAILED**\nFile name: `'"$file"'`\nDate: `'"$(TZ=Europe/Dublin date)"'`\nTurn off this script with `nomad job stop '"${job_name}"'` \n\n## Remember to restart this backup job when fixed!!!"}' \
{{ key "postgres/webhook/discord" }}
fi
EOH
destination = "local/script.sh"
}
}
}
}
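Restoring one of these dumps is essentially the script above in reverse: stream the SQL back into psql inside the db task of the running privatebin job. A rough sketch using the same alloc lookup as the backup script (the user and timestamp placeholders below are illustrative, not literal values):

alloc_id=$(nomad job status privatebin | grep running | tail -n 1 | cut -d " " -f 1)
nomad alloc exec -i -task db "$alloc_id" \
  psql -U <privatebin-db-user> -d postgres -f - \
  < /storage/backups/nomad/privatebin/postgresql-privatebin-<timestamp>.sql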

View file

@@ -10,6 +10,9 @@ job "privatebin" {
port "http" {
to = 8080
}
+ port "db" {
+ to = 5432
+ }
}
service {
@@ -25,7 +28,7 @@ job "privatebin" {
tags = [
"traefik.enable=true",
- "traefik.http.routers.privatebin.rule=Host(`paste.rb.dcu.ie`) || Host(`paste.redbrick.dcu.ie`)",
+ "traefik.http.routers.privatebin.rule=Host(`paste.redbrick.dcu.ie`) || Host(`paste.rb.dcu.ie`)",
"traefik.http.routers.privatebin.entrypoints=web,websecure",
"traefik.http.routers.privatebin.tls.certresolver=lets-encrypt",
]
@@ -42,15 +45,10 @@ job "privatebin" {
"local/conf.php:/srv/data/conf.php",
]
}
- template {
- destination = "local/.env"
- env = true
- change_mode = "restart"
- data = <<EOH
- TZ=Europe/Dublin
- PHP_TZ=Europe/Dublin
- CONFIG_PATH=/srv/data/
- EOH
+ env {
+ TZ = "Europe/Dublin"
+ PHP_TZ = "Europe/Dublin"
+ CONFIG_PATH = "/srv/data/"
}
template {
@@ -59,7 +57,7 @@ EOH
[main]
name = "Redbrick PasteBin"
- basepath = "https://paste.rb.dcu.ie/"
+ basepath = "https://paste.redbrick.dcu.ie/"
discussion = true
@@ -185,13 +183,36 @@ batchsize = 10
[model]
class = Database
[model_options]
- dsn = "pgsql:host=postgres.service.consul;dbname={{ key "privatebin/db/name" }}"
+ dsn = "pgsql:host={{ env "NOMAD_ADDR_db" }};dbname={{ key "privatebin/db/name" }}"
- tbl = "privatebin_" ; table prefix
+ tbl = "{{ key "privatebin/db/name" }}" ; table prefix
usr = "{{ key "privatebin/db/user" }}"
pwd = "{{ key "privatebin/db/password" }}"
opt[12] = true ; PDO::ATTR_PERSISTENT ; use persistent connections - default
EOH
}
}
+ task "db" {
+ driver = "docker"
+ config {
+ image = "postgres:17-alpine"
+ ports = ["db"]
+ volumes = [
+ "/storage/nomad/${NOMAD_JOB_NAME}/${NOMAD_TASK_NAME}:/var/lib/postgresql/data",
+ ]
+ }
+ template {
+ data = <<EOH
+ POSTGRES_PASSWORD={{ key "privatebin/db/password" }}
+ POSTGRES_USER={{ key "privatebin/db/user" }}
+ POSTGRES_NAME={{ key "privatebin/db/name" }}
+ EOH
+ destination = "local/db.env"
+ env = true
+ }
+ }
}
}

View file

@ -0,0 +1,50 @@
job "vaultwarden-backup" {
datacenters = ["aperture"]
type = "batch"
periodic {
crons = ["0 */3 * * * *"]
prohibit_overlap = true
}
group "db-backup" {
task "postgres-backup" {
driver = "raw_exec"
config {
command = "/bin/bash"
args = ["local/script.sh"]
}
template {
data = <<EOH
#!/bin/bash
file=/storage/backups/nomad/vaultwarden/postgresql-vaultwarden-$(date +%Y-%m-%d_%H-%M-%S).sql
mkdir -p /storage/backups/nomad/vaultwarden
alloc_id=$(nomad job status vaultwarden | grep running | tail -n 1 | cut -d " " -f 1)
job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
nomad alloc exec -task db $alloc_id pg_dumpall -U {{ key "vaultwarden/db/user" }} > "${file}"
find /storage/backups/nomad/vaultwarden/postgresql-vaultwarden* -ctime +3 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"
exit 0
else
rm $file
curl -H "Content-Type: application/json" -d \
'{"content": "<@&585512338728419341> `PostgreSQL` backup for **'"${job_name}"'** has just **FAILED**\nFile name: `'"$file"'`\nDate: `'"$(TZ=Europe/Dublin date)"'`\nTurn off this script with `nomad job stop '"${job_name}"'` \n\n## Remember to restart this backup job when fixed!!!"}' \
{{ key "postgres/webhook/discord" }}
fi
EOH
destination = "local/script.sh"
}
}
}
}

View file

@@ -9,6 +9,9 @@ job "vaultwarden" {
port "http" {
to = 80
}
+ port "db" {
+ to = 5432
+ }
}
service {
@@ -31,14 +34,15 @@ job "vaultwarden" {
ports = ["http"]
volumes = [
- "/storage/nomad/vaultwarden:/data"
+ "/storage/nomad/${NOMAD_JOB_NAME}:/data",
+ "/etc/localtime:/etc/localtime:ro"
]
}
template {
data = <<EOF
DOMAIN=https://vault.redbrick.dcu.ie
- DATABASE_URL=postgresql://{{ key "vaultwarden/db/user" }}:{{ key "vaultwarden/db/password" }}@postgres.service.consul:5432/{{ key "vaultwarden/db/name" }}
+ DATABASE_URL=postgresql://{{ key "vaultwarden/db/user" }}:{{ key "vaultwarden/db/password" }}@{{ env "NOMAD_ADDR_db" }}/{{ key "vaultwarden/db/name" }}
SIGNUPS_ALLOWED=false
INVITATIONS_ALLOWED=true
@@ -55,14 +59,37 @@ EOF
destination = "local/env"
env = true
}
# These yubico variables are not necessary for yubikey support, only to verify the keys with yubico.
#YUBICO_CLIENT_ID={{ key "vaultwarden/yubico/client_id" }}
#YUBICO_SECRET_KEY={{ key "vaultwarden/yubico/secret_key" }}
resources {
cpu = 500
memory = 500
}
}
+ task "db" {
+ driver = "docker"
+ config {
+ image = "postgres:17-alpine"
+ ports = ["db"]
+ volumes = [
+ "/storage/nomad/${NOMAD_JOB_NAME}/${NOMAD_TASK_NAME}:/var/lib/postgresql/data",
+ ]
+ }
+ template {
+ data = <<EOH
+ POSTGRES_PASSWORD={{ key "vaultwarden/db/password" }}
+ POSTGRES_USER={{ key "vaultwarden/db/user" }}
+ POSTGRES_NAME={{ key "vaultwarden/db/name" }}
+ EOH
+ destination = "local/db.env"
+ env = true
+ }
+ }
}
}

View file

@@ -41,7 +41,7 @@ $wgDBpassword = "{{ key "mediawiki/db/password" }}";
# MySQL specific settings
$wgDBprefix = "rbwiki_";
# MySQL table options to use during installation or update
- $wgDBTableOptions = "ENGINE=InnoDB, DEFAULT CHARSET=binary";
+ $wgDBTableOptions = "ENGINE=InnoDB, DEFAULT CHARSET=utf8mb4";
## Shared memory settings
$wgMainCacheType = CACHE_NONE;
@@ -89,11 +89,15 @@ wfLoadSkin( 'Vector' );
wfLoadSkin( 'Citizen' );
wfLoadSkin( 'Timeless' );
wfLoadSkin( 'MinervaNeue' );
+ wfLoadSkin( 'Medik' );
$wgCitizenThemeColor = "#a81e22";
$wgCitizenShowPageTools = "permission";
$wgCitizenSearchDescriptionSource = "pagedescription";
+ $wgMedikColor = "#a81e22";
+ $wgMedikShowLogo = "main";
$wgLocalisationUpdateDirectory = "$IP/cache";
# load extensions

View file

@@ -17,7 +17,7 @@ job "mediawiki-backup" {
}
template {
data = <<EOH
#!/bin/bash
file=/storage/backups/nomad/wiki/mysql/rbwiki-mysql-$(date +%Y-%m-%d_%H-%M-%S).sql
@@ -30,7 +30,7 @@ job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
nomad alloc exec -task rbwiki-db $alloc_id mariadb-dump -u {{ key "mediawiki/db/username" }} -p'{{ key "mediawiki/db/password"}}' {{ key "mediawiki/db/name" }} > "${file}"
- find /storage/backups/nomad/wiki/mysql/rbwiki-mysql* -ctime +3 -exec rm {} \; || true
+ find /storage/backups/nomad/wiki/mysql/rbwiki-mysql* -ctime +30 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"
@@ -56,7 +56,7 @@ EOH
}
template {
data = <<EOH
#!/bin/bash
file=/storage/backups/nomad/wiki/xml/rbwiki-dump-$(date +%Y-%m-%d_%H-%M-%S).xml

View file

@@ -1,6 +1,6 @@
job "mediawiki" {
datacenters = ["aperture"]
type = "service"
meta {
domain = "wiki.redbrick.dcu.ie"
@@ -27,10 +27,10 @@ job "mediawiki" {
port = "http"
check {
type = "http"
path = "/Main_Page"
interval = "10s"
timeout = "5s"
}
tags = [
@@ -61,11 +61,11 @@ job "mediawiki" {
]
}
resources {
cpu = 200
memory = 100
}
template {
data = <<EOH
# user www-data www-data;
error_log /dev/stderr error;
events {
@@ -139,9 +139,9 @@ EOH
}
resources {
cpu = 4000
memory = 1200
}
template {
data = <<EOH
@@ -179,7 +179,7 @@ EOH
}
template {
data = file("LocalSettings.php")
destination = "local/LocalSettings.php"
}
}
@@ -189,10 +189,10 @@ EOH
port = "db"
check {
name = "mariadb_probe"
type = "tcp"
interval = "10s"
timeout = "2s"
}
}
@@ -213,6 +213,18 @@ EOH
template {
data = <<EOH
[mysqld]
+ # Ensure full UTF-8 support
+ character-set-server = utf8mb4
+ collation-server = utf8mb4_unicode_ci
+ skip-character-set-client-handshake
+ # Fix 1000-byte key length issue
+ innodb_large_prefix = 1
+ innodb_file_format = Barracuda
+ innodb_file_per_table = 1
+ innodb_default_row_format = dynamic
+ # Performance optimizations (Keep these based on your system)
max_connections = 100
key_buffer_size = 2G
query_cache_size = 0
@@ -224,22 +236,23 @@ innodb_io_capacity = 200
tmp_table_size = 5242K
max_heap_table_size = 5242K
innodb_log_buffer_size = 16M
- innodb_file_per_table = 1
- bind-address = 0.0.0.0
# Logging
slow_query_log = 1
slow_query_log_file = /var/log/mysql/slow.log
long_query_time = 1
+ # Network
+ bind-address = 0.0.0.0
EOH
destination = "local/conf.cnf"
}
resources {
cpu = 800
- memory = 1200
+ memory = 2500
}
template {
data = <<EOH
@@ -250,7 +263,7 @@ MYSQL_RANDOM_ROOT_PASSWORD=yes
EOH
destination = "local/.env"
env = true
}
}
}

View file

@@ -12,7 +12,7 @@ job "ams-amikon" {
network {
port "http" {
- to = 80
+ to = 3000
}
}
@@ -20,35 +20,45 @@ job "ams-amikon" {
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "2s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.ams-amikon.rule=Host(`amikon.me`) || Host(`www.amikon.me`)",
"traefik.http.routers.ams-amikon.entrypoints=web,websecure",
"traefik.http.routers.ams-amikon.tls.certresolver=lets-encrypt",
- "traefik.http.routers.ams-amikon.middlewares=www-redirect",
- "traefik.http.middlewares.www-redirect.redirectregex.regex=^https?://www.amikon.me/(.*)",
- "traefik.http.middlewares.www-redirect.redirectregex.replacement=https://amikon.me/$${1}",
- "traefik.http.middlewares.www-redirect.redirectregex.permanent=true",
+ "traefik.http.routers.ams-amikon.middlewares=amikon-www-redirect",
+ "traefik.http.middlewares.amikon-www-redirect.redirectregex.regex=^https?://www.amikon.me/(.*)",
+ "traefik.http.middlewares.amikon-www-redirect.redirectregex.replacement=https://amikon.me/$${1}",
+ "traefik.http.middlewares.amikon-www-redirect.redirectregex.permanent=true",
]
}
- task "amikon-nginx" {
+ task "amikon-node" {
driver = "docker"
config {
image = "ghcr.io/dcuams/amikon-site-v2:latest"
force_pull = true
ports = ["http"]
}
+ template {
+ data = <<EOF
+ EMAIL={{ key "ams/amikon/email/user" }}
+ EMAIL_PASS={{ key "ams/amikon/email/password" }}
+ TO_EMAIL={{ key "ams/amikon/email/to" }}
+ EOF
+ destination = ".env"
+ env = true
+ }
resources {
- cpu = 100
- memory = 50
+ cpu = 800
+ memory = 500
}
}
}

View file

@ -0,0 +1,107 @@
job "dcusr-listmonk" {
datacenters = ["aperture"]
type = "service"
meta {
domain = "lists.solarracing.ie"
}
group "listmonk" {
network {
port "http" {
}
port "db" {
to = 5432
}
}
service {
name = "listmonk"
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "2s"
}
tags = [
"traefik.enable=true",
"traefik.port=${NOMAD_PORT_http}",
"traefik.http.routers.dcusr-listmonk.entrypoints=web,websecure",
"traefik.http.routers.dcusr-listmonk.rule=Host(`${NOMAD_META_domain}`)",
"traefik.http.routers.dcusr-listmonk.tls=true",
"traefik.http.routers.dcusr-listmonk.tls.certresolver=lets-encrypt",
]
}
task "app" {
driver = "docker"
config {
image = "listmonk/listmonk:latest"
ports = ["http"]
entrypoint = ["./listmonk", "--static-dir=/listmonk/static"]
volumes = [
"/storage/nomad/dcusr-listmonk/static:/listmonk/static",
"/storage/nomad/dcusr-listmonk/postgres/:/var/lib/postgresql/data",
"local/config.toml:/listmonk/config.toml"
]
}
resources {
cpu = 1000
memory = 500
}
template {
data = <<EOH
[app]
address = "0.0.0.0:{{ env "NOMAD_PORT_http" }}"
admin_username = "{{ key "dcusr/listmonk/admin/username" }}"
admin_password = "{{ key "dcusr/listmonk/admin/password" }}"
# Database.
[db]
host = "{{ env "NOMAD_HOST_IP_db" }}"
port = {{ env "NOMAD_HOST_PORT_db" }}
user = "{{ key "dcusr/listmonk/db/username" }}"
password = "{{ key "dcusr/listmonk/db/password" }}"
database = "{{ key "dcusr/listmonk/db/name" }}"
ssl_mode = "disable"
max_open = 25
max_idle = 25
max_lifetime = "300s"
EOH
destination = "local/config.toml"
}
}
task "listmonk-db" {
driver = "docker"
config {
image = "postgres:17-alpine"
ports = ["db"]
volumes = [
"/storage/nomad/dcusr-listmonk/postgres:/var/lib/postgresql/data"
]
}
template {
data = <<EOH
POSTGRES_DB = "{{ key "dcusr/listmonk/db/name" }}"
POSTGRES_USER = "{{ key "dcusr/listmonk/db/username" }}"
POSTGRES_PASSWORD = "{{ key "dcusr/listmonk/db/password" }}"
EOH
destination = "local/db.env"
env = true
}
}
}
}

View file

@@ -126,7 +126,7 @@ EOH
driver = "docker"
config {
- image = "postgres:alpine"
+ image = "postgres:16-alpine"
ports = ["db"]
volumes = [

View file

@@ -53,6 +53,11 @@ DOCKER_PASS={{ key "dcusr/ghcr/password" }}
TO_EMAIL={{ key "dcusr/nodemailer/to" }}
EMAIL={{ key "dcusr/nodemailer/from" }}
EMAIL_PASS={{ key "dcusr/nodemailer/password" }}
+ LISTMONK_ENDPOINT={{ key "dcusr/listmonk/endpoint" }}
+ LISTMONK_USERNAME={{ key "dcusr/listmonk/username" }}
+ LISTMONK_PASSWORD={{ key "dcusr/listmonk/password" }}
+ LISTMONK_LIST_IDS={{ key "dcusr/listmonk/list/id" }}
+ RECAPTCHA_SECRET_KEY={{ key "dcusr/recaptcha/secret/key" }}
EOH
}
}

View file

@ -0,0 +1,47 @@
job "esports-discord-bot" {
datacenters = ["aperture"]
type = "service"
group "esports-bot" {
count = 1
task "esports-bot" {
driver = "docker"
config {
image = "ghcr.io/aydenjahola/discord-multipurpose-bot:main"
force_pull = true
}
resources {
cpu = 500
memory = 256
}
template {
data = <<EOH
BOT_TOKEN={{ key "socs/esports/bot/discord/token" }}
EMAIL_NAME={{ key "socs/esports/bot/email/name" }}
EMAIL_PASS={{ key "socs/esports/bot/email/pass" }}
EMAIL_USER={{key "socs/esports/bot/email/user" }}
MONGODB_URI={{key "socs/esports/bot/mongodb/uri"}}
RAPIDAPI_KEY={{ key "socs/esports/bot/rapidapi/key" }}
TRACKER_API_KEY={{ key "socs/esports/bot/trackerapi/key" }}
TRACKER_API_URL={{ key "socs/esports/bot/trackerapi/url" }}
WORDNIK_API_KEY={{key "socs/esports/bot/wordnikapi/key" }}
HUGGING_FACE_API_KEY={{ key "socs/esports/bot/huggingface/key" }}
RCON_HOST=esports-mc-rcon.service.consul
# https://discuss.hashicorp.com/t/passing-registered-ip-and-port-from-consul-to-env-nomad-job-section/35647
{{ range service "esports-mc-rcon" }}
RCON_PORT={{ .Port }}{{ end }}
RCON_PASSWORD={{ key "games/mc/esports-mc/rcon/password" }}
EOH
destination = "local/.env"
env = true
}
}
}
}
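The RCON_PORT line works because the esports-minecraft job registers an "esports-mc-rcon" service in Consul, so the bot template picks up whichever dynamic host port Nomad assigned. A quick way to check what that template will render, assuming a local Consul agent with DNS on the default port 8600:

# SRV record carries both the host and the dynamically mapped port
dig @127.0.0.1 -p 8600 esports-mc-rcon.service.consul SRV +short

# or read the port straight from the catalog API
curl -s http://127.0.0.1:8500/v1/catalog/service/esports-mc-rcon | jq '.[].ServicePort'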

View file

@@ -46,9 +46,6 @@ job "mps-site" {
username = "${DOCKER_USER}"
password = "${DOCKER_PASS}"
}
- volumes = [
- "local/hosts:/etc/hosts",
- ]
}
template {
@@ -60,23 +57,6 @@ EOH
env = true
}
- template {
- data = <<EOF
- 127.0.0.1 localhost
- ::1 localhost ip6-localhost ip6-loopback
- fe00::0 ip6-localnet
- ff00::0 ip6-mcastprefix
- ff02::1 ip6-allnodes
- ff02::2 ip6-allrouters
- 172.17.0.17 {{ env "NOMAD_TASK_NAME" }}
- # use internal IP for thecollegeview.ie as external IP isn't routable
- 192.168.0.158 thecollegeview.ie
- 192.168.0.158 www.thecollegeview.ie
- EOF
- destination = "local/hosts"
- }
resources {
cpu = 300
memory = 500

View file

@ -0,0 +1,49 @@
job "mps-thecollegeview-backup" {
datacenters = ["aperture"]
type = "batch"
periodic {
crons = ["0 */3 * * * *"]
prohibit_overlap = true
}
group "db-backup" {
task "mysql-backup" {
driver = "raw_exec"
config {
command = "/bin/bash"
args = ["local/mysql-backup.sh"]
}
template {
data = <<EOH
#!/bin/bash
file=/storage/backups/nomad/mps-thecollegeview/mysql/tcv-mysql-$(date +%Y-%m-%d_%H-%M-%S).sql
mkdir -p /storage/backups/nomad/mps-thecollegeview/mysql
alloc_id=$(nomad job status mps-thecollegeview | grep running | tail -n 1 | cut -d " " -f 1)
job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
nomad alloc exec -task tcv-db $alloc_id mariadb-dump -u {{ key "mps/thecollegeview/db/username" }} -p'{{ key "mps/thecollegeview/db/password"}}' {{ key "mps/thecollegeview/db/name" }} > "${file}"
find /storage/backups/nomad/mps-thecollegeview/mysql/tcv-mysql* -ctime +3 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"
exit 0
else
rm $file
curl -H "Content-Type: application/json" -d \
'{"content": "# <@&585512338728419341> `MySQL` backup for **'"${job_name}"'** has just **FAILED**\nFile name: `'"$file"'`\nDate: `'"$(TZ=Europe/Dublin date)"'`\nTurn off this script with `nomad job stop '"${job_name}"'` \n\n## Remember to restart this backup job when fixed!!!"}' \
{{ key "mysql/webhook/discord" }}
fi
EOH
destination = "local/mysql-backup.sh"
}
}
}
}

View file

@ -0,0 +1,257 @@
job "mps-thecollegeview" {
datacenters = ["aperture"]
type = "service"
meta {
domain = "thecollegeview.ie"
}
group "tcv" {
count = 1
network {
mode = "bridge"
port "http" {
to = 80
}
port "fpm" {
to = 9000
}
port "db" {
to = 3306
}
port "redis" {
to = 6379
}
}
service {
name = "tcv-web"
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "5s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.tcv.rule=Host(`${NOMAD_META_domain}`)",
"traefik.http.routers.tcv.entrypoints=web,websecure",
"traefik.http.routers.tcv.tls.certresolver=lets-encrypt",
]
}
task "tcv-nginx" {
driver = "docker"
config {
image = "nginx:alpine"
ports = ["http"]
volumes = [
"local/nginx.conf:/etc/nginx/nginx.conf",
"/storage/nomad/mps-thecollegeview:/var/www/html/",
]
group_add = [82] # www-data in alpine
}
resources {
cpu = 200
memory = 100
}
template {
data = <<EOH
# user www-data www-data;
error_log /dev/stderr error;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
server_tokens off;
error_log /dev/stderr error;
access_log /dev/stdout;
charset utf-8;
server {
server_name {{ env "NOMAD_META_domain" }};
listen 80;
listen [::]:80;
root /var/www/html;
index index.php index.html index.htm;
client_max_body_size 5m;
client_body_timeout 60;
# NOTE: Not used here, WP super cache rule used instead
# # Pass all folders to FPM
# location / {
# try_files $uri $uri/ /index.php?$args;
# }
# Pass the PHP scripts to FastCGI server
location ~ \.php$ {
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_pass {{ env "NOMAD_ADDR_fpm" }};
fastcgi_index index.php;
}
location ~ /\.ht {
deny all;
}
# WP Super Cache rules.
set $cache_uri $request_uri;
# POST requests and urls with a query string should always go to PHP
if ($request_method = POST) {
set $cache_uri 'null cache';
}
if ($query_string != "") {
set $cache_uri 'null cache';
}
# Don't cache uris containing the following segments
if ($request_uri ~* "(/wp-admin/|/xmlrpc.php|/wp-(app|cron|login|register|mail).php|wp-.*.php|/feed/|index.php|wp-comments-popup.php|wp-links-opml.php|wp-locations.php|sitemap(_index)?.xml|[a-z0-9_-]+-sitemap([0-9]+)?.xml)") {
set $cache_uri 'null cache';
}
# Don't use the cache for logged in users or recent commenters
if ($http_cookie ~* "comment_author|wordpress_[a-f0-9]+|wp-postpass|wordpress_logged_in") {
set $cache_uri 'null cache';
}
# Use cached or actual file if they exists, otherwise pass request to WordPress
location / {
try_files /wp-content/cache/supercache/$http_host/$cache_uri/index.html $uri $uri/ /index.php?$args ;
}
}
}
EOH
destination = "local/nginx.conf"
}
}
task "tcv-phpfpm" {
driver = "docker"
config {
image = "wordpress:php8.3-fpm-alpine"
ports = ["fpm"]
volumes = [
"/storage/nomad/mps-thecollegeview:/var/www/html/",
"local/custom.ini:/usr/local/etc/php/conf.d/custom.ini",
]
}
resources {
cpu = 800
memory = 500
}
template {
data = <<EOH
WORDPRESS_DB_HOST={{ env "NOMAD_ADDR_db" }}
WORDPRESS_DB_USER={{ key "mps/thecollegeview/db/username" }}
WORDPRESS_DB_PASSWORD={{ key "mps/thecollegeview/db/password" }}
WORDPRESS_DB_NAME={{ key "mps/thecollegeview/db/name" }}
WORDPRESS_TABLE_PREFIX=wp_2
WORDPRESS_CONFIG_EXTRA="define('WP_REDIS_HOST', '{{ env "NOMAD_ADDR_redis" }}');"
EOH
destination = "local/.env"
env = true
}
template {
data = <<EOH
pm.max_children = 10
upload_max_filesize = 64M
post_max_size = 64M
EOH
destination = "local/custom.ini"
}
}
service {
name = "tcv-db"
port = "db"
}
task "tcv-db" {
driver = "docker"
config {
image = "mariadb"
ports = ["db"]
volumes = [
"/storage/nomad/mps-thecollegeview/db:/var/lib/mysql",
]
}
template {
data = <<EOH
[mysqld]
max_connections = 100
key_buffer_size = 2G
query_cache_size = 0
innodb_buffer_pool_size = 6G
innodb_log_file_size = 512M
innodb_flush_log_at_trx_commit = 1
innodb_flush_method = O_DIRECT
innodb_io_capacity = 200
tmp_table_size = 5242K
max_heap_table_size = 5242K
innodb_log_buffer_size = 16M
innodb_file_per_table = 1
bind-address = 0.0.0.0
# Logging
slow_query_log = 1
slow_query_log_file = /var/log/mysql/slow.log
long_query_time = 1
EOH
destination = "local/conf.cnf"
}
resources {
cpu = 800
memory = 800
}
template {
data = <<EOH
MYSQL_DATABASE={{ key "mps/thecollegeview/db/name" }}
MYSQL_USER={{ key "mps/thecollegeview/db/username" }}
MYSQL_PASSWORD={{ key "mps/thecollegeview/db/password" }}
MYSQL_RANDOM_ROOT_PASSWORD=yes
EOH
destination = "local/.env"
env = true
}
}
task "redis" {
driver = "docker"
config {
image = "redis:latest"
ports = ["redis"]
}
resources {
cpu = 200
}
}
}
}

View file

@ -0,0 +1,49 @@
job "style-thelook-backup" {
datacenters = ["aperture"]
type = "batch"
periodic {
crons = ["0 */3 * * * *"]
prohibit_overlap = true
}
group "db-backup" {
task "mysql-backup" {
driver = "raw_exec"
config {
command = "/bin/bash"
args = ["local/mysql-backup.sh"]
}
template {
data = <<EOH
#!/bin/bash
file=/storage/backups/nomad/style-thelook/mysql/thelook-mysql-$(date +%Y-%m-%d_%H-%M-%S).sql
mkdir -p /storage/backups/nomad/style-thelook/mysql
alloc_id=$(nomad job status style-thelook | grep running | tail -n 1 | cut -d " " -f 1)
job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
nomad alloc exec -task thelook-db $alloc_id mariadb-dump -u {{ key "style/thelook/db/username" }} -p'{{ key "style/thelook/db/password"}}' {{ key "style/thelook/db/name" }} > "${file}"
find /storage/backups/nomad/style-thelook/mysql/thelook-mysql* -ctime +3 -exec rm {} \; || true
if [ -s "$file" ]; then # check if file exists and is not empty
echo "Backup successful"
exit 0
else
rm $file
curl -H "Content-Type: application/json" -d \
'{"content": "# <@&585512338728419341> `MySQL` backup for **'"${job_name}"'** has just **FAILED**\nFile name: `'"$file"'`\nDate: `'"$(TZ=Europe/Dublin date)"'`\nTurn off this script with `nomad job stop '"${job_name}"'` \n\n## Remember to restart this backup job when fixed!!!"}' \
{{ key "mysql/webhook/discord" }}
fi
EOH
destination = "local/mysql-backup.sh"
}
}
}
}

jobs/socs/style-thelook.hcl (new file)
View file

@ -0,0 +1,257 @@
job "style-thelook" {
datacenters = ["aperture"]
type = "service"
meta {
domain = "thelookonline.dcu.ie"
}
group "thelook" {
count = 1
network {
mode = "bridge"
port "http" {
to = 80
}
port "fpm" {
to = 9000
}
port "db" {
to = 3306
}
port "redis" {
to = 6379
}
}
service {
name = "thelook-web"
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "5s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.thelook.rule=Host(`${NOMAD_META_domain}`) || Host(`style.redbrick.dcu.ie`)",
"traefik.http.routers.thelook.entrypoints=web,websecure",
"traefik.http.routers.thelook.tls.certresolver=lets-encrypt",
]
}
task "thelook-nginx" {
driver = "docker"
config {
image = "nginx:alpine"
ports = ["http"]
volumes = [
"local/nginx.conf:/etc/nginx/nginx.conf",
"/storage/nomad/style-thelook:/var/www/html/",
]
group_add = [82] # www-data in alpine
}
resources {
cpu = 200
memory = 100
}
template {
data = <<EOH
# user www-data www-data;
error_log /dev/stderr error;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
server_tokens off;
error_log /dev/stderr error;
access_log /dev/stdout;
charset utf-8;
server {
server_name {{ env "NOMAD_META_domain" }};
listen 80;
listen [::]:80;
root /var/www/html;
index index.php index.html index.htm;
client_max_body_size 5m;
client_body_timeout 60;
# NOTE: Not used here, WP super cache rule used instead
# Pass all folders to FPM
# location / {
# try_files $uri $uri/ /index.php?$args;
# }
# Pass the PHP scripts to FastCGI server
location ~ \.php$ {
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_pass {{ env "NOMAD_ADDR_fpm" }};
fastcgi_index index.php;
}
location ~ /\.ht {
deny all;
}
# WP Super Cache rules.
set $cache_uri $request_uri;
# POST requests and urls with a query string should always go to PHP
if ($request_method = POST) {
set $cache_uri 'null cache';
}
if ($query_string != "") {
set $cache_uri 'null cache';
}
# Don't cache uris containing the following segments
if ($request_uri ~* "(/wp-admin/|/xmlrpc.php|/wp-(app|cron|login|register|mail).php|wp-.*.php|/feed/|index.php|wp-comments-popup.php|wp-links-opml.php|wp-locations.php|sitemap(_index)?.xml|[a-z0-9_-]+-sitemap([0-9]+)?.xml)") {
set $cache_uri 'null cache';
}
# Don't use the cache for logged in users or recent commenters
if ($http_cookie ~* "comment_author|wordpress_[a-f0-9]+|wp-postpass|wordpress_logged_in") {
set $cache_uri 'null cache';
}
# Use cached or actual file if they exists, otherwise pass request to WordPress
location / {
try_files /wp-content/cache/supercache/$http_host/$cache_uri/index.html $uri $uri/ /index.php?$args ;
}
}
}
EOH
destination = "local/nginx.conf"
}
}
task "thelook-phpfpm" {
driver = "docker"
config {
image = "wordpress:php8.3-fpm-alpine"
ports = ["fpm"]
volumes = [
"/storage/nomad/style-thelook:/var/www/html/",
"local/custom.ini:/usr/local/etc/php/conf.d/custom.ini",
]
}
resources {
cpu = 800
memory = 500
}
template {
data = <<EOH
WORDPRESS_DB_HOST={{ env "NOMAD_ADDR_db" }}
WORDPRESS_DB_USER={{ key "style/thelook/db/username" }}
WORDPRESS_DB_PASSWORD={{ key "style/thelook/db/password" }}
WORDPRESS_DB_NAME={{ key "style/thelook/db/name" }}
WORDPRESS_TABLE_PREFIX=wp_
WORDPRESS_CONFIG_EXTRA="define('WP_REDIS_HOST', '{{ env "NOMAD_ADDR_redis" }}');"
EOH
destination = "local/.env"
env = true
}
template {
data = <<EOH
pm.max_children = 10
upload_max_filesize = 64M
post_max_size = 64M
EOH
destination = "local/custom.ini"
}
}
service {
name = "thelook-db"
port = "db"
}
task "thelook-db" {
driver = "docker"
config {
image = "mariadb"
ports = ["db"]
volumes = [
"/storage/nomad/style-thelook/db:/var/lib/mysql",
]
}
template {
data = <<EOH
[mysqld]
max_connections = 100
key_buffer_size = 2G
query_cache_size = 0
innodb_buffer_pool_size = 6G
innodb_log_file_size = 512M
innodb_flush_log_at_trx_commit = 1
innodb_flush_method = O_DIRECT
innodb_io_capacity = 200
tmp_table_size = 5242K
max_heap_table_size = 5242K
innodb_log_buffer_size = 16M
innodb_file_per_table = 1
bind-address = 0.0.0.0
# Logging
slow_query_log = 1
slow_query_log_file = /var/log/mysql/slow.log
long_query_time = 1
EOH
destination = "local/conf.cnf"
}
resources {
cpu = 800
memory = 800
}
template {
data = <<EOH
MYSQL_DATABASE={{ key "style/thelook/db/name" }}
MYSQL_USER={{ key "style/thelook/db/username" }}
MYSQL_PASSWORD={{ key "style/thelook/db/password" }}
MYSQL_RANDOM_ROOT_PASSWORD=yes
EOH
destination = "local/.env"
env = true
}
}
task "redis" {
driver = "docker"
config {
image = "redis:latest"
ports = ["redis"]
}
resources {
cpu = 200
}
}
}
}

View file

@ -0,0 +1,36 @@
job "urri-meetups-update" {
datacenters = ["aperture"]
type = "batch"
periodic {
crons = ["0 */6 * * * *"]
prohibit_overlap = true
}
group "urri-meetups-update" {
task "urri-meetups-update" {
driver = "raw_exec"
config {
command = "/bin/bash"
args = ["local/script.sh"]
}
template {
data = <<EOH
#!/bin/bash
# stop the urri-meetups job
nomad job stop urri-meetups
sleep 1
# revert the urri-meetups job to the previous version
# this will trigger a new deployment, which will pull the latest image
nomad job revert urri-meetups $(($(nomad job inspect urri-meetups | jq '.Job.Version')-1))
EOH
destination = "local/script.sh"
}
}
}
}

View file

@ -0,0 +1,47 @@
job "urri-meetups" {
datacenters = ["aperture"]
type = "service"
group "urri-meetups" {
count = 1
network {
port "http" {
to = 8000
}
}
service {
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "2s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.urri-meetups.rule=Host(`urri-meetups.rb.dcu.ie`)",
"traefik.http.routers.urri-meetups.entrypoints=web,websecure",
"traefik.http.routers.urri-meetups.tls.certresolver=lets-encrypt",
]
}
task "web" {
driver = "docker"
config {
image = "ghcr.io/haefae222/pizza_app:latest"
ports = ["http"]
force_pull = true
}
resources {
cpu = 1000
memory = 800
}
}
}
}

View file

@ -0,0 +1,61 @@
job "cands-room-bookings" {
datacenters = ["aperture"]
type = "service"
meta {
git-sha = ""
}
group "clubsandsocs-room-bookings" {
count = 1
network {
port "http" {
to = 5000
}
}
service {
port = "http"
check {
type = "http"
path = "/"
interval = "10s"
timeout = "2s"
}
tags = [
"traefik.enable=true",
"traefik.http.routers.clubsandsocs-room-bookings.rule=Host(`rooms.rb.dcu.ie`)",
"traefik.http.routers.clubsandsocs-room-bookings.entrypoints=web,websecure",
"traefik.http.routers.clubsandsocs-room-bookings.tls.certresolver=lets-encrypt",
]
}
task "web" {
driver = "docker"
config {
image = "ghcr.io/wizzdom/clubsandsocs-room-bookings:latest"
ports = ["http"]
force_pull = true
volumes = [
"local/.env:/app/.env"
]
}
template {
data = <<EOF
UPLOAD_FOLDER=uploads
SECRET_KEY={{ key "user-projects/wizzdom/clubsandsocs-room-bookings/secret" }}
EOF
destination = "local/.env"
}
resources {
cpu = 1000
memory = 800
}
}
}
}

View file

@@ -38,7 +38,7 @@ alloc_id=$(nomad job status -verbose bastion-vm | grep running | tail -n 1 | cut
job_name=$(echo ${NOMAD_JOB_NAME} | cut -d "/" -f 1)
echo "Backing up alloc id: ${alloc_id} on: ${host} to ${path}/${file}..."
- scp -B -i {{ key "bastion-vm/service/key" }} {{ key "bastion-vm/service/user" }}@${host}:/opt/nomad/alloc/${alloc_id}/bastion-vm/local/bastion-vm.qcow2 ${path}/${file}
+ ssh -i {{ key "bastion-vm/service/key" }} {{ key "bastion-vm/service/user" }}@${host} "sudo cat /opt/nomad/alloc/${alloc_id}/bastion-vm/local/bastion-vm.qcow2" > ${path}/${file}
find ${path}/bastion-vm-* -ctime +2 -exec rm {} \; || true
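Because the qcow2 is now streamed over ssh with sudo cat rather than copied with scp -B, it is worth sanity-checking the resulting file before trusting it as a backup (the image is read from a live allocation, so it may not be fully consistent). A small sketch using qemu-img, which is a standard tool but not something this job currently runs:

qemu-img info "${path}/${file}"    # confirms the qcow2 header parses and reports the virtual size
qemu-img check "${path}/${file}"   # read-only consistency scan of the image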