Compare commits
elastic_se ... cb8ccd8f00
17 commits

Commits (SHA1):
cb8ccd8f00
02168225b1
6ff1ccecd0
de62327fde
b70c8408dc
a913e1cbc0
e3c67a32e9
8f2998abc0
7fcee3912f
591342f580
f2ea03bc01
0e8e07ed3e
a2a58f6343
42196a32dc
6934a9f5fc
27621aac03
56f058c254

.ansible-lint (new file, 31 lines)
@@ -0,0 +1,31 @@
---
# .ansible-lint

# Specify exclude paths to prevent linting vendor roles, etc.
exclude_paths:
  - ./.git/
  - ./.venv/
  - ./galaxy_roles/

# A list of rules to skip. This is a more modern and readable alternative to 'skip_list'.
skip_list:
  - experimental
  - fqcn-builtins
  - no-handler
  - var-naming

# Enforce certain rules that are not enabled by default.
enable_list:
  - no-free-form
  - var-spacing
  - no-log-password
  - no-relative-path
  - command-instead-of-module
  - fqcn[deep]
  - no-changed-when

# Offline mode disables any features that require internet access.
offline: true

# Set the desired verbosity level.
verbosity: 1

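This config only takes effect when ansible-lint is actually run against the repository; nothing in this commit range wires that up. As a purely illustrative sketch (the repository URL and pinned revision below are assumptions, not part of this diff), a pre-commit hook could pick it up automatically:

```yaml
# Hypothetical .pre-commit-config.yaml; not included in these commits.
repos:
  - repo: https://github.com/ansible/ansible-lint
    rev: v25.1.3  # assumed pin; use whatever release matches the local toolchain
    hooks:
      - id: ansible-lint  # reads .ansible-lint from the repo root
```
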
.editorconfig (new file, 17 lines)
@@ -0,0 +1,17 @@
root = true

[*]
indent_style = space
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.{yml,yaml}]
indent_size = 2

[*.py]
indent_size = 4

[*.md]
trim_trailing_whitespace = false

README.md (13 changed lines)
@@ -72,3 +72,16 @@ sudo vgextend k3s-vg /dev/sda3
# Use the newly available storage in the root volume
sudo lvresize -l +100%FREE -r /dev/k3s-vg/root
```

## Cloud Init VMs

```sh
# On Hypervisor Host
qm resize <vmid> scsi0 +32G
# On VM
sudo fdisk -l /dev/sda # To check
echo 1 | sudo tee /sys/class/block/sda/device/rescan
sudo fdisk -l /dev/sda # To check
# sudo apt-get install cloud-guest-utils
sudo growpart /dev/sda 1
```

Vagrantfile (vendored, 31 lines deleted)
@@ -1,31 +0,0 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure("2") do |config|
  config.vm.box = "bento/ubuntu-24.04"
  config.vm.box_version = "202404.26.0"

  # Configure VM provider resources (optional)
  config.vm.provider :virtualbox do |v|
    v.memory = 4096
    v.cpus = 2
  end


  config.vm.define "test" do |v|
    v.vm.hostname = "test"
    v.vm.network :private_network, ip: "192.168.56.123"

    v.vm.provision "bootstrap", type: "shell" do |s|
      s.inline = "sudo apt install ansible -y"
    end
    #
    # Use Ansible for provisioning
    v.vm.provision "ansible" do |ansible|
      ansible.playbook = "playbook.yml" # Path to the Ansible playbook relative to the Vagrantfile
      ansible.inventory_path = "inventory" # Path to the inventory file
      # Extra vars can be defined if needed
      # ansible.extra_vars = { some_var: "value" }
    end

  end
end

@@ -1,9 +1,12 @@
[defaults]
# (string) Path to the Python interpreter to be used for module execution on remote targets, or an automatic discovery mode. Supported discovery modes are ``auto`` (the default), ``auto_silent``, ``auto_legacy``, and ``auto_legacy_silent``. All discovery modes employ a lookup table to use the included system Python (on distributions known to include one), falling back to a fixed ordered list of well-known Python interpreter locations if a platform-specific default is not available. The fallback behavior will issue a warning that the interpreter should be set explicitly (since interpreters installed later may change which one is used). This warning behavior can be disabled by setting ``auto_silent`` or ``auto_legacy_silent``. The value of ``auto_legacy`` provides all the same behavior, but for backwards-compatibility with older Ansible releases that always defaulted to ``/usr/bin/python``, will use that interpreter if present.
interpreter_python=python3

# (pathspec) Colon separated paths in which Ansible will search for Roles.
roles_path=./roles

# (pathlist) Comma separated list of Ansible inventory sources
inventory=./inventory/production
inventory=./production.ini

# (path) The vault password file to use. Equivalent to --vault-password-file or --vault-id
# If executable, it will be run and the resulting stdout will be used as the password.

@@ -1,56 +1,14 @@
|
||||
$ANSIBLE_VAULT;1.1;AES256
|
||||
34623331393561623539666362643966336661326136363431666465356535343663376236663066
|
||||
3235363061633666626133313363373336656438633566630a383230393161323862303863656464
|
||||
61633861323966343263363466343130306635343539326464363637383139343033656130336464
|
||||
3163373535613961340a643335626165306663363063656339653862393533633534366331336231
|
||||
63393432383731633463323164333831313535373261336166326237306230326465616239306536
|
||||
37663863663161393130373835373062393866633864373465333937633838303130386334356566
|
||||
64303663303862623038646235303934376230393538353466393232363764366339616633343433
|
||||
65343730663864393766313134653335396562646135306637613031333461613965666465376532
|
||||
32643261626665396338313836633337383932616265613662383132303539623239623965333966
|
||||
66333638643635313262616434396164313833303065303662303736303232346535613834643435
|
||||
32316434343231363662393163353832393166643739396165313631363539663439316133616361
|
||||
61623830613035396333303363383332653736666231343763353666356539633433373066613330
|
||||
65656631343764323234333161636632616130353139626362343361386535313336666566636464
|
||||
35323434656439346262336335383366626565333765343562633236636132636532333761663535
|
||||
31383565313436633438633336306430343733663539666631386532313836623166356332626664
|
||||
39653762353265643861633237326662383466373539633732323833376238383963393837636466
|
||||
66656631666131623166393731643537393161303636353932653062363137376334356238643064
|
||||
34303666656638396263336639636135393536623037666137653132633264316431656438386432
|
||||
34333632616265343435306365373039653036353337633563393739653632656163316636363336
|
||||
32346638393364353634386231616639386164326531353134366639653837653236333030666139
|
||||
64656334336231636337656233383834343763393738643362626665333362353335656131653165
|
||||
35376330336433383262653039643131313437643265343663626363373439643932643063646439
|
||||
37663630363839643263373630646430386536346132383564396463376361343661346661333636
|
||||
39643961643031626462363537633263393838363262626439313838313039373035373634633462
|
||||
38363938343932626131343966616638323632303636383034383536616164393539343635666166
|
||||
39383434313863356434383961383139623436636230323866396366326665623863336438623335
|
||||
33346634303639643131333933363838666336306438646335343931366437326462376438663837
|
||||
34353938343837663930356464373332356530643231653166616331376335643832316365303164
|
||||
32393062313638393936393863613731363233376537323834623164613231393133353635623866
|
||||
35626337336562653265613730363961633662653331663966333430343462666535306133663835
|
||||
64663539303765366331613666653632313233626231313264346332323266653230323332373836
|
||||
33303564633464333064613431383230383535633362373839323334353162623433646230393838
|
||||
33306162613739393338373361616634396636313765326465393332396537613263383339626666
|
||||
63613162616363363138323965373966353366323463313934356530663931653565656164346363
|
||||
37633862366436623030303233396639393434336438623433383530393836626164353064366432
|
||||
35303532393437316162346366346636633135383938323631316563323935383561326335323438
|
||||
30613266643232656138663431666162663330643133643263343237663565323231316239633037
|
||||
39323732386236396136633539383335646634306139643533666636633131623566333137376236
|
||||
39616134306463613864353135313636343365643437323465643862303137663937376233306261
|
||||
31383862356535646563383438396363323838613237623034656561396163376433663262366137
|
||||
63323562346633303162666530616534386539383238366139376263326265343138373139393432
|
||||
35643335363139373139666230626363386232316536306431653964376333366235303763336135
|
||||
65623231336638643034373932376263636336653561646664366138643031316438316465353363
|
||||
38386539363631393433313664323135646562313537376236653635303263633230383866653039
|
||||
66636534336234363438363139366531653237323137613961383831376665626365393462363834
|
||||
36333965366463636233643433616431376436323535396238363933326363333661326462353161
|
||||
66626435373938633832393662313161663336613862343332643766333633653866316464653735
|
||||
31356135363662633961386264613836323435323836386635336338353663333137336666323531
|
||||
36663731336664633763633634613136663866363530613264356431326539316530326161313362
|
||||
62616539356537353261343464356334636134396664353463623163313765633432653932346136
|
||||
32326239373333643461333733646264353238356134613037663836643131316664653539643839
|
||||
30613235623933356565336630323939633266613164306262386666363137666661666131613962
|
||||
61623930663536646462343264336535353634373833316537613839396566376466653736333830
|
||||
33376663613063326230346439626237373232656665633832373364653931663361666432303166
|
||||
663564323132383864336332363139393534
|
||||
65646664663537386235383334613536393336623332363437376337323235636335363165366632
|
||||
3433623633393731373932306433643663333133393734370a353261353164353335356264643234
|
||||
65376132336534306465376435303764616136646633303166336136373263346436353235343065
|
||||
6238353863333239330a303131623262353563323864323536313036356237653936326361366565
|
||||
62616566396266363535653062636537383061363438303138333237643939323162336465326363
|
||||
64323830393839386233303634326562386537373766646461376238663963376463623130303363
|
||||
65366638666132393538336361663639303831333232336632616338396539353565663239373265
|
||||
38323036343733303131383439323738623263383736303935636339303564343662633437626233
|
||||
33303564373963646465306137346161656166366266663766356362636362643430393232646635
|
||||
38363764386538613166306464336532623464343565396431643738353434313838633763663861
|
||||
35616365383831643434316436313035366131663131373064663464393031623132366137303333
|
||||
62333561373465323664303539353966663763613365373633373761343966656166363265313134
|
||||
6163
|
||||
|
||||
@@ -17,20 +17,4 @@ internal_domain: seyshiro.de
# Packages
#

common_packages:
  - build-essential
  - curl
  - git
  - iperf3
  - neovim
  - rsync
  - smartmontools
  - sudo
  - systemd-timesyncd
  - tree
  - screen
  - bat
  - fd-find
  - ripgrep

arch: "{{ 'arm64' if ansible_architecture == 'aarch64' else 'amd64' }}"

group_vars/docker/docker.yml (new file, 502 lines)
@@ -0,0 +1,502 @@
docker:
  url: "https://download.docker.com/linux"
  apt_release_channel: "stable"
  directories:
    local: "/opt/local/"
    config: "/opt/docker/config/"
    compose: "/opt/docker/compose/"

services:
  - name: status
    vm:
      - docker-host12
    container_name: kuma
    image: louislam/uptime-kuma:1.23.16
    volumes:
      - name: "Data"
        internal: /app/data
        external: "{{ docker.directories.local }}/kuma/"
    ports:
      - name: "http"
        internal: 3001
        external: "{{ services_external_http.kuma }}"
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
  - name: plex
    vm:
      - docker-host10
    container_name: plex
    image: lscr.io/linuxserver/plex:1.41.5
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/plex/config/"
      - name: "TV Series"
        internal: /tv:ro
        external: /media/series
      - name: "Movies"
        internal: /movies:ro
        external: /media/movies
      - name: "Music"
        internal: /music:ro
        external: /media/songs
    devices:
      - name: "Graphics Card"
        internal: /dev/dri
        external: /dev/dri
    ports:
      - name: "http"
        internal: 32400
        external: "{{ services_external_http.plex }}"
      - name: ""
        internal: 1900
        external: 1900
      - name: ""
        internal: 3005
        external: 3005
      - name: ""
        internal: 5353
        external: 5353
      - name: ""
        internal: 32410
        external: 32410
      - name: ""
        internal: 8324
        external: 8324
      - name: ""
        internal: 32412
        external: 32412
      - name: ""
        internal: 32469
        external: 32469
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
      - VERSION=docker
  - name: jellyfin
    vm:
      - docker-host01
    container_name: jellyfin
    image: jellyfin/jellyfin:10.10
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/jellyfin/config"
      - name: "Cache"
        internal: /cache
        external: "{{ docker.directories.config }}/jellyfin/cache"
      - name: "Tv Series"
        internal: /tv:ro
        external: /media/series
      - name: "Music"
        internal: /movies:ro
        external: /media/movies
      - name: "Music"
        internal: /music:ro
        external: /media/songs
    devices:
      - name: "Graphics Card"
        internal: /dev/dri
        external: /dev/dri
    ports:
      - name: "http"
        internal: 8096
        external: "{{ services_external_http.jellyfin }}"
    environment:
  - name: hass
    vm:
      - docker-host01
    container_name: homeassistant
    image: "ghcr.io/home-assistant/home-assistant:stable"
    privileged: true
    volumes:
      - name: "Configuration"
        internal: /config/
        external: "{{ docker.directories.local }}/home-assistant/config/"
      - name: "Local Time"
        internal: /etc/localtime:ro
        external: /etc/localtime
    ports:
      - name: "http"
        internal: 8123
        external: "{{ services_external_http.hass }}"
      - name: ""
        internal: 4357
        external: 4357
      - name: ""
        internal: 5683
        external: 5683
      - name: ""
        internal: 5683
        external: 5683
  - name: ddns
    vm:
      - docker-host12
    container_name: ddns-updater
    image: qmcgaw/ddns-updater:2
    volumes:
      - name: "Configuration"
        internal: /updater/data/
        external: "{{ docker.directories.local }}/ddns-updater/data/"
    ports:
      - name: "http"
        internal: 8000
        external: "{{ services_external_http.ddns }}"
  - name: sonarr
    vm:
      - docker-host12
    container_name: sonarr
    image: linuxserver/sonarr:4.0.14
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/sonarr/config"
      - name: "Tv Series"
        internal: /tv
        external: /media/series
      - name: "Torrent Downloads"
        internal: /downloads
        external: /media/docker/data/arr_downloads/sonarr
    ports:
      - name: "http"
        internal: 8989
        external: "{{ services_external_http.sonarr }}"
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
  - name: radarr
    vm:
      - docker-host12
    container_name: radarr
    image: linuxserver/radarr:5.21.1
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/radarr/config"
      - name: "Movies"
        internal: /movies
        external: /media/movies
      - name: "Torrent Downloads"
        internal: /downloads
        external: /media/docker/data/arr_downloads/radarr
    ports:
      - name: "http"
        internal: 7878
        external: "{{ services_external_http.radarr }}"
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
  - name: lidarr
    vm:
      - docker-host12
    container_name: lidarr
    image: linuxserver/lidarr:2.10.3
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/lidarr/config"
      - name: "Music"
        internal: /music
        external: /media/songs
      - name: "Torrent Downloads"
        internal: /downloads
        external: /media/docker/data/arr_downloads/lidarr
    ports:
      - name: "http"
        internal: 8686
        external: "{{ services_external_http.lidarr }}"
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
  - name: prowlarr
    vm:
      - docker-host12
    container_name: prowlarr
    image: linuxserver/prowlarr:1.32.2
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/prowlarr/config"
    ports:
      - name: "http"
        internal: 9696
        external: "{{ services_external_http.prowlarr }}"
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
  - name: paperless
    vm:
      - docker-host12
    container_name: paperless
    image: ghcr.io/paperless-ngx/paperless-ngx:2.14
    depends_on:
      - paperless-postgres
      - paperless-redis
    volumes:
      - name: "Configuration"
        internal: /usr/src/paperless/data
        external: "{{ docker.directories.local }}/paperless/data/data"
      - name: "Media"
        internal: /usr/src/paperless/media
        external: "{{ docker.directories.local }}/paperless/data/media"
      - name: "Document Export"
        internal: /usr/src/paperless/export
        external: "{{ docker.directories.local }}/paperless/data/export"
      - name: "Document Consume"
        internal: /usr/src/paperless/consume
        external: "{{ docker.directories.local }}/paperless/data/consume"
    environment:
      - "PAPERLESS_REDIS=redis://paperless-redis:6379"
      - "PAPERLESS_DBHOST=paperless-postgres"
      - "PAPERLESS_DBUSER=paperless"
      - "PAPERLESS_DBPASS={{ vault.docker.paperless.dbpass }}"
      - "USERMAP_UID=1000"
      - "USERMAP_GID=1000"
      - "PAPERLESS_URL=https://paperless.{{ domain }}"
      - "PAPERLESS_TIME_ZONE=Europe/Berlin"
      - "PAPERLESS_OCR_LANGUAGE=deu"
    ports:
      - name: "http"
        internal: 8000
        external: "{{ services_external_http.paperless }}"
    sub_service:
      - name: postgres
        version: 15
        username: paperless
        password: "{{ vault.docker.paperless.dbpass }}"
      - name: redis
        version: 7
  - name: pdf
    vm:
      - docker-host12
    container_name: stirling
    image: frooodle/s-pdf:0.45.0
    ports:
      - name: "http"
        internal: 8080
        external: "{{ services_external_http.pdf }}"
  - name: git
    vm:
      - docker-host01
    container_name: gitea
    image: gitea/gitea:1.23-rootless
    volumes:
      - name: "Configuration"
        internal: /etc/gitea
        external: "{{ docker.directories.local }}/gitea/config"
      - name: "Data"
        internal: /var/lib/gitea
        external: "{{ docker.directories.local }}/gitea/data"
      - name: "Time Zone"
        internal: /etc/timezone:ro
        external: /etc/timezone
      - name: "Local Time"
        internal: /etc/localtime:ro
        external: /etc/localtime
    ports:
      - name: "http"
        internal: 3000
        external: "{{ services_external_http.git }}"
      - name: "ssh"
        internal: 2222
        external: 2222
    environment:
      - USER_UID=1000
      - USER_GID=1000
  - name: changedetection
    vm:
      - docker-host12
    container_name: changedetection
    image: dgtlmoon/changedetection.io:0.49
    healthcheck: curl
    volumes:
      - name: "Data"
        internal: /datastore
        external: "{{ docker.directories.local }}/changedetection/data/"
    ports:
      - name: "http"
        internal: 5000
        external: "{{ services_external_http.changedetection }}"
  - name: gluetun
    vm:
      - docker-host12
    container_name: gluetun
    image: qmcgaw/gluetun:v3.40
    cap_add:
      - NET_ADMIN
    devices:
      - name: "Tunnel"
        internal: /dev/net/tun
        external: /dev/net/tun
    volumes:
      - name: "Configuration"
        internal: /gluetun
        external: "{{ docker.directories.local }}/gluetun/config"
    ports:
      - name: "Qbit Client"
        internal: 8082
        external: 8082
      - name: "Torrentleech Client"
        internal: 8083
        external: 8083
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
      - VPN_SERVICE_PROVIDER=protonvpn
      - UPDATER_VPN_SERVICE_PROVIDERS=protonvpn
      - UPDATER_PERIOD=24h
      - "SERVER_COUNTRIES={{ vault.docker.proton.country }}"
      - "OPENVPN_USER={{ vault.docker.proton.openvpn_user }}"
      - "OPENVPN_PASSWORD={{ vault.docker.proton.openvpn_password }}"
  - name: torrentleech
    vm:
      - docker-host12
    container_name: torrentleech
    image: qbittorrentofficial/qbittorrent-nox
    depends_on:
      - gluetun
    network_mode: "container:gluetun"
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/torrentleech/config"
      - name: "Downloads"
        internal: /downloads
        external: /media/docker/data/arr_downloads
    ports:
      - name: "http"
        internal: proxy_only
        external: 8083
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
      - QBT_EULA="accept"
      - QBT_WEBUI_PORT="8083"
  - name: qbit
    vm:
      - docker-host12
    container_name: qbit
    image: qbittorrentofficial/qbittorrent-nox:5.0.4-1
    depends_on:
      - gluetun
    network_mode: "container:gluetun"
    volumes:
      - name: "Configuration"
        internal: /config
        external: "{{ docker.directories.local }}/qbit/config"
      - name: "Downloads"
        internal: /downloads
        external: /media/docker/data/arr_downloads
    ports:
      - name: "http"
        internal: proxy_only
        external: 8082
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
      - QBT_EULA="accept"
      - QBT_WEBUI_PORT="8082"
  - name: cadvisor
    vm:
      - docker-host12
      - docker-host10
      - docker-host01
    container_name: cadvisor
    image: gcr.io/cadvisor/cadvisor:v0.52.1
    ports:
      - name: ""
        internal: 8080
        external: 8081
    volumes:
      - name: "Root"
        internal: /rootfs:ro
        external: /
      - name: "Run"
        internal: /var/run:rw
        external: /var/run
      - name: "System"
        internal: /sys:ro
        external: /sys
      - name: "Docker"
        internal: /var/lib/docker:ro
        external: /var/lib/docker
  - name: karakeep
    vm:
      - docker-host01
    container_name: karakeep
    image: ghcr.io/karakeep-app/karakeep:0.23.2
    ports:
      - name: "http"
        internal: 3000
        external: "{{ services_external_http.karakeep }}"
    volumes:
      - name: "Data"
        internal: /data
        external: "{{ docker.directories.local }}/karakeep/config"
    environment:
      - MEILI_ADDR=http://karakeep-meilisearch:7700
      - BROWSER_WEB_URL=http://karakeep-chrome:9222
      - NEXTAUTH_SECRET={{ vault.docker.karakeep.nextauth_secret }}
      - MEILI_MASTER_KEY={{ vault.docker.karakeep.meili_master_key }}
      - NEXTAUTH_URL=https://karakeep.tudattr.dev/
      - OPENAI_API_KEY={{ vault.docker.karakeep.openai_key }}
      - DATA_DIR=/data
      - DISABLE_SIGNUPS=true
    sub_service:
      - name: meilisearch
        version: v1.11.1
        nextauth_secret: "{{ vault.docker.karakeep.nextauth_secret }}"
        meili_master_key: "{{ vault.docker.karakeep.meili_master_key }}"
        openai_key: "{{ vault.docker.karakeep.openai_key }}"
      - name: chrome
        version: 123
  - name: keycloak
    vm:
      - docker-host01
    container_name: keycloak
    image: quay.io/keycloak/keycloak:26.2
    depends_on:
      - keycloak-postgres
    ports:
      - name: "http"
        internal: 8080
        external: "{{ services_external_http.keycloak }}"
    volumes:
      - name: "config"
        internal: /opt/keycloak/data/import/homelab-realm.json
        external: "{{ docker.directories.local }}/keycloak/homelab-realm.json"
      - name: "config"
        internal: /opt/keycloak/data/import/master-realm.json
        external: "{{ docker.directories.local }}/keycloak/master-realm.json"
    command:
      - "start"
      - "--import-realm"
    environment:
      - KC_DB=postgres
      - KC_DB_URL=jdbc:postgresql://keycloak-postgres:5432/keycloak
      - KC_DB_USERNAME={{ keycloak_config.database.username }}
      - KC_DB_PASSWORD={{ keycloak_config.database.password }}
      - KC_HOSTNAME=keycloak.{{ internal_domain }}
      - KC_HTTP_ENABLED=true
      - KC_HTTP_RELATIVE_PATH=/
      - KC_PROXY=edge
      - KC_PROXY_HEADERS=xforwarded
      - KC_HOSTNAME_URL=https://keycloak.{{ internal_domain }}
      - KC_HOSTNAME_ADMIN_URL=https://keycloak.{{ internal_domain }}
      - KC_BOOTSTRAP_ADMIN_USERNAME=serviceadmin-{{ keycloak_admin_hash }}
      - KC_BOOTSTRAP_ADMIN_PASSWORD={{ vault.docker.keycloak.admin.password }}
    sub_service:
      - name: postgres
        version: 17
        username: "{{ keycloak_config.database.username }}"
        password: "{{ keycloak_config.database.password }}"

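For orientation: each `services` entry mirrors Docker Compose fields, with host paths resolved from `docker.directories` and published ports from `services_external_http` (defined in port_mapping.yml below). The compose template that consumes this structure is not part of this diff, so the following is only a hedged sketch of what the `status`/kuma entry would roughly render to:

```yaml
# Hypothetical rendered compose fragment for the "status" service; the real template is not shown here.
services:
  kuma:
    image: louislam/uptime-kuma:1.23.16
    volumes:
      - /opt/local/kuma/:/app/data   # docker.directories.local + kuma/ mounted at /app/data
    ports:
      - "3001:3001"                  # services_external_http.kuma published against internal 3001
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
```
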
group_vars/docker/keycloak.yml (new file, 61 lines)
@@ -0,0 +1,61 @@
keycloak_admin_hash: "{{ vault.docker.keycloak.admin.hash }}"

keycloak_realms: "{{ keycloak_config.realms }}"

keycloak_config:
  database:
    db_name: keycloak
    username: keycloak
    password: "{{ vault.docker.keycloak.database.password }}"
  realms:
    - realm: homelab
      display_name: "Homelab Realm"
      users:
        - username: tudattr
          password: "{{ vault.docker.keycloak.user.password }}"
          realm_roles:
            - offline_access
            - uma_authorization
          client_roles:
            account:
              - view-profile
              - manage-account
      admin:
        username: "serviceadmin-{{ keycloak_admin_hash }}"
        password: "{{ vault.docker.keycloak.admin.password }}"
        realm_roles:
          - offline_access
          - uma_authorization
          - admin
        client_roles:
          realm_management:
            - realm-admin
          account:
            - view-profile
            - manage-account
      roles:
        realm:
          - name: admin
            description: "Administrator role for the homelab realm"
      default_roles:
        - offline_access
        - uma_authorization
    - realm: master
      display_name: "master"
      admin:
        username: "serviceadmin-{{ keycloak_admin_hash }}"
        password: "{{ vault.docker.keycloak.admin.password }}"
        realm_roles:
          - offline_access
          - uma_authorization
          - create-realm
          - admin
        client_roles:
          realm_management:
            - realm-admin
          account:
            - view-profile
            - manage-account
      roles:
        realm: []
      default_roles: []

group_vars/docker/port_mapping.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
services_external_http:
  kuma: 3001
  plex: 32400
  jellyfin: 8096
  hass: 8123
  ddns: 8001
  sonarr: 8989
  radarr: 7878
  lidarr: 8686
  prowlarr: 9696
  paperless: 8000
  pdf: 8080
  git: 3000
  changedetection: 5000
  torrentleech: 8083
  qbit: 8082
  karakeep: 3002
  keycloak: 3003

group_vars/docker/secrets.yml (new file, 65 lines)
@@ -0,0 +1,65 @@
$ANSIBLE_VAULT;1.1;AES256
|
||||
62353938306631616432613936343031386266643837393733336533306532643332383761336462
|
||||
3566663762343161373266353236323532666562383031310a663661316264313737633732313166
|
||||
35336535353964646238393563333339646634346532633130633364343864363565353461616663
|
||||
6336343138623762320a366132383634383231316130643535313465356238343534656237626362
|
||||
38373439663730353739386636313865336262363864323633343839636434353261313432386135
|
||||
33343438663564323465373435613765306538633339303362656163636237643661623637376135
|
||||
65346465303530663161356666333062326536313135363536313237616564363838326339646162
|
||||
62323066626431376231386432333766366434326239303734353036396433333662333733373830
|
||||
66336433643032636166306332323063393333363734326333363936303033396336626135363832
|
||||
30636136656235376163613033616563663663633161643937666537333066343135326138643663
|
||||
64646638393364376466306438383337383231303637313366333638393939373739646338353036
|
||||
62303162383362393830316163303236336236363531333665353163373530323063313164656562
|
||||
33383561613530346561336166653536393137346630333262633738383838383338643761666463
|
||||
61303239636631646634373266303930343437636464326132316534616261376137396233653265
|
||||
39383137666533613739363764643162663361333465386332383964343534646537343065343833
|
||||
66643938623734643537313866316335396135613239393262613562356332663861646261373630
|
||||
34373939663239646534396638636265303438386239636439663635313665613634373832313237
|
||||
62306366633139333937646534393765663130396466346161376235656461346638323063353662
|
||||
64386466373433376133343266396537656435333831356531346531653262396330346238623431
|
||||
61303466366161336664333239663066643232623532643933373661663266366639646139666636
|
||||
62393532643535656566643862353337333533633861396164643766316637393638363662653863
|
||||
32643566333961663065383636383436666137356237643634326464636463303530306466616635
|
||||
36366365636337366335333630306237356366306535613464636463373063653861623464323764
|
||||
62336139653361376239303632326431643231346137333835356362333962613039643332373166
|
||||
32316234376431376136666161383039633035356636626664376137323630323966646161313664
|
||||
38623463376366623430663363663662303166636165646138363631643261376137336636636663
|
||||
61656631393963353066333930303932653730613431366131616233363662316139663038336538
|
||||
36383532316162356235373566313832323131326466363734613438323233353330613561383435
|
||||
39623435366236306431636232323838386462346464653561653638346338613833613133373133
|
||||
38626364643738373938336237323836646532356539643933333730353333626138646239633234
|
||||
66316563306230636139323335323665646462343861393366666462623966376431393438376134
|
||||
37376339356430316235633337376462666439643430303062656538386630613763623433646133
|
||||
65663530626533663266623861326431633137363466346634656634623166623331306636616666
|
||||
31643761343632336531356566636165363737646639326533386333646434393736643934643064
|
||||
39393039346639353439653766326138613164343030306436383461663636346534346365333265
|
||||
66653535623962653762633934646131653334363232636634303130306632383263373161363462
|
||||
35323133616665366238353535346561323834353634613730613439643536376337353234313337
|
||||
61623264616433336532383533376631396438313739616462323064613665396638333438306336
|
||||
34633338366235336131303462346665663464376334353431343363336662356335356562366532
|
||||
64366461623864633238666339346138663931363331613463333762336230313530613235303766
|
||||
34313064383461623230383730623731323533326663613565646436303230653264323061616536
|
||||
38636162356164656432626433373864326264623063343662323563366133363336313739326137
|
||||
31326164646364613865396534626533616366613565303032636637366435326336396464313232
|
||||
66393538393862616466313833326666316231393130666238636130613339663664393434613732
|
||||
65383363323138343335393636626138303561613532306131666334346631336333336639626466
|
||||
38343337346566346334383934306433366239666662346463666166643338613264636563653434
|
||||
36306338313363636665333763323135386165313939336432636339613432323736326635303162
|
||||
36656234656563376633373333633430313430333834623964653530626539333265363563376239
|
||||
33633430396338663063383338333732356532313435613737393465323431393035356136306166
|
||||
62633035653731636361396235613162643332393233326434353831613731373333326464326234
|
||||
36366166633437356336616166306164343636623962623136653861333866393039653939333037
|
||||
31343261663534356530373233336165326134613961616331316531313435386464396438363838
|
||||
31353935666566326630373336376438326366623537356536653564303066343837653030373962
|
||||
30393363336232646662663166326166386636356466616165376435623031666664373664623330
|
||||
31613030616162303732353738386434666566386138373238363732303138316533356435656662
|
||||
38636136353134303166636438663036363834663639613464376662666364386635333138353035
|
||||
39363236653336386332313930306663366130303836333664363335386331636431623036336535
|
||||
32366339386539306364343065323263366563643663623731643866346232653838333561336331
|
||||
36363030383263666137393035356331323038316239356637303665653164363739313664396235
|
||||
32366231613532323865623861636263383731303164366333303636356633323161653635393830
|
||||
38616139656264393932353332303264393038396663663236353838343432373965663561333531
|
||||
36363432323362643634623030356539396562633238653732313739616464643436666130633364
|
||||
37383764623938626332316630636630343236663338323661333933333730333630353061653061
|
||||
62656233653439353438
|
||||
@@ -1,532 +1,8 @@
|
||||
docker:
|
||||
url: "https://download.docker.com/linux"
|
||||
apt_release_channel: "stable"
|
||||
directories:
|
||||
config: "/opt/docker/config/"
|
||||
compose: "/opt/docker/compose/"
|
||||
media: "/media/docker/data/"
|
||||
|
||||
caddy:
|
||||
admin_email: me+acme@tudattr.dev
|
||||
|
||||
domain: "seyshiro.de"
|
||||
domain: "{{ internal_domain }}"
|
||||
|
||||
elk_version: 8.17.0
|
||||
|
||||
services:
|
||||
- name: syncthing
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: syncthing
|
||||
image: syncthing/syncthing
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Data"
|
||||
internal: /var/syncthing/
|
||||
external: /media/docker/data/syncthing/
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8384
|
||||
external: 8384
|
||||
- name: ""
|
||||
internal: 22000
|
||||
external: 22000
|
||||
- name: ""
|
||||
internal: 22000
|
||||
external: 22000
|
||||
- name: ""
|
||||
internal: 21027
|
||||
external: 21027
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- name: status
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: kuma
|
||||
image: louislam/uptime-kuma:1
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Data"
|
||||
internal: /app/data
|
||||
external: /opt/local/kuma/
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 3001
|
||||
external: 3001
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- name: plex
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: plex
|
||||
image: lscr.io/linuxserver/plex:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: /opt/local/plex/config/
|
||||
- name: "TV Series"
|
||||
internal: /tv:ro
|
||||
external: /media/series
|
||||
- name: "Movies"
|
||||
internal: /movies:ro
|
||||
external: /media/movies
|
||||
- name: "Music"
|
||||
internal: /music:ro
|
||||
external: /media/songs
|
||||
devices:
|
||||
- name: "Graphics Card"
|
||||
internal: /dev/dri
|
||||
external: /dev/dri
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 32400
|
||||
external: 32400
|
||||
- name: ""
|
||||
internal: 1900
|
||||
external: 1900
|
||||
- name: ""
|
||||
internal: 3005
|
||||
external: 3005
|
||||
- name: ""
|
||||
internal: 5353
|
||||
external: 5353
|
||||
- name: ""
|
||||
internal: 32410
|
||||
external: 32410
|
||||
- name: ""
|
||||
internal: 8324
|
||||
external: 8324
|
||||
- name: ""
|
||||
internal: 32412
|
||||
external: 32412
|
||||
- name: ""
|
||||
internal: 32469
|
||||
external: 32469
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- VERSION=docker
|
||||
- name: jellyfin
|
||||
vm:
|
||||
- docker-host02
|
||||
container_name: jellyfin
|
||||
image: jellyfin/jellyfin
|
||||
restart: "unless-stopped"
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: /opt/local/jellyfin/config
|
||||
- name: "Cache"
|
||||
internal: /cache
|
||||
external: "{{ docker.directories.config }}/jellyfin/cache"
|
||||
- name: "Tv Series"
|
||||
internal: /tv:ro
|
||||
external: /media/series
|
||||
- name: "Music"
|
||||
internal: /movies:ro
|
||||
external: /media/movies
|
||||
- name: "Music"
|
||||
internal: /music:ro
|
||||
external: /media/songs
|
||||
devices:
|
||||
- name: "Graphics Card"
|
||||
internal: /dev/dri
|
||||
external: /dev/dri
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8096
|
||||
external: 8096
|
||||
environment:
|
||||
- name: hass
|
||||
vm:
|
||||
- docker-host02
|
||||
container_name: homeassistant
|
||||
image: "ghcr.io/home-assistant/home-assistant:stable"
|
||||
restart: unless-stopped
|
||||
privileged: true
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config/
|
||||
external: /opt/local/home-assistant/config/
|
||||
- name: "Local Time"
|
||||
internal: /etc/localtime:ro
|
||||
external: /etc/localtime
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8123
|
||||
external: 8123
|
||||
- name: ""
|
||||
internal: 4357
|
||||
external: 4357
|
||||
- name: ""
|
||||
internal: 5683
|
||||
external: 5683
|
||||
- name: ""
|
||||
internal: 5683
|
||||
external: 5683
|
||||
- name: ddns
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: ddns-updater
|
||||
image: ghcr.io/qdm12/ddns-updater
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /updater/data/"
|
||||
external: "{{ docker.directories.config }}/ddns-updater/data/"
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8000
|
||||
external: 8001
|
||||
- name: sonarr
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: sonarr
|
||||
image: lscr.io/linuxserver/sonarr:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: /opt/local/sonarr/config
|
||||
- name: "Tv Series"
|
||||
internal: /tv
|
||||
external: /media/series
|
||||
- name: "Torrent Downloads"
|
||||
internal: /downloads
|
||||
external: /media/docker/data/arr_downloads/sonarr
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8989
|
||||
external: 8989
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- name: radarr
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: radarr
|
||||
image: lscr.io/linuxserver/radarr:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: /opt/local/radarr/config
|
||||
- name: "Movies"
|
||||
internal: /movies
|
||||
external: /media/movies
|
||||
- name: "Torrent Downloads"
|
||||
internal: /downloads
|
||||
external: /media/docker/data/arr_downloads/radarr
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 7878
|
||||
external: 7878
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- name: lidarr
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: lidarr
|
||||
image: lscr.io/linuxserver/lidarr:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: /opt/local/lidarr/config
|
||||
- name: "Music"
|
||||
internal: /music
|
||||
external: /media/songs
|
||||
- name: "Torrent Downloads"
|
||||
internal: /downloads
|
||||
external: /media/docker/data/arr_downloads/lidarr
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8686
|
||||
external: 8686
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- name: prowlarr
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: prowlarr
|
||||
image: lscr.io/linuxserver/prowlarr:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: /opt/local/prowlarr/config
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 9696
|
||||
external: 9696
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- name: paperless
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: paperless
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- paperless-postgres
|
||||
- paperless-broker
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /usr/src/paperless/data
|
||||
external: /opt/local/paperless/data/data
|
||||
- name: "Media"
|
||||
internal: /usr/src/paperless/media
|
||||
external: /opt/local/paperless/data/media
|
||||
- name: "Document Export"
|
||||
internal: /usr/src/paperless/export
|
||||
external: /opt/local/paperless/data/export
|
||||
- name: "Document Consume"
|
||||
internal: /usr/src/paperless/consume
|
||||
external: /opt/local/paperless/data/consume
|
||||
environment:
|
||||
- "PAPERLESS_REDIS=redis://paperless-broker:6379"
|
||||
- "PAPERLESS_DBHOST=paperless-postgres"
|
||||
- "PAPERLESS_DBUSER=paperless"
|
||||
- "PAPERLESS_DBPASS={{ vault.docker.paperless.dbpass }}"
|
||||
- "USERMAP_UID=1000"
|
||||
- "USERMAP_GID=1000"
|
||||
- "PAPERLESS_URL=https://paperless.{{ domain }}"
|
||||
- "PAPERLESS_TIME_ZONE=Europe/Berlin"
|
||||
- "PAPERLESS_OCR_LANGUAGE=deu"
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8000
|
||||
external: 8000
|
||||
- name: pdf
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: stirling
|
||||
image: frooodle/s-pdf:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 8080
|
||||
external: 8080
|
||||
- name: git
|
||||
vm:
|
||||
- docker-host02
|
||||
container_name: gitea
|
||||
image: gitea/gitea:1.23.1-rootless
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /etc/gitea
|
||||
external: /opt/local/gitea/config
|
||||
- name: "Data"
|
||||
internal: /var/lib/gitea
|
||||
external: /opt/local/gitea/data
|
||||
- name: "Time Zone"
|
||||
internal: /etc/timezone:ro
|
||||
external: /etc/timezone
|
||||
- name: "Local Time"
|
||||
internal: /etc/localtime:ro
|
||||
external: /etc/localtime
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 3000
|
||||
external: 3000
|
||||
- name: "ssh"
|
||||
internal: 2222
|
||||
external: 2222
|
||||
environment:
|
||||
- USER_UID=1000
|
||||
- USER_GID=1000
|
||||
- name: changedetection
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: changedetection
|
||||
image: dgtlmoon/changedetection.io
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- name: "Data"
|
||||
internal: /datastore
|
||||
external: "{{ docker.directories.config }}/changedetection/data/"
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 5000
|
||||
external: 5000
|
||||
- name: gluetun
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: gluetun
|
||||
image: qmcgaw/gluetun
|
||||
restart: unless-stopped
|
||||
cap_add:
|
||||
- NET_ADMIN
|
||||
devices:
|
||||
- name: "Tunnel"
|
||||
internal: /dev/net/tun
|
||||
external: /dev/net/tun
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /gluetun
|
||||
external: "{{ docker.directories.config }}/gluetun/config"
|
||||
ports:
|
||||
- name: "Qbit Client"
|
||||
internal: 8082
|
||||
external: 8082
|
||||
- name: "Torrentleech Client"
|
||||
internal: 8083
|
||||
external: 8083
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- VPN_SERVICE_PROVIDER=protonvpn
|
||||
- UPDATER_VPN_SERVICE_PROVIDERS=protonvpn
|
||||
- UPDATER_PERIOD=24h
|
||||
- "SERVER_COUNTRIES={{ vault.docker.proton.country }}"
|
||||
- "OPENVPN_USER={{ vault.docker.proton.openvpn_user }}"
|
||||
- "OPENVPN_PASSWORD={{ vault.docker.proton.openvpn_password }}"
|
||||
- name: torrentleech
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: torrentleech
|
||||
image: qbittorrentofficial/qbittorrent-nox
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- gluetun
|
||||
network_mode: "container:gluetun"
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: "{{ docker.directories.config }}/torrentleech/config"
|
||||
- name: "Downloads"
|
||||
internal: /downloads
|
||||
external: /media/docker/data/arr_downloads
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: proxy_only
|
||||
external: 8083
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- QBT_EULA="accept"
|
||||
- QBT_WEBUI_PORT="8083"
|
||||
- name: qbit
|
||||
vm:
|
||||
- docker-host00
|
||||
container_name: qbit
|
||||
image: qbittorrentofficial/qbittorrent-nox
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- gluetun
|
||||
network_mode: "container:gluetun"
|
||||
volumes:
|
||||
- name: "Configuration"
|
||||
internal: /config
|
||||
external: "{{ docker.directories.config }}/qbit/config"
|
||||
- name: "Downloads"
|
||||
internal: /downloads
|
||||
external: /media/docker/data/arr_downloads
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: proxy_only
|
||||
external: 8082
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Berlin
|
||||
- QBT_EULA="accept"
|
||||
- QBT_WEBUI_PORT="8082"
|
||||
- name: cadvisor
|
||||
vm:
|
||||
- docker-host00
|
||||
- docker-host01
|
||||
- docker-host02
|
||||
container_name: cadvisor
|
||||
image: gcr.io/cadvisor/cadvisor:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- name: ""
|
||||
internal: 8080
|
||||
external: 8081
|
||||
volumes:
|
||||
- name: "Root"
|
||||
internal: /rootfs:ro
|
||||
external: /
|
||||
- name: "Run"
|
||||
internal: /var/run:rw
|
||||
external: /var/run
|
||||
- name: "System"
|
||||
internal: /sys:ro
|
||||
external: /sys
|
||||
- name: "Docker"
|
||||
internal: /var/lib/docker:ro
|
||||
external: /var/lib/docker
|
||||
- name: elasticsearch
|
||||
vm:
|
||||
- docker-host01
|
||||
container_name: elasticsearch
|
||||
image: "docker.elastic.co/elasticsearch/elasticsearch:{{ elk_version }}"
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- name: ""
|
||||
internal: 9200
|
||||
external: 9200
|
||||
- name: ""
|
||||
internal: 9300
|
||||
external: 9300
|
||||
volumes:
|
||||
- name: "data"
|
||||
internal: /usr/share/elasticsearch/data
|
||||
external: "{{ docker.directories.config }}/elk/elasticsearch/data"
|
||||
- name: "certs"
|
||||
internal: /usr/share/elasticsearch/config/certs
|
||||
external: "{{ docker.directories.config }}/elk/certs"
|
||||
environment:
|
||||
- node.name=elasticsearch
|
||||
- cluster.name=docker-cluster
|
||||
- discovery.type=single-node
|
||||
- "ELASTIC_PASSWORD={{ vault.docker.elk.elastic.password }}"
|
||||
- xpack.security.enabled=true
|
||||
- xpack.security.authc.api_key.enabled=true
|
||||
- xpack.security.http.ssl.enabled=true
|
||||
- xpack.security.http.ssl.key=certs/elasticsearch.key
|
||||
- xpack.security.http.ssl.certificate=certs/elasticsearch.crt
|
||||
- xpack.security.http.ssl.certificate_authorities=certs/ca.crt
|
||||
- xpack.security.transport.ssl.enabled=true
|
||||
- xpack.security.transport.ssl.verification_mode=certificate
|
||||
- xpack.security.transport.ssl.key=certs/elasticsearch.key
|
||||
- xpack.security.transport.ssl.certificate=certs/elasticsearch.crt
|
||||
- xpack.security.transport.ssl.certificate_authorities=certs/ca.crt
|
||||
- name: kibana
|
||||
vm:
|
||||
- docker-host01
|
||||
container_name: kibana
|
||||
image: "docker.elastic.co/kibana/kibana:{{ elk_version }}"
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- name: "http"
|
||||
internal: 5601
|
||||
external: 5601
|
||||
volumes:
|
||||
- name: "certs"
|
||||
internal: /usr/share/kibana/config/certs
|
||||
external: "{{ docker.directories.config }}/elk/certs/"
|
||||
environment:
|
||||
- ELASTICSEARCH_HOSTS=["https://elasticsearch:9200"]
|
||||
- ELASTICSEARCH_USERNAME=kibana_system
|
||||
- ELASTICSEARCH_PASSWORD={{ vault.docker.elk.elastic.password }}
|
||||
- SERVER_SSL_ENABLED=true
|
||||
- SERVER_SSL_CERTIFICATE=/usr/share/kibana/config/certs/kibana.crt
|
||||
- SERVER_SSL_KEY=/usr/share/kibana/config/certs/kibana.key
|
||||
netcup_api_key: "{{ vault.netcup.api_key }}"
|
||||
netcup_api_password: "{{ vault.netcup.api_password }}"
|
||||
netcup_customer_id: "{{ vault.netcup.customer_id }}"
|
||||
|
||||
group_vars/k3s/secrets.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
$ANSIBLE_VAULT;1.1;AES256
|
||||
66323965326561656434636164616434353663633933346332373537663136323465323461306337
|
||||
3733663066623866333534366430663761653262646662650a323938306636653965656361646330
|
||||
66313965376537643033666165366435653862663231383366636166373238666334313836313138
|
||||
6164353263323136300a653236636334643832396534623735316465623133373838353163313136
|
||||
33303331313037376336623637356633383734343338386634646335616632646366366138643539
|
||||
37303531346430323330396637316632643065346537386433663431373437376261366263306264
|
||||
63323235303632356661373463383565613764323733343839653139613766633036346234316432
|
||||
37626432333935613566386631346161623133366438343630316237363730626234336462303132
|
||||
38323132363631653432643462306133323266333637346139343961623430363436663763383234
|
||||
66343232386263646633653739343963333364386630376638396261326563333935643437646638
|
||||
63656664633838336535613963393434336264656265356238306237626361336533643363323838
|
||||
30376236613236386133383130633164306632323630383932383432353439646266386239383834
|
||||
32346431306662346166653738333138643733623739623536303639663136336533373230643533
|
||||
64323037303161306435316662653237356161393239656362383261306366336134353438326233
|
||||
62363532396336616261383735386535396363386339333962623233383534393033306662666266
|
||||
66316237616137366639333439613732666638376163373235306663323762613466363636346337
|
||||
38393762653537316134316234363066363439623164356237313566626533326332646663313838
|
||||
38383633616538353833353634376236656433383464303538613663383838633538616136313365
|
||||
64643438316638333433366137656634353039663763353734616432306465386563353665666136
|
||||
63383739323038333537396433303332343235383562376438633237663465396366643438353862
|
||||
32646637323530356432386662613366323234323639653139306665623865613666623133656465
|
||||
31636334666638623939393366663935363434613731386365303130343439376430613331663561
|
||||
30353738346138343563383738393666333761333231303366386563303165363039313263343563
|
||||
36303533353165323461376461623665313938356535363462663737643265636137613366616639
|
||||
38383761343161336462373563383338393435326331353132333336666330306638
|
||||
@@ -3,7 +3,7 @@ db:
  user: "postgres"
  name: "k3s"
  user: "k3s"
  password: "{{ vault.k3s.postgres.db.password }}"
  password: "{{ vault_k3s.postgres.db.password }}"
  listen_address: "{{ k3s.db.ip }}"

k3s:

group_vars/proxmox/containers.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
lxcs:
  - name: "test-lxc-00"

group_vars/proxmox/secrets.yml (new file, 15 lines)
@@ -0,0 +1,15 @@
$ANSIBLE_VAULT;1.1;AES256
|
||||
35333866323538343132373761316430616539643436646637633131366232346566656438303438
|
||||
3539333661363964633834613161626134323533653737650a613832323436663739663162303066
|
||||
31333130646631306539356233346632636132346539343734393065353033613865363466646632
|
||||
6565343937666530330a326130393934326435643837323631653862313232363466643534306131
|
||||
62376132383137336230366538326364663362346137613930633161663834393835623935373164
|
||||
65623564633765653137623361376130623363613263313835366464313039613532323661363461
|
||||
37366438616566643537656639316665363339633737363539636364316335663639303364663366
|
||||
62653734343364663830633534643931656439313763366138323663373464303137323864313637
|
||||
65316135343464393031343166366338323839326631623533343931353833643232643339386231
|
||||
38623735386465383964653663346631376531376261353933346661666131353533633331353437
|
||||
63336366623333653732306130316264393865633338653238303861646535343837396232366134
|
||||
63343037636361323239376436326431623165326366383561323832323730636532623039383734
|
||||
66663139656262643038303435346666323762343661336234663131343531636161636536646465
|
||||
6530333864323262363536393562346362306161653162346132
|
||||
group_vars/proxmox/secrets_vm.yml (new file, 20 lines)
@@ -0,0 +1,20 @@
$ANSIBLE_VAULT;1.1;AES256
|
||||
35616266333838306161336339353538306634373132626132643732303066303163343630333630
|
||||
6338393762616262303038373334663230383464643836370a656538393531393134616463643239
|
||||
36383330653339393362353838313639333432643535643833396535653632376336613130646663
|
||||
3532646538363137630a363731613235653935316531616430346264643837306434386333373033
|
||||
33663135653931373963343734366562386263663939383536663439383537333264666233343233
|
||||
62626162666538333435396638393338393734656131303065616534613733353335643939333765
|
||||
38326237343337363064666530303664326563633262313432343030336266373437353837346461
|
||||
63333363626164316638346635666537613963383537313965373638303732353365623166363736
|
||||
31633239646262613539646637663664313337353465636366313338303439613638653530656631
|
||||
62396536316561623736633631623336313537646138383431633538303163303261323864383538
|
||||
38626338373332653561343036323236383337343037356366626230646432646538373836303063
|
||||
61346339376561626630653562346439306561643664666437386562356535303264646338326261
|
||||
33636536663161366635666264663539653037306339316233643662643134396636636162656333
|
||||
36666139376263646130333263653335333165356462363434373439313330383331356138333431
|
||||
31633362343639376436616339656561316433346532346533336261383433366366396261366134
|
||||
35363264373335616165643665653466613434386630373232386261393464376361313131386462
|
||||
33333531336334386562356338623233313862316232356562373561633364363263306465333439
|
||||
37386631626538636365376464653837333662363361653237366161316431653266643238346336
|
||||
363863376530613036313866323965326638
|
||||
group_vars/proxmox/vars.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
proxmox_api_user: root
proxmox_api_host: 192.168.20.12
proxmox_api_password: "{{ vault.pve.aya01.root.sudo }}"

group_vars/proxmox/vms.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
vms:
  - name: "docker-host10"
    node: "lulu"
    vmid: 410
    cores: 2
    memory: 4096 # in MiB
    net:
      net0: "virtio,bridge=vmbr0,firewall=1"
    boot_image: "{{ proxmox_cloud_init_images.debian.name }}"
    ciuser: "{{ user }}"
    sshkeys: "{{ pubkey }}"
    disk_size: 128 # in Gb
    hostpci:
      hostpci0: "0000:00:02.0"
  - name: "docker-host11"
    node: "lulu"
    vmid: 411
    cores: 2
    memory: 4096 # in MiB
    net:
      net0: "virtio,bridge=vmbr0,firewall=1"
    boot_image: "{{ proxmox_cloud_init_images.ubuntu.name }}"
    ciuser: "{{ user }}"
    sshkeys: "{{ pubkey }}"
    disk_size: 128 # in Gb
  - name: "docker-host12"
    node: "naruto01"
    vmid: 412
    cores: 4
    memory: 8192
    net:
      net0: "virtio,bridge=vmbr0,firewall=1"
    boot_image: "{{ proxmox_cloud_init_images.ubuntu.name }}"
    ciuser: "{{ user }}"
    sshkeys: "{{ pubkey }}"
    disk_size: 128 # in Gb

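The role or playbook that consumes `vms` is not included in this commit range. As a hedged sketch only, these definitions map naturally onto `community.general.proxmox_kvm` together with the `proxmox_api_*` variables from group_vars/proxmox/vars.yml (the parameter selection below is an assumption, not the repository's actual task):

```yaml
# Hypothetical provisioning task; the repository's real role is not part of this diff.
- name: Create VMs defined in group_vars/proxmox/vms.yml
  community.general.proxmox_kvm:
    api_host: "{{ proxmox_api_host }}"
    api_user: "{{ proxmox_api_user }}"
    api_password: "{{ proxmox_api_password }}"
    node: "{{ item.node }}"
    vmid: "{{ item.vmid }}"
    name: "{{ item.name }}"
    cores: "{{ item.cores }}"
    memory: "{{ item.memory }}"
    net: "{{ item.net }}"
    ciuser: "{{ item.ciuser }}"
    sshkeys: "{{ item.sshkeys }}"
    state: present
  loop: "{{ vms }}"
```
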
@@ -1,10 +1,10 @@
---
ansible_user: "root"
ansible_host: 192.168.20.12
ansible_port: 22
ansible_ssh_private_key_file: "{{ pk_path }}"
ansible_become_pass: "{{ vault.pve.aya01.root.sudo }}"

host:
  hostname: "aya01"
  ip: "{{ ansible_host }}"
# ansible_user: "root"
# ansible_host: 192.168.20.12
# ansible_port: 22
# ansible_ssh_private_key_file: "{{ pk_path }}"
# ansible_become_pass: "{{ vault.pve.aya01.root.sudo }}"
#
# host:
# hostname: "aya01"
# ip: "{{ ansible_host }}"

@@ -1,10 +1,10 @@
|
||||
---
|
||||
ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.34
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
# Configure this in ~/.ssh/config*
|
||||
# ansible_user: "{{ user }}"
|
||||
# ansible_host: 192.168.20.34
|
||||
# ansible_port: 22
|
||||
# ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.docker.host00.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "docker-host00"
|
||||
ip: "{{ ansible_host }}"
|
||||
# host:
|
||||
# hostname: "docker-host00"
|
||||
# ip: "192.168.20.34"
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
---
|
||||
ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.35
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
# Configure this in ~/.ssh/config*
|
||||
# ansible_user: "{{ user }}"
|
||||
# ansible_host: 192.168.20.35
|
||||
# ansible_port: 22
|
||||
# ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.docker.host01.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "docker-host01"
|
||||
ip: "{{ ansible_host }}"
|
||||
#
|
||||
# host:
|
||||
# hostname: "docker-host01"
|
||||
# ip: "192.168.20.35"
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
---
|
||||
ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.36
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
# Configure this in ~/.ssh/config*
|
||||
# ansible_user: "{{ user }}"
|
||||
# ansible_host: 192.168.20.36
|
||||
# ansible_port: 22
|
||||
# ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.docker.host02.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "docker-host02"
|
||||
ip: "{{ ansible_host }}"
|
||||
# host:
|
||||
# hostname: "docker-host02"
|
||||
# ip: "192.168.20.36"
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
---
|
||||
ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.37
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
# ansible_user: "{{ user }}"
|
||||
# ansible_host: 192.168.20.37
|
||||
# ansible_port: 22
|
||||
# ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.docker.lb.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "docker-lb"
|
||||
ip: "{{ ansible_host }}"
|
||||
# host:
|
||||
# hostname: "docker-lb"
|
||||
# ip: "192.168.20.37"
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
---
|
||||
ansible_user: "root"
|
||||
ansible_host: 192.168.20.14
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.pve.inko.root.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "inko"
|
||||
ip: "{{ ansible_host }}"
|
||||
# ansible_user: "root"
|
||||
# ansible_host: 192.168.20.14
|
||||
# ansible_port: 22
|
||||
# ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
# ansible_become_pass: "{{ vault.pve.inko.root.sudo }}"
|
||||
#
|
||||
# host:
|
||||
# hostname: "inko"
|
||||
# ip: "{{ ansible_host }}"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.25
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.agent00.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.agent00.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-agent00"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.26
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.agent01.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.agent01.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-agent01"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.27
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.agent02.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.agent02.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-agent02"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.22
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.loadbalancer.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.loadbalancer.sudo }}"
|
||||
host:
|
||||
hostname: "k3s-loadbalancer"
|
||||
ip: "{{ ansible_host }}"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.32
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.longhorn00.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.longhorn00.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-longhorn00"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.33
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.longhorn01.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.longhorn01.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-longhorn01"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.31
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.longhorn02.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.longhorn02.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-longhorn02"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.23
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.postgres.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.postgres.sudo }}"
|
||||
host:
|
||||
hostname: "k3s-postgres"
|
||||
ip: "{{ ansible_host }}"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.21
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.server00.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.server00.sudo }}"
|
||||
host:
|
||||
hostname: "k3s-server00"
|
||||
ip: "{{ ansible_host }}"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.24
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.server01.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.server01.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-server01"
|
||||
|
||||
@@ -3,7 +3,7 @@ ansible_user: "{{ user }}"
|
||||
ansible_host: 192.168.20.30
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.k3s.server02.sudo }}"
|
||||
ansible_become_pass: "{{ vault_k3s.server02.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "k3s-server02"
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
---
|
||||
ansible_user: "root"
|
||||
ansible_host: 192.168.20.28
|
||||
ansible_port: 22
|
||||
ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
ansible_become_pass: "{{ vault.pve.lulu.root.sudo }}"
|
||||
|
||||
host:
|
||||
hostname: "lulu"
|
||||
ip: "{{ ansible_host }}"
|
||||
# ansible_user: "root"
|
||||
# ansible_host: 192.168.20.28
|
||||
# ansible_port: 22
|
||||
# ansible_ssh_private_key_file: "{{ pk_path }}"
|
||||
# ansible_become_pass: "{{ vault.pve.lulu.root.sudo }}"
|
||||
#
|
||||
# host:
|
||||
# hostname: "lulu"
|
||||
# ip: "{{ ansible_host }}"
|
||||
|
||||
@@ -1,2 +0,0 @@
[local]
test ansible_connection=local ansible_become_pass=vagrant
@@ -1,10 +0,0 @@
|
||||
---
|
||||
- name: Run the common role on k3s
|
||||
hosts: k3s
|
||||
gather_facts: yes
|
||||
vars_files:
|
||||
- secrets.yml
|
||||
roles:
|
||||
- role: common
|
||||
tags:
|
||||
- common
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
- name: Set up Servers
|
||||
hosts: docker_host
|
||||
gather_facts: yes
|
||||
gather_facts: true
|
||||
vars_files:
|
||||
- secrets.yml
|
||||
roles:
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
---
|
||||
- name: Set up reverse proxy for docker
|
||||
hosts: docker_lb
|
||||
gather_facts: yes
|
||||
hosts: docker
|
||||
gather_facts: true
|
||||
vars_files:
|
||||
- secrets.yml
|
||||
roles:
|
||||
- role: common
|
||||
tags:
|
||||
- common
|
||||
when: inventory_hostname in groups["docker_lb"]
|
||||
- role: reverse_proxy
|
||||
tags:
|
||||
- reverse_proxy
|
||||
when: inventory_hostname in groups["docker_lb"]
|
||||
|
||||
5 playbooks/docker.yml Normal file
@@ -0,0 +1,5 @@
---
- name: Setup Docker Hosts
  ansible.builtin.import_playbook: docker-host.yml
- name: Setup Docker load balancer
  ansible.builtin.import_playbook: docker-lb.yml
17 playbooks/proxmox.yml Normal file
@@ -0,0 +1,17 @@
---
- name: Run proxmox vm playbook
  hosts: proxmox
  gather_facts: true
  vars_files:
    - secrets.yml
  vars:
    is_localhost: "{{ inventory_hostname == '127.0.0.1' }}"
    is_proxmox_node: "{{ 'proxmox_nodes' in group_names }}"
  roles:
    - role: common
      tags:
        - common
      when: not is_localhost
    - role: proxmox
      tags:
        - proxmox
@@ -1,9 +0,0 @@
|
||||
---
|
||||
- hosts: db
|
||||
gather_facts: yes
|
||||
vars_files:
|
||||
- secrets.yml
|
||||
tasks:
|
||||
- name: Print the database connection string
|
||||
debug:
|
||||
msg: "{{ k3s_db_connection_string }}"
|
||||
@@ -1,5 +0,0 @@
|
||||
- name: Provision Local Ubuntu Machine
|
||||
hosts: local
|
||||
gather_facts: true
|
||||
roles:
|
||||
- ubuntu
|
||||
@@ -1,7 +1,14 @@
|
||||
[proxmox]
|
||||
127.0.0.1 ansible_connection=local
|
||||
|
||||
[proxmox:children]
|
||||
proxmox_nodes
|
||||
|
||||
[proxmox_nodes]
|
||||
aya01
|
||||
lulu
|
||||
inko
|
||||
naruto01
|
||||
|
||||
[k3s]
|
||||
k3s-postgres
|
||||
@@ -16,7 +23,7 @@ k3s-loadbalancer
|
||||
k3s-agent[00:02]
|
||||
k3s-server[00:02]
|
||||
k3s-longhorn[00:02]
|
||||
docker-host[00:02]
|
||||
# docker-host[00:01]
|
||||
|
||||
[k3s_nodes]
|
||||
k3s-server[00:02]
|
||||
@@ -24,7 +31,9 @@ k3s-agent[00:02]
|
||||
k3s-longhorn[00:02]
|
||||
|
||||
[docker]
|
||||
docker-host[00:02]
|
||||
docker-host01
|
||||
docker-host10
|
||||
docker-host12
|
||||
docker-lb
|
||||
|
||||
[vps]
|
||||
@@ -46,13 +55,9 @@ k3s-postgres
|
||||
k3s-loadbalancer
|
||||
|
||||
[docker_host]
|
||||
docker-host[00:02]
|
||||
docker-host01
|
||||
docker-host10
|
||||
docker-host12
|
||||
|
||||
[docker_lb]
|
||||
docker-lb
|
||||
|
||||
[local]
|
||||
localhost ansible_connection=local
|
||||
|
||||
[vm:vars]
|
||||
ansible_ssh_common_args='-o ProxyCommand="ssh -p 22 -W %h:%p -q aya01"'
|
||||
7 requirements.txt Normal file
@@ -0,0 +1,7 @@
certifi==2025.1.31
charset-normalizer==3.4.1
idna==3.10
nc-dnsapi==0.1.3
proxmoxer==2.2.0
requests==2.32.3
urllib3==2.3.0
80
roles/common/files/ghostty/infocmp
Normal file
80
roles/common/files/ghostty/infocmp
Normal file
@@ -0,0 +1,80 @@
|
||||
xterm-ghostty|ghostty|Ghostty,
|
||||
am, bce, ccc, hs, km, mc5i, mir, msgr, npc, xenl, AX, Su, Tc, XT, fullkbd,
|
||||
colors#0x100, cols#80, it#8, lines#24, pairs#0x7fff,
|
||||
acsc=++\,\,--..00``aaffgghhiijjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~,
|
||||
bel=^G, blink=\E[5m, bold=\E[1m, cbt=\E[Z, civis=\E[?25l,
|
||||
clear=\E[H\E[2J, cnorm=\E[?12l\E[?25h, cr=\r,
|
||||
csr=\E[%i%p1%d;%p2%dr, cub=\E[%p1%dD, cub1=^H,
|
||||
cud=\E[%p1%dB, cud1=\n, cuf=\E[%p1%dC, cuf1=\E[C,
|
||||
cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A,
|
||||
cvvis=\E[?12;25h, dch=\E[%p1%dP, dch1=\E[P, dim=\E[2m,
|
||||
dl=\E[%p1%dM, dl1=\E[M, dsl=\E]2;\007, ech=\E[%p1%dX,
|
||||
ed=\E[J, el=\E[K, el1=\E[1K, flash=\E[?5h$<100/>\E[?5l,
|
||||
fsl=^G, home=\E[H, hpa=\E[%i%p1%dG, ht=^I, hts=\EH,
|
||||
ich=\E[%p1%d@, ich1=\E[@, il=\E[%p1%dL, il1=\E[L, ind=\n,
|
||||
indn=\E[%p1%dS,
|
||||
initc=\E]4;%p1%d;rgb:%p2%{255}%*%{1000}%/%2.2X/%p3%{255}%*%{1000}%/%2.2X/%p4%{255}%*%{1000}%/%2.2X\E\\,
|
||||
invis=\E[8m, kDC=\E[3;2~, kEND=\E[1;2F, kHOM=\E[1;2H,
|
||||
kIC=\E[2;2~, kLFT=\E[1;2D, kNXT=\E[6;2~, kPRV=\E[5;2~,
|
||||
kRIT=\E[1;2C, kbs=^?, kcbt=\E[Z, kcub1=\EOD, kcud1=\EOB,
|
||||
kcuf1=\EOC, kcuu1=\EOA, kdch1=\E[3~, kend=\EOF, kent=\EOM,
|
||||
kf1=\EOP, kf10=\E[21~, kf11=\E[23~, kf12=\E[24~,
|
||||
kf13=\E[1;2P, kf14=\E[1;2Q, kf15=\E[1;2R, kf16=\E[1;2S,
|
||||
kf17=\E[15;2~, kf18=\E[17;2~, kf19=\E[18;2~, kf2=\EOQ,
|
||||
kf20=\E[19;2~, kf21=\E[20;2~, kf22=\E[21;2~,
|
||||
kf23=\E[23;2~, kf24=\E[24;2~, kf25=\E[1;5P, kf26=\E[1;5Q,
|
||||
kf27=\E[1;5R, kf28=\E[1;5S, kf29=\E[15;5~, kf3=\EOR,
|
||||
kf30=\E[17;5~, kf31=\E[18;5~, kf32=\E[19;5~,
|
||||
kf33=\E[20;5~, kf34=\E[21;5~, kf35=\E[23;5~,
|
||||
kf36=\E[24;5~, kf37=\E[1;6P, kf38=\E[1;6Q, kf39=\E[1;6R,
|
||||
kf4=\EOS, kf40=\E[1;6S, kf41=\E[15;6~, kf42=\E[17;6~,
|
||||
kf43=\E[18;6~, kf44=\E[19;6~, kf45=\E[20;6~,
|
||||
kf46=\E[21;6~, kf47=\E[23;6~, kf48=\E[24;6~,
|
||||
kf49=\E[1;3P, kf5=\E[15~, kf50=\E[1;3Q, kf51=\E[1;3R,
|
||||
kf52=\E[1;3S, kf53=\E[15;3~, kf54=\E[17;3~,
|
||||
kf55=\E[18;3~, kf56=\E[19;3~, kf57=\E[20;3~,
|
||||
kf58=\E[21;3~, kf59=\E[23;3~, kf6=\E[17~, kf60=\E[24;3~,
|
||||
kf61=\E[1;4P, kf62=\E[1;4Q, kf63=\E[1;4R, kf7=\E[18~,
|
||||
kf8=\E[19~, kf9=\E[20~, khome=\EOH, kich1=\E[2~,
|
||||
kind=\E[1;2B, kmous=\E[<, knp=\E[6~, kpp=\E[5~,
|
||||
kri=\E[1;2A, oc=\E]104\007, op=\E[39;49m, rc=\E8,
|
||||
rep=%p1%c\E[%p2%{1}%-%db, rev=\E[7m, ri=\EM,
|
||||
rin=\E[%p1%dT, ritm=\E[23m, rmacs=\E(B, rmam=\E[?7l,
|
||||
rmcup=\E[?1049l, rmir=\E[4l, rmkx=\E[?1l\E>, rmso=\E[27m,
|
||||
rmul=\E[24m, rs1=\E]\E\\\Ec, sc=\E7,
|
||||
setab=\E[%?%p1%{8}%<%t4%p1%d%e%p1%{16}%<%t10%p1%{8}%-%d%e48;5;%p1%d%;m,
|
||||
setaf=\E[%?%p1%{8}%<%t3%p1%d%e%p1%{16}%<%t9%p1%{8}%-%d%e38;5;%p1%d%;m,
|
||||
sgr=%?%p9%t\E(0%e\E(B%;\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p1%p3%|%t;7%;%?%p4%t;5%;%?%p7%t;8%;m,
|
||||
sgr0=\E(B\E[m, sitm=\E[3m, smacs=\E(0, smam=\E[?7h,
|
||||
smcup=\E[?1049h, smir=\E[4h, smkx=\E[?1h\E=, smso=\E[7m,
|
||||
smul=\E[4m, tbc=\E[3g, tsl=\E]2;, u6=\E[%i%d;%dR, u7=\E[6n,
|
||||
u8=\E[?%[;0123456789]c, u9=\E[c, vpa=\E[%i%p1%dd,
|
||||
BD=\E[?2004l, BE=\E[?2004h, Clmg=\E[s,
|
||||
Cmg=\E[%i%p1%d;%p2%ds, Dsmg=\E[?69l, E3=\E[3J,
|
||||
Enmg=\E[?69h, Ms=\E]52;%p1%s;%p2%s\007, PE=\E[201~,
|
||||
PS=\E[200~, RV=\E[>c, Se=\E[2 q,
|
||||
Setulc=\E[58:2::%p1%{65536}%/%d:%p1%{256}%/%{255}%&%d:%p1%{255}%&%d%;m,
|
||||
Smulx=\E[4:%p1%dm, Ss=\E[%p1%d q,
|
||||
Sync=\E[?2026%?%p1%{1}%-%tl%eh%;,
|
||||
XM=\E[?1006;1000%?%p1%{1}%=%th%el%;, XR=\E[>0q,
|
||||
fd=\E[?1004l, fe=\E[?1004h, kDC3=\E[3;3~, kDC4=\E[3;4~,
|
||||
kDC5=\E[3;5~, kDC6=\E[3;6~, kDC7=\E[3;7~, kDN=\E[1;2B,
|
||||
kDN3=\E[1;3B, kDN4=\E[1;4B, kDN5=\E[1;5B, kDN6=\E[1;6B,
|
||||
kDN7=\E[1;7B, kEND3=\E[1;3F, kEND4=\E[1;4F,
|
||||
kEND5=\E[1;5F, kEND6=\E[1;6F, kEND7=\E[1;7F,
|
||||
kHOM3=\E[1;3H, kHOM4=\E[1;4H, kHOM5=\E[1;5H,
|
||||
kHOM6=\E[1;6H, kHOM7=\E[1;7H, kIC3=\E[2;3~, kIC4=\E[2;4~,
|
||||
kIC5=\E[2;5~, kIC6=\E[2;6~, kIC7=\E[2;7~, kLFT3=\E[1;3D,
|
||||
kLFT4=\E[1;4D, kLFT5=\E[1;5D, kLFT6=\E[1;6D,
|
||||
kLFT7=\E[1;7D, kNXT3=\E[6;3~, kNXT4=\E[6;4~,
|
||||
kNXT5=\E[6;5~, kNXT6=\E[6;6~, kNXT7=\E[6;7~,
|
||||
kPRV3=\E[5;3~, kPRV4=\E[5;4~, kPRV5=\E[5;5~,
|
||||
kPRV6=\E[5;6~, kPRV7=\E[5;7~, kRIT3=\E[1;3C,
|
||||
kRIT4=\E[1;4C, kRIT5=\E[1;5C, kRIT6=\E[1;6C,
|
||||
kRIT7=\E[1;7C, kUP=\E[1;2A, kUP3=\E[1;3A, kUP4=\E[1;4A,
|
||||
kUP5=\E[1;5A, kUP6=\E[1;6A, kUP7=\E[1;7A, kxIN=\E[I,
|
||||
kxOUT=\E[O, rmxx=\E[29m, rv=\E\\[[0-9]+;[0-9]+;[0-9]+c,
|
||||
setrgbb=\E[48:2:%p1%d:%p2%d:%p3%dm,
|
||||
setrgbf=\E[38:2:%p1%d:%p2%d:%p3%dm, smxx=\E[9m,
|
||||
xm=\E[<%i%p3%d;%p1%d;%p2%d;%?%p4%tM%em%;,
|
||||
xr=\EP>\\|[ -~]+a\E\\,
|
||||
19
roles/common/files/ssh/root/sshd_config
Normal file
19
roles/common/files/ssh/root/sshd_config
Normal file
@@ -0,0 +1,19 @@
|
||||
Protocol 2
|
||||
PermitRootLogin yes
|
||||
MaxAuthTries 3
|
||||
PubkeyAuthentication yes
|
||||
PasswordAuthentication no
|
||||
PermitEmptyPasswords no
|
||||
ChallengeResponseAuthentication no
|
||||
UsePAM yes
|
||||
AllowAgentForwarding no
|
||||
AllowTcpForwarding yes
|
||||
X11Forwarding no
|
||||
PrintMotd no
|
||||
TCPKeepAlive no
|
||||
ClientAliveCountMax 2
|
||||
TrustedUserCAKeys /etc/ssh/vault-ca.pub
|
||||
UseDNS yes
|
||||
AcceptEnv LANG LC_*
|
||||
Subsystem sftp /usr/lib/openssh/sftp-server
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
Include /etc/ssh/sshd_config.d/*.conf
|
||||
Protocol 2
|
||||
PermitRootLogin no
|
||||
MaxAuthTries 3
|
||||
@@ -13,6 +12,7 @@ X11Forwarding no
|
||||
PrintMotd no
|
||||
TCPKeepAlive no
|
||||
ClientAliveCountMax 2
|
||||
TrustedUserCAKeys /etc/ssh/vault-ca.pub
|
||||
UseDNS yes
|
||||
AcceptEnv LANG LC_*
|
||||
Subsystem sftp /usr/lib/openssh/sftp-server
|
||||
1
roles/common/files/ssh/vault-ca.pub
Normal file
1
roles/common/files/ssh/vault-ca.pub
Normal file
@@ -0,0 +1 @@
|
||||
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDxIbkko72kVSfYDjJpiMH9SjHUGqBn3MbBvmotsPQhybFgnnkBpX/3fM9olP+Z6PGsmbOEs0fOjPS6uY5hjKcKsyHdZfS6cA4wjY/DL8fwATAW5FCDBtMpdg2/sb8j9jutHHs4sQeRBolVwKcv+ZAaJNnOzNHwxVUfT9bNwShthnAFjkY7oZo657FRomlkDJjmGQuratP0veKA8jYzqqPWwWidTGQerLYTyJ3Z8pbQa5eN7svrvabjjDLbVTDESE8st9WEmwvAwoj7Kz+WovCy0Uz7LRFVmaRiapM8SXtPPUC0xfyzAB3NxwBtxizdUMlShvLcL6cujcUBMulVMpsqEaOESTpmVTrMJhnJPZG/3j9ziGoYIa6hMj1J9/qLQ5dDNVVXMxw99G31x0LJoy12IE90P4Cahux8iN0Cp4oB4+B6/qledxs1fcRzsnQY/ickjKhqcJwgHzsnwjDkeYRaYte5x4f/gJ77kA20nPto7mxr2mhWot/i9B1KlMURVXOH/q4nrzhJ0hPJpM0UtzQ58TmzE4Osf/B5yoe8V//6XnelbmG/nKCIzg12d7PvaLjbFMn8IgOwDMRlip+vpyadRr/+pCawrfo4vLF7BsnJ84aoByIpbwaysgaYHtjfZWImorMVkgviC4O6Hn9/ZiLNze2A9DaNUnLVJ0nYNbmv9Q==
|
||||
@@ -2,11 +2,23 @@
|
||||
- name: Copy bash-configs
|
||||
ansible.builtin.template:
|
||||
src: "files/bash/{{ item }}"
|
||||
dest: "/home/{{ user }}/.{{ item }}"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
dest: "{{ ansible_env.HOME }}/.{{ item }}"
|
||||
owner: "{{ ansible_user_id }}"
|
||||
group: "{{ ansible_user_id }}"
|
||||
mode: "644"
|
||||
loop:
|
||||
- bashrc
|
||||
- bash_aliases
|
||||
become: true
|
||||
|
||||
- name: Copy ghostty infocmp
|
||||
ansible.builtin.copy:
|
||||
src: files/ghostty/infocmp
|
||||
dest: "{{ ansible_env.HOME }}/ghostty"
|
||||
owner: "{{ ansible_user_id }}"
|
||||
group: "{{ ansible_user_id }}"
|
||||
mode: "0644"
|
||||
register: ghostty_terminfo
|
||||
|
||||
- name: Compile ghostty terminfo
|
||||
ansible.builtin.command: "tic -x {{ ansible_env.HOME }}/ghostty"
|
||||
when: ghostty_terminfo.changed
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
url: https://raw.githubusercontent.com/eza-community/eza/main/deb.asc
|
||||
dest: /etc/apt/keyrings/gierens.asc
|
||||
mode: "0644"
|
||||
register: gpg_key_result
|
||||
become: true
|
||||
|
||||
- name: Add Gierens repository to apt sources
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
---
|
||||
- name: Set a hostname
|
||||
ansible.builtin.hostname:
|
||||
name: "{{ host.hostname }}"
|
||||
name: "{{ inventory_hostname }}"
|
||||
become: true
|
||||
|
||||
- name: Update /etc/hosts to reflect the new hostname
|
||||
ansible.builtin.lineinfile:
|
||||
path: /etc/hosts
|
||||
regexp: '^127\.0\.1\.1'
|
||||
line: "127.0.1.1 {{ host.hostname }}"
|
||||
line: "127.0.1.1 {{ inventory_hostname }}"
|
||||
state: present
|
||||
backup: true
|
||||
become: true
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
---
|
||||
- name: Configure Time
|
||||
ansible.builtin.include_tasks: time.yml
|
||||
- name: Configure Hostname
|
||||
ansible.builtin.include_tasks: hostname.yml
|
||||
- name: Configure Packages
|
||||
ansible.builtin.include_tasks: packages.yml
|
||||
- name: Configure Hostname
|
||||
ansible.builtin.include_tasks: hostname.yml
|
||||
- name: Configure Extra-Packages
|
||||
ansible.builtin.include_tasks: extra_packages.yml
|
||||
- name: Configure Bash
|
||||
|
||||
@@ -5,9 +5,24 @@
|
||||
upgrade: true
|
||||
autoremove: true
|
||||
become: true
|
||||
when: ansible_user_id != "root"
|
||||
|
||||
- name: Install base packages
|
||||
ansible.builtin.apt:
|
||||
name: "{{ common_packages }}"
|
||||
state: present
|
||||
become: true
|
||||
when: ansible_user_id != "root"
|
||||
|
||||
- name: Update and upgrade packages
|
||||
ansible.builtin.apt:
|
||||
update_cache: true
|
||||
upgrade: true
|
||||
autoremove: true
|
||||
when: ansible_user_id == "root"
|
||||
|
||||
- name: Install base packages
|
||||
ansible.builtin.apt:
|
||||
name: "{{ common_packages }}"
|
||||
state: present
|
||||
when: ansible_user_id == "root"
|
||||
|
||||
@@ -1,17 +1,28 @@
|
||||
---
|
||||
- name: Copy sshd_config
|
||||
- name: Copy user sshd_config
|
||||
ansible.builtin.template:
|
||||
src: templates/ssh/sshd_config
|
||||
src: files/ssh/user/sshd_config
|
||||
dest: /etc/ssh/sshd_config
|
||||
mode: "644"
|
||||
backup: true
|
||||
notify:
|
||||
- Restart sshd
|
||||
become: true
|
||||
when: ansible_user_id != "root"
|
||||
|
||||
- name: Copy root sshd_config
|
||||
ansible.builtin.template:
|
||||
src: files/ssh/root/sshd_config
|
||||
dest: /etc/ssh/sshd_config
|
||||
mode: "644"
|
||||
backup: true
|
||||
notify:
|
||||
- Restart sshd
|
||||
when: ansible_user_id == "root"
|
||||
|
||||
- name: Copy pubkey
|
||||
ansible.builtin.copy:
|
||||
content: "{{ pubkey }}"
|
||||
dest: "/home/{{ user }}/.ssh/authorized_keys"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
src: files/ssh/vault-ca.pub
|
||||
dest: "/etc/ssh/vault-ca.pub"
|
||||
mode: "644"
|
||||
become: true
|
||||
|
||||
@@ -2,3 +2,10 @@
|
||||
- name: Set timezone to "{{ timezone }}"
|
||||
community.general.timezone:
|
||||
name: "{{ timezone }}"
|
||||
become: true
|
||||
when: ansible_user_id != "root"
|
||||
|
||||
- name: Set timezone to "{{ timezone }}"
|
||||
community.general.timezone:
|
||||
name: "{{ timezone }}"
|
||||
when: ansible_user_id == "root"
|
||||
|
||||
15 roles/common/vars/main.yml Normal file
@@ -0,0 +1,15 @@
common_packages:
  - build-essential
  - curl
  - git
  - iperf3
  - neovim
  - rsync
  - smartmontools
  - sudo
  - systemd-timesyncd
  - tree
  - screen
  - bat
  - fd-find
  - ripgrep
@@ -8,4 +8,14 @@
|
||||
- name: Restart compose
|
||||
community.docker.docker_compose_v2:
|
||||
project_src: "{{ docker.directories.compose }}"
|
||||
state: restarted
|
||||
state: present
|
||||
retries: 3
|
||||
delay: 5
|
||||
become: true
|
||||
|
||||
- name: Restart host
|
||||
ansible.builtin.reboot:
|
||||
connect_timeout: 5
|
||||
reboot_timeout: 600
|
||||
test_command: whoami
|
||||
become: true
|
||||
|
||||
50
roles/docker_host/tasks/10_setup.yml
Normal file
50
roles/docker_host/tasks/10_setup.yml
Normal file
@@ -0,0 +1,50 @@
|
||||
---
|
||||
- name: Check if debian.sources file exists
|
||||
ansible.builtin.stat:
|
||||
path: /etc/apt/sources.list.d/debian.sources
|
||||
register: debian_sources_stat
|
||||
|
||||
- name: Replace Components line to include non-free and non-free-firmware
|
||||
ansible.builtin.replace:
|
||||
path: /etc/apt/sources.list.d/debian.sources
|
||||
regexp: "^Components:.*$"
|
||||
replace: "Components: main non-free non-free-firmware"
|
||||
when: debian_sources_stat.stat.exists
|
||||
become: true
|
||||
|
||||
- name: Setup VM Packages
|
||||
ansible.builtin.apt:
|
||||
name: "{{ item }}"
|
||||
state: present
|
||||
update_cache: true
|
||||
loop: "{{ docker_host_package_common_dependencies }}"
|
||||
become: true
|
||||
|
||||
- name: Gather installed package facts
|
||||
ansible.builtin.package_facts:
|
||||
manager: auto
|
||||
|
||||
- name: Filter for specific cloud kernel packages
|
||||
ansible.builtin.set_fact:
|
||||
cloud_kernel_packages: >-
|
||||
{{
|
||||
ansible_facts.packages.keys()
|
||||
| select('search', 'linux-image')
|
||||
| select('search', 'cloud')
|
||||
| list
|
||||
}}
|
||||
|
||||
- name: Use the list to remove the found packages
|
||||
ansible.builtin.apt:
|
||||
name: "{{ cloud_kernel_packages }}"
|
||||
state: absent
|
||||
autoremove: true
|
||||
when: cloud_kernel_packages | length > 0
|
||||
become: true
|
||||
|
||||
- name: Restart host
|
||||
ansible.builtin.reboot:
|
||||
connect_timeout: 5
|
||||
reboot_timeout: 600
|
||||
test_command: whoami
|
||||
become: true
|
||||
@@ -5,10 +5,12 @@
|
||||
state: present
|
||||
become: true
|
||||
|
||||
- name: Append the group docker to "{{ user }}"
|
||||
- name: Append the group docker to "{{ ansible_user_id }}"
|
||||
ansible.builtin.user:
|
||||
name: "{{ user }}"
|
||||
name: "{{ ansible_user_id }}"
|
||||
shell: /bin/bash
|
||||
groups: docker
|
||||
append: true
|
||||
become: true
|
||||
notify:
|
||||
- Restart host
|
||||
@@ -9,19 +9,20 @@
|
||||
- /media/series
|
||||
- /media/movies
|
||||
- /media/songs
|
||||
- "{{ docker.directories.opt }}"
|
||||
- "{{ docker.directories.local }}"
|
||||
- "{{ docker.directories.config }}"
|
||||
- "{{ docker.directories.compose }}"
|
||||
- /opt/local
|
||||
become: true
|
||||
|
||||
- name: Set ownership to {{ user }}
|
||||
- name: Set ownership to {{ ansible_user_id }}
|
||||
ansible.builtin.file:
|
||||
path: "{{ item }}"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
owner: "{{ ansible_user_id }}"
|
||||
group: "{{ ansible_user_id }}"
|
||||
loop:
|
||||
- "{{ docker.directories.opt }}"
|
||||
- /opt/local
|
||||
- "{{ docker.directories.local }}"
|
||||
- "{{ docker.directories.config }}"
|
||||
- "{{ docker.directories.compose }}"
|
||||
- /media
|
||||
become: true
|
||||
|
||||
31
roles/docker_host/tasks/50_provision.yml
Normal file
31
roles/docker_host/tasks/50_provision.yml
Normal file
@@ -0,0 +1,31 @@
|
||||
---
|
||||
- name: Set fact if this host should run Keycloak
|
||||
ansible.builtin.set_fact:
|
||||
is_keycloak_host: "{{ inventory_hostname in (services | selectattr('name', 'equalto', 'keycloak') | map(attribute='vm') | first) }}"
|
||||
|
||||
- name: Create Keycloak directories
|
||||
ansible.builtin.file:
|
||||
path: "{{ docker.directories.local }}/keycloak/"
|
||||
owner: "{{ ansible_user_id }}"
|
||||
group: "{{ ansible_user_id }}"
|
||||
state: directory
|
||||
mode: "0755"
|
||||
when: is_keycloak_host | bool
|
||||
become: true
|
||||
|
||||
- name: Setup Keycloak realms
|
||||
ansible.builtin.template:
|
||||
src: "templates/keycloak/realm.json.j2"
|
||||
dest: "{{ docker.directories.local }}/keycloak/{{ keycloak.realm }}-realm.json"
|
||||
owner: "{{ ansible_user_id }}"
|
||||
group: "{{ ansible_user_id }}"
|
||||
mode: "644"
|
||||
backup: true
|
||||
when: is_keycloak_host | bool
|
||||
loop: "{{ keycloak_config.realms }}"
|
||||
loop_control:
|
||||
loop_var: keycloak
|
||||
notify:
|
||||
- Restart docker
|
||||
- Restart compose
|
||||
become: true
|
||||
@@ -3,8 +3,8 @@
|
||||
ansible.builtin.template:
|
||||
src: "templates/compose.yaml.j2"
|
||||
dest: "{{ docker.directories.compose }}/compose.yaml"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
owner: "{{ ansible_user_id }}"
|
||||
group: "{{ ansible_user_id }}"
|
||||
mode: "644"
|
||||
backup: true
|
||||
notify:
|
||||
@@ -1,18 +1,20 @@
|
||||
---
|
||||
- name: Setup VM
|
||||
ansible.builtin.include_tasks: setup.yml
|
||||
|
||||
ansible.builtin.include_tasks: 10_setup.yml
|
||||
- name: Install docker
|
||||
ansible.builtin.include_tasks: installation.yml
|
||||
ansible.builtin.include_tasks: 20_installation.yml
|
||||
|
||||
- name: Setup user and group for docker
|
||||
ansible.builtin.include_tasks: user_group_setup.yml
|
||||
ansible.builtin.include_tasks: 30_user_group_setup.yml
|
||||
|
||||
- name: Setup directory structure for docker
|
||||
ansible.builtin.include_tasks: directory_setup.yml
|
||||
ansible.builtin.include_tasks: 40_directory_setup.yml
|
||||
|
||||
- name: Deploy configs
|
||||
ansible.builtin.include_tasks: 50_provision.yml
|
||||
|
||||
- name: Deploy docker compose
|
||||
ansible.builtin.include_tasks: deploy_compose.yml
|
||||
ansible.builtin.include_tasks: 60_deploy_compose.yml
|
||||
|
||||
- name: Publish metrics
|
||||
ansible.builtin.include_tasks: export.yml
|
||||
ansible.builtin.include_tasks: 70_export.yml
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
---
|
||||
- name: Enable HW accelerate for VM
|
||||
ansible.builtin.apt:
|
||||
name: "{{ item }}"
|
||||
state: present
|
||||
loop:
|
||||
- firmware-misc-nonfree
|
||||
- nfs-common
|
||||
become: true
|
||||
@@ -1,12 +1,13 @@
|
||||
services:
|
||||
{% for service in services %}
|
||||
{% if inventory_hostname in service.vm %}
|
||||
{{service.name}}:
|
||||
|
||||
{{ service.name }}:
|
||||
container_name: {{ service.container_name }}
|
||||
image: {{ service.image }}
|
||||
restart: {{ service.restart }}
|
||||
restart: unless-stopped
|
||||
{% if service.network_mode is not defined %}
|
||||
hostname: {{service.name}}
|
||||
hostname: {{ service.name }}
|
||||
networks:
|
||||
- net
|
||||
{% endif %}
|
||||
@@ -15,11 +16,40 @@ services:
|
||||
ports:
|
||||
{% for port in service.ports %}
|
||||
{% if port.internal != 'proxy_only' %}
|
||||
- {{port.external}}:{{port.internal}}
|
||||
- {{ port.external }}:{{ port.internal }}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% if service.ports is defined and service.ports is iterable %}
|
||||
{% set first_http_port = service.ports | default([]) | selectattr('name', 'defined') | selectattr('name', 'search', 'http') | first %}
|
||||
{% set chosen_http_port_value = none %}
|
||||
{% if first_http_port is not none %}
|
||||
{% if first_http_port.internal is defined and first_http_port.internal == 'proxy_only' %}
|
||||
{% if first_http_port.external is defined %}
|
||||
{% set chosen_http_port_value = first_http_port.external %}
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% set chosen_http_port_value = first_http_port.internal %}
|
||||
{% endif %}
|
||||
{% if chosen_http_port_value is defined %}
|
||||
healthcheck:
|
||||
{% set healthcheck = 'curl' %}
|
||||
{% if service.healthcheck is defined %}
|
||||
{% set healthcheck = service.healthcheck %}
|
||||
{% endif %}
|
||||
{% if healthcheck == 'curl' %}
|
||||
test: ["CMD", "curl", "-f", "--silent", "--show-error", "--connect-timeout", "5", "http://localhost:{{ chosen_http_port_value }}/"]
|
||||
{% elif healthcheck == 'wget' %}
|
||||
test: ["CMD-SHELL", "wget --quiet --spider --timeout=5 http://localhost:{{ chosen_http_port_value }}/ || exit 1"]
|
||||
{% endif %}
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 20s
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% if service.cap_add is defined and service.cap_add is iterable %}
|
||||
cap_add:
|
||||
{% for cap in service.cap_add %}
|
||||
@@ -41,46 +71,88 @@ services:
|
||||
{% if service.volumes is defined and service.volumes is iterable %}
|
||||
volumes:
|
||||
{% for volume in service.volumes %}
|
||||
- {{volume.external}}:{{volume.internal}}
|
||||
- {{ volume.external }}:{{ volume.internal }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if service.environment is defined and service.environment is iterable %}
|
||||
environment:
|
||||
{% for env in service.environment %}
|
||||
- {{env}}
|
||||
- {{ env }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if service.devices is defined and service.devices is iterable %}
|
||||
devices:
|
||||
{% for device in service.devices %}
|
||||
- {{device.external}}:{{device.internal}}
|
||||
- {{ device.external }}:{{ device.internal }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if service.name == 'paperless' %}
|
||||
|
||||
{{service.name}}-broker:
|
||||
container_name: paperless-broker
|
||||
image: docker.io/library/redis:7
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- net
|
||||
volumes:
|
||||
- /opt/local/paperless/redis/data:/data
|
||||
|
||||
{{service.name}}-postgres:
|
||||
container_name: paperless-postgres
|
||||
image: docker.io/library/postgres:15
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- net
|
||||
volumes:
|
||||
- /opt/local/paperless/db/data:/var/lib/postgresql/data
|
||||
environment:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: 5fnhn%u2YWY3paNvMAjdoufYPQ2Hf3Yi
|
||||
{% if service.command is defined and service.command is iterable %}
|
||||
command:
|
||||
{% for command in service.command %}
|
||||
- {{ command }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if service.sub_service is defined and service.sub_service is iterable %}
|
||||
{% for sub in service.sub_service %}
|
||||
{% if sub.name is defined and sub.name == "postgres" %}
|
||||
{{ service.name }}-postgres:
|
||||
container_name: {{ service.name }}-postgres
|
||||
image: docker.io/library/postgres:{{ sub.version }}
|
||||
restart: unless-stopped
|
||||
hostname: {{ service.name }}-postgres
|
||||
networks:
|
||||
- net
|
||||
volumes:
|
||||
- /opt/local/{{ service.name }}/postgres/data:/var/lib/postgresql/data
|
||||
environment:
|
||||
POSTGRES_DB: {{ service.name }}
|
||||
POSTGRES_USER: {{ sub.username }}
|
||||
POSTGRES_PASSWORD: {{ sub.password }}
|
||||
{% endif %}
|
||||
{% if sub.name is defined and sub.name == "redis" %}
|
||||
{{ service.name }}-redis:
|
||||
container_name: {{ service.name }}-redis
|
||||
image: docker.io/library/redis:{{ sub.version }}
|
||||
restart: unless-stopped
|
||||
hostname: {{ service.name }}-redis
|
||||
networks:
|
||||
- net
|
||||
volumes:
|
||||
- /opt/local/{{ service.name }}/redis/data:/data
|
||||
{% endif %}
|
||||
{% if sub.name is defined and sub.name == "chrome" %}
|
||||
{{ service.name }}-chrome:
|
||||
image: gcr.io/zenika-hub/alpine-chrome:{{ sub.version }}
|
||||
container_name: {{ service.name }}-chrome
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- net
|
||||
command:
|
||||
- --no-sandbox
|
||||
- --disable-gpu
|
||||
- --disable-dev-shm-usage
|
||||
- --remote-debugging-address=0.0.0.0
|
||||
- --remote-debugging-port=9222
|
||||
- --hide-scrollbars
|
||||
{% endif %}
|
||||
{% if sub.name is defined and sub.name == "meilisearch" %}
|
||||
{{ service.name }}-meilisearch:
|
||||
container_name: {{ service.name }}-meilisearch
|
||||
image: getmeili/meilisearch:{{ sub.version }}
|
||||
restart: unless-stopped
|
||||
hostname: {{ service.name }}-meilisearch
|
||||
networks:
|
||||
- net
|
||||
volumes:
|
||||
- /opt/local/{{ service.name }}/mailisearch/data:/meili_data
|
||||
environment:
|
||||
- MEILI_NO_ANALYTICS=true
|
||||
- NEXTAUTH_SECRET={{ sub.nextauth_secret }}
|
||||
- MEILI_MASTER_KEY={{ sub.meili_master_key }}
|
||||
- OPENAI_API_KEY="{{ sub.openai_key }}"
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
networks:
|
||||
@@ -90,6 +162,3 @@ networks:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.16.69.0/24
|
||||
|
||||
volumes:
|
||||
prometheus_data: {}
|
||||
|
||||
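To make the new healthcheck and sub_service branches concrete, this is roughly what the template would render for a hypothetical service entry (the service name, port, and credentials below are invented for illustration; the real values live in the encrypted group_vars):

```yaml
# Hypothetical input (vars):
#   services:
#     - name: linkwarden
#       container_name: linkwarden
#       image: ghcr.io/linkwarden/linkwarden:latest
#       vm: ["docker-host10"]
#       ports: [{ name: http, internal: 3000, external: 3000 }]
#       sub_service: [{ name: postgres, version: 16, username: linkwarden, password: example }]
#
# Approximate compose.yaml rendered on docker-host10:
services:
  linkwarden:
    container_name: linkwarden
    image: ghcr.io/linkwarden/linkwarden:latest
    restart: unless-stopped
    hostname: linkwarden
    networks:
      - net
    ports:
      - 3000:3000
    healthcheck:
      test: ["CMD", "curl", "-f", "--silent", "--show-error", "--connect-timeout", "5", "http://localhost:3000/"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 20s
  linkwarden-postgres:
    container_name: linkwarden-postgres
    image: docker.io/library/postgres:16
    restart: unless-stopped
    hostname: linkwarden-postgres
    networks:
      - net
    volumes:
      - /opt/local/linkwarden/postgres/data:/var/lib/postgresql/data
    environment:
      POSTGRES_DB: linkwarden
      POSTGRES_USER: linkwarden
      POSTGRES_PASSWORD: example
```

Because the port's `internal` value is not `proxy_only`, the healthcheck probes the internal port; a `proxy_only` entry would fall back to the `external` value instead, exactly as the conditional chain in the template spells out.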
79
roles/docker_host/templates/keycloak/realm.json.j2
Normal file
79
roles/docker_host/templates/keycloak/realm.json.j2
Normal file
@@ -0,0 +1,79 @@
|
||||
{
|
||||
"realm": "{{ keycloak.realm }}",
|
||||
"enabled": true,
|
||||
"displayName": "{{ keycloak.display_name }}",
|
||||
"displayNameHtml": "<div class=\"kc-logo-text\">{{keycloak.display_name}}</div>",
|
||||
"bruteForceProtected": true,
|
||||
"users": [
|
||||
{% if keycloak.users is defined and keycloak.users is iterable %}
|
||||
{% for user in keycloak.users %}
|
||||
{
|
||||
"username": "{{ user.username }}",
|
||||
"enabled": true,
|
||||
"credentials": [
|
||||
{
|
||||
"type": "password",
|
||||
"value": "{{ user.password }}",
|
||||
"temporary": false
|
||||
}
|
||||
],
|
||||
"realmRoles": [
|
||||
{% for realm_role in user.realm_roles %}
|
||||
"{{ realm_role }}"{%- if not loop.last %},{% endif %}{{''}}
|
||||
{% endfor %}
|
||||
],
|
||||
"clientRoles": {
|
||||
"account": [
|
||||
{% for account in user.client_roles.account %}
|
||||
"{{ account }}"{%- if not loop.last %},{% endif %}{{''}}
|
||||
{% endfor %}
|
||||
]
|
||||
}
|
||||
},{% if not loop.last %}{% endif %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{
|
||||
"username": "{{ keycloak.admin.username }}",
|
||||
"enabled": true,
|
||||
"credentials": [
|
||||
{
|
||||
"type": "password",
|
||||
"value": "{{ keycloak.admin.password }}",
|
||||
"temporary": false
|
||||
}
|
||||
],
|
||||
"realmRoles": [
|
||||
{% for realm_role in keycloak.admin.realm_roles %}
|
||||
"{{ realm_role }}"{% if not loop.last %},{% endif %}{{''}}
|
||||
{% endfor %}
|
||||
],
|
||||
"clientRoles": {
|
||||
"realm-management": [
|
||||
{% for realm_management in keycloak.admin.client_roles.realm_management %}
|
||||
"{{ realm_management }}"{%- if not loop.last %},{% endif %}{{''}}
|
||||
{% endfor %}
|
||||
],
|
||||
"account": [
|
||||
{% for account in keycloak.admin.client_roles.account %}
|
||||
"{{ account }}"{%- if not loop.last %},{% endif %}{{''}}
|
||||
{% endfor %}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"roles": {
|
||||
"realm": [
|
||||
{% for role in keycloak.roles.realm %}
|
||||
{
|
||||
"name": "{{ role.name }}",
|
||||
"description": "{{ role.name }}"
|
||||
}{% if not loop.last %},{% endif %}
|
||||
{% endfor %}
|
||||
]
|
||||
},
|
||||
"defaultRoles": [
|
||||
{% for role in keycloak.roles.default_roles %}
|
||||
"{{ role }}"{% if not loop.last %},{% endif %}{{''}}
|
||||
{% endfor %}
|
||||
]
|
||||
}
|
||||
9 roles/docker_host/vars/main.yml Normal file
@@ -0,0 +1,9 @@
docker_host_package_common_dependencies:
  - nfs-common
  - firmware-misc-nonfree
  - linux-image-amd64

apt_lock_files:
  - /var/lib/dpkg/lock
  - /var/lib/dpkg/lock-frontend
  - /var/cache/apt/archives/lock
@@ -4,7 +4,7 @@ Description=PostgresExporter
|
||||
[Service]
|
||||
TimeoutStartSec=0
|
||||
User={{ bin_name }}
|
||||
ExecStart={{ bin_path }} --web.listen-address={{ host.ip }}:{{ bind_port }} {{ options }}
|
||||
ExecStart={{ bin_path }} --web.listen-address={{ ansible_host }}:{{ bind_port }} {{ options }}
|
||||
Environment="DATA_SOURCE_URI=localhost:5432/postgres?sslmode=disable"
|
||||
Environment="DATA_SOURCE_USER={{ db.user }}"
|
||||
Environment="DATA_SOURCE_PASS={{ db.password }}"
|
||||
|
||||
83
roles/proxmox/files/check_proxmox_vm.sh
Executable file
83
roles/proxmox/files/check_proxmox_vm.sh
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Configuration
|
||||
VM_ID=303
|
||||
TARGET_IP="192.168.20.36" # Replace with the IP of your VM
|
||||
PORT=22
|
||||
CHECK_INTERVAL=300 # 5 minutes in seconds
|
||||
LOG_FILE="/var/log/vm_monitor.log"
|
||||
|
||||
# Function to log messages
|
||||
log_message() {
|
||||
echo "$(date): $1" | tee -a $LOG_FILE
|
||||
}
|
||||
|
||||
# Check if running on a Proxmox host
|
||||
if ! command -v qm &>/dev/null; then
|
||||
log_message "qm command not found. This script must run on a Proxmox host."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Function to check port
|
||||
check_port() {
|
||||
# Try nc first if available
|
||||
if command -v nc &>/dev/null; then
|
||||
if nc -z -w 5 $TARGET_IP $PORT 2>/dev/null; then
|
||||
return 0 # Port is open
|
||||
else
|
||||
return 1 # Port is closed
|
||||
fi
|
||||
# Fall back to nmap if nc is not available
|
||||
elif command -v nmap &>/dev/null; then
|
||||
if nmap -p $PORT $TARGET_IP | grep -q "$PORT/tcp.*open"; then
|
||||
return 0 # Port is open
|
||||
else
|
||||
return 1 # Port is closed
|
||||
fi
|
||||
else
|
||||
log_message "Neither nc nor nmap found. Please install one of them."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to restart the VM
|
||||
restart_vm() {
|
||||
log_message "Port $PORT is not reachable. Restarting VM $VM_ID..."
|
||||
|
||||
# Stop the VM
|
||||
qm stop $VM_ID
|
||||
if [ $? -ne 0 ]; then
|
||||
log_message "Failed to stop VM $VM_ID. Trying force stop..."
|
||||
qm stop $VM_ID --force
|
||||
fi
|
||||
|
||||
# Wait for VM to fully stop
|
||||
log_message "Waiting for VM to stop..."
|
||||
sleep 10
|
||||
|
||||
# Start the VM
|
||||
qm start $VM_ID
|
||||
if [ $? -ne 0 ]; then
|
||||
log_message "Failed to start VM $VM_ID. Manual intervention required."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log_message "VM $VM_ID has been restarted."
|
||||
}
|
||||
|
||||
# Main loop
|
||||
log_message "Starting monitoring of VM $VM_ID on port $PORT..."
|
||||
log_message "Press Ctrl+C to exit."
|
||||
|
||||
while true; do
|
||||
# Check if port 22 is open
|
||||
if ! check_port; then
|
||||
restart_vm
|
||||
else
|
||||
log_message "Port $PORT is reachable. VM is running normally."
|
||||
fi
|
||||
|
||||
# Wait for the next check
|
||||
log_message "Sleeping for $CHECK_INTERVAL seconds..."
|
||||
sleep $CHECK_INTERVAL
|
||||
done
|
||||
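check_proxmox_vm.sh loops forever on its own, so it fits a long-running service better than a cron entry. A sketch of wiring it up from the proxmox role (the unit name and install path are assumptions, not part of these commits):

```yaml
- name: Install the VM watchdog script
  ansible.builtin.copy:
    src: files/check_proxmox_vm.sh
    dest: /usr/local/bin/check_proxmox_vm.sh  # assumed install path
    mode: "0755"
  become: true

- name: Create a systemd unit for the watchdog
  ansible.builtin.copy:
    dest: /etc/systemd/system/check-proxmox-vm.service  # assumed unit name
    mode: "0644"
    content: |
      [Unit]
      Description=Restart VM 303 when SSH stops answering

      [Service]
      ExecStart=/usr/local/bin/check_proxmox_vm.sh
      Restart=on-failure

      [Install]
      WantedBy=multi-user.target
  become: true

- name: Enable and start the watchdog
  ansible.builtin.systemd:
    name: check-proxmox-vm.service
    enabled: true
    state: started
    daemon_reload: true
  become: true
```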
6
roles/proxmox/handlers/node.yml
Normal file
6
roles/proxmox/handlers/node.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
---
|
||||
- name: Reboot Node
|
||||
ansible.builtin.reboot:
|
||||
connect_timeout: 5
|
||||
reboot_timeout: 600
|
||||
test_command: whoami
|
||||
8 roles/proxmox/tasks/00_setup_machines.yml Normal file
@@ -0,0 +1,8 @@
---
- name: Prepare Localhost
  ansible.builtin.include_tasks: ./01_setup_localhost.yml
  when: is_localhost

- name: Prepare Proxmox Node
  ansible.builtin.include_tasks: ./05_setup_node.yml
  when: is_proxmox_node
7
roles/proxmox/tasks/01_setup_localhost.yml
Normal file
7
roles/proxmox/tasks/01_setup_localhost.yml
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: Install dependencies
|
||||
ansible.builtin.apt:
|
||||
name: "{{ item }}"
|
||||
update_cache: true
|
||||
state: present
|
||||
loop: "{{ proxmox_localhost_dependencies }}"
|
||||
10
roles/proxmox/tasks/05_setup_node.yml
Normal file
10
roles/proxmox/tasks/05_setup_node.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
---
|
||||
- name: Install dependencies
|
||||
ansible.builtin.apt:
|
||||
name: "{{ item }}"
|
||||
update_cache: true
|
||||
state: present
|
||||
loop: "{{ proxmox_node_dependencies }}"
|
||||
|
||||
- name: Ensure Hardware Acceleration on node
|
||||
ansible.builtin.include_tasks: 06_hardware_acceleration.yml
|
||||
41
roles/proxmox/tasks/06_hardware_acceleration.yml
Normal file
41
roles/proxmox/tasks/06_hardware_acceleration.yml
Normal file
@@ -0,0 +1,41 @@
|
||||
---
|
||||
- name: Set GRUB_CMDLINE_LINUX_DEFAULT for PCI passthrough
|
||||
ansible.builtin.lineinfile:
|
||||
path: /etc/default/grub
|
||||
regexp: "^GRUB_CMDLINE_LINUX_DEFAULT="
|
||||
line: 'GRUB_CMDLINE_LINUX_DEFAULT="quiet intel_iommu=on iommu=pt pcie_acs_override=downstream,multifunction initcall_blacklist=sysfb_init video=simplefb:off video=vesafb:off video=efifb:off video=vesa:off disable_vga=1 vfio_iommu_type1.allow_unsafe_interrupts=1 kvm.ignore_msrs=1 modprobe.blacklist=radeon,nouveau,nvidia,nvidiafb,nvidia-gpu,snd_hda_intel,snd_hda_codec_hdmi,i915"'
|
||||
backup: true
|
||||
register: iommu_result
|
||||
|
||||
- name: Ensure VFIO modules are listed in /etc/modules
|
||||
ansible.builtin.blockinfile:
|
||||
path: /etc/modules
|
||||
marker: "# {mark} VFIO Modules"
|
||||
block: |
|
||||
vfio
|
||||
vfio_iommu_type1
|
||||
vfio_pci
|
||||
vfio_virqfd
|
||||
create: true
|
||||
backup: true
|
||||
register: vfio_result
|
||||
|
||||
- name: Update initramfs
|
||||
ansible.builtin.command: update-initramfs -u -k all
|
||||
when: iommu_result.changed or vfio_result.changed
|
||||
# notify:
|
||||
# - Reboot Node
|
||||
|
||||
- name: Update GRUB configuration
|
||||
ansible.builtin.command: update-grub
|
||||
when: iommu_result.changed or vfio_result.changed
|
||||
# notify:
|
||||
# - Reboot Node
|
||||
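The GRUB and VFIO changes only take effect after a reboot, which is currently left to the commented-out handler. A follow-up check could assert that the IOMMU actually came up; this is a sketch assuming an Intel node whose kernel logs "DMAR: IOMMU enabled" when intel_iommu=on is active:

```yaml
- name: Read kernel messages
  ansible.builtin.command: dmesg
  register: dmesg_output
  changed_when: false
  become: true

- name: Assert that the Intel IOMMU was enabled
  ansible.builtin.assert:
    that: dmesg_output.stdout is search('DMAR: IOMMU enabled')
    fail_msg: "IOMMU does not appear to be active; check GRUB_CMDLINE_LINUX_DEFAULT and reboot the node."
```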
12
roles/proxmox/tasks/10_create_secrets.yml
Normal file
12
roles/proxmox/tasks/10_create_secrets.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
---
|
||||
- name: Ensure Vault file exists
|
||||
ansible.builtin.file:
|
||||
path: "{{ proxmox_vault_file }}"
|
||||
state: touch
|
||||
mode: "0600"
|
||||
|
||||
- name: Update Vault data
|
||||
ansible.builtin.include_tasks: 15_create_secret.yml
|
||||
loop: "{{ vms | map(attribute='name') }}"
|
||||
loop_control:
|
||||
loop_var: "vm_name"
|
||||
47
roles/proxmox/tasks/15_create_secret.yml
Normal file
47
roles/proxmox/tasks/15_create_secret.yml
Normal file
@@ -0,0 +1,47 @@
|
||||
---
|
||||
- name: Decrypt vm vault file
|
||||
ansible.builtin.shell: cd ../; ansible-vault decrypt "./playbooks/{{ proxmox_vault_file }}"
|
||||
ignore_errors: true
|
||||
no_log: true
|
||||
|
||||
- name: Load existing vault content
|
||||
ansible.builtin.slurp:
|
||||
src: "{{ proxmox_vault_file }}"
|
||||
register: vault_content
|
||||
no_log: true
|
||||
|
||||
- name: Parse vault content as YAML
|
||||
ansible.builtin.set_fact:
|
||||
vault_data: "{{ (vault_content['content'] | b64decode | from_yaml) if (vault_content['content'] | length > 0) else {} }}"
|
||||
no_log: true
|
||||
|
||||
- name: Setup secret name
|
||||
ansible.builtin.set_fact:
|
||||
vm_name_secret: "{{ proxmox_secrets_prefix }}_{{ vm_name | replace('-','_') }}"
|
||||
|
||||
- name: Check if variable is in vault
|
||||
ansible.builtin.set_fact:
|
||||
variable_exists: "{{ vm_name_secret in vault_data }}"
|
||||
|
||||
- name: Set new secret
|
||||
ansible.builtin.set_fact:
|
||||
cipassword: "{{ lookup('password', '/dev/null length=32 chars=ascii_letters,digits') }}"
|
||||
when: not variable_exists
|
||||
|
||||
- name: Set new secret
|
||||
ansible.builtin.set_fact:
|
||||
new_vault_data: "{{ vault_data | combine({ vm_name_secret: cipassword }) }}"
|
||||
when: not variable_exists
|
||||
|
||||
- name: Write updated Vault content to file (temporary plaintext)
|
||||
ansible.builtin.copy:
|
||||
content: "{{ new_vault_data | to_nice_yaml }}"
|
||||
dest: "{{ proxmox_vault_file }}"
|
||||
mode: "0600"
|
||||
when: not variable_exists
|
||||
no_log: true
|
||||
|
||||
- name: Encrypt vm vault file
|
||||
ansible.builtin.shell: cd ../; ansible-vault encrypt "./playbooks/{{ proxmox_vault_file }}"
|
||||
ignore_errors: true
|
||||
no_log: true
|
||||
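Once 15_create_secret.yml has run for every VM, the decrypted secrets_vm.yml holds one generated cloud-init password per VM, keyed by the secrets_vm prefix with dashes turned into underscores. A sketch of the plaintext structure (the values are placeholders, not real secrets):

```yaml
# group_vars/proxmox/secrets_vm.yml, before ansible-vault encrypt runs again
secrets_vm_docker_host10: "placeholder-32-char-random-string1"
secrets_vm_docker_host11: "placeholder-32-char-random-string2"
secrets_vm_docker_host12: "placeholder-32-char-random-string3"
```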
6 roles/proxmox/tasks/40_prepare_vm_creation.yml Normal file
@@ -0,0 +1,6 @@
---
- name: Download Cloud Init Isos
  ansible.builtin.include_tasks: 42_download_isos.yml
  loop: "{{ proxmox_cloud_init_images | dict2items | map(attribute='value') }}"
  loop_control:
    loop_var: distro
28
roles/proxmox/tasks/42_download_isos.yml
Normal file
28
roles/proxmox/tasks/42_download_isos.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
---
|
||||
- name: Check if file exists
|
||||
ansible.builtin.stat:
|
||||
path: "{{ proxmox_dirs.isos }}/{{ distro.name }}"
|
||||
register: image_stat
|
||||
|
||||
- name: Download image if missing
  ansible.builtin.get_url:
    url: "{{ distro.url }}"
    dest: "{{ proxmox_dirs.isos }}/{{ distro.name }}"
    mode: "0644"
  when: not image_stat.stat.exists
  register: download_result
|
||||
- name: Set raw image file name fact
|
||||
ansible.builtin.set_fact:
|
||||
raw_image_name: "{{ distro.name | splitext | first }}.img"
|
||||
|
||||
- name: Check if raw image already exists
|
||||
ansible.builtin.stat:
|
||||
path: "{{ proxmox_dirs.isos }}/{{ raw_image_name }}"
|
||||
register: raw_image_stat
|
||||
|
||||
- name: Convert image to raw format
|
||||
ansible.builtin.command:
|
||||
cmd: "qemu-img convert -O raw {{ proxmox_dirs.isos }}/{{ distro.name }} {{ proxmox_dirs.isos }}/{{ raw_image_name }}"
|
||||
when:
|
||||
- download_result is changed or not raw_image_stat.stat.exists
|
||||
- image_stat.stat.exists
|
||||
17
roles/proxmox/tasks/50_create_vms.yml
Normal file
17
roles/proxmox/tasks/50_create_vms.yml
Normal file
@@ -0,0 +1,17 @@
|
||||
---
|
||||
- name: Load vault variables
|
||||
ansible.builtin.include_vars:
|
||||
file: "{{ proxmox_vault_file }}"
|
||||
name: vm_secrets
|
||||
|
||||
# - name: Destroy vms (Only during rapid testing)
|
||||
# ansible.builtin.include_tasks: 54_destroy_vm.yml
|
||||
# loop: "{{ vms }}"
|
||||
# loop_control:
|
||||
# loop_var: "vm"
|
||||
|
||||
- name: Create vms
|
||||
ansible.builtin.include_tasks: 55_create_vm.yml
|
||||
loop: "{{ vms }}"
|
||||
loop_control:
|
||||
loop_var: "vm"
|
||||
30
roles/proxmox/tasks/54_destroy_vm.yml
Normal file
30
roles/proxmox/tasks/54_destroy_vm.yml
Normal file
@@ -0,0 +1,30 @@
|
||||
---
|
||||
- name: Gather info about VM
|
||||
community.general.proxmox_vm_info:
|
||||
api_user: root@pam
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
register: vm_info
|
||||
|
||||
- name: Stop VM
|
||||
community.general.proxmox_kvm:
|
||||
api_user: root@pam
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
node: "{{ vm.node }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
state: stopped
|
||||
force: true
|
||||
when: vm_info.proxmox_vms | length > 0
|
||||
|
||||
- name: Destroy VM
|
||||
community.general.proxmox_kvm:
|
||||
api_user: root@pam
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
node: "{{ vm.node }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
state: absent
|
||||
force: true
|
||||
when: vm_info.proxmox_vms | length > 0
|
||||
31
roles/proxmox/tasks/55_create_vm.yml
Normal file
31
roles/proxmox/tasks/55_create_vm.yml
Normal file
@@ -0,0 +1,31 @@
|
||||
---
|
||||
- name: Create VM
|
||||
community.general.proxmox_kvm:
|
||||
api_user: "{{ proxmox_api_user }}@pam"
|
||||
api_password: "{{ proxmox_api_password }}"
|
||||
api_host: "{{ proxmox_api_host }}"
|
||||
agent: true
|
||||
name: "{{ vm.name }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
node: "{{ vm.node }}"
|
||||
cores: "{{ vm.cores }}"
|
||||
memory: "{{ vm.memory }}"
|
||||
net: "{{ vm.net }}"
|
||||
hostpci: "{{ vm.hostpci | default({})}}"
|
||||
scsihw: "virtio-scsi-pci"
|
||||
ostype: "l26"
|
||||
tags: "{{ proxmox_tags }}"
|
||||
description: "Created via Ansible with cloud-init"
|
||||
boot: "order=scsi0"
|
||||
cpu: "x86-64-v2-AES"
|
||||
ciuser: "{{ vm.ciuser }}"
|
||||
# cipassword: "{{ vm_secrets[proxmox_secrets_prefix + '_' + vm.name.replace('-', '_')] }}"
|
||||
cipassword: "flyff369"
|
||||
ipconfig:
|
||||
ipconfig0: "ip=dhcp"
|
||||
sshkeys: "{{ vm.sshkeys }}"
|
||||
register: proxmox_deploy_info
|
||||
|
||||
- name: Provision created VM
|
||||
ansible.builtin.include_tasks: 56_provision_new_vm.yml
|
||||
when: proxmox_deploy_info.changed
|
||||
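The cipassword above is currently hard-coded; the commented-out line shows the intended wiring back to the vault loaded in 50_create_vms.yml. For reference, the lookup resolves like this (mirroring the commented line, not new behaviour):

```yaml
# Resolve the generated cloud-init password for the current VM, e.g.
# vm_secrets["secrets_vm_docker_host10"] for the VM named docker-host10.
cipassword: "{{ vm_secrets[proxmox_secrets_prefix + '_' + vm.name.replace('-', '_')] }}"
```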
101
roles/proxmox/tasks/56_provision_new_vm.yml
Normal file
101
roles/proxmox/tasks/56_provision_new_vm.yml
Normal file
@@ -0,0 +1,101 @@
|
||||
---
|
||||
- name: Debug proxmox_deploy_info
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ proxmox_deploy_info }}"
|
||||
|
||||
- name: Get MAC Address of new machine
|
||||
ansible.builtin.set_fact:
|
||||
mac_address: "{{ proxmox_deploy_info.mac.net0 }}"
|
||||
|
||||
- name: Import disk
|
||||
ansible.builtin.shell: |
|
||||
qm importdisk {{ vm.vmid }} {{ proxmox_dirs.isos }}/{{ vm.boot_image }} {{ proxmox_storage }}
|
||||
delegate_to: "{{ vm.node }}"
|
||||
when: proxmox_deploy_info.changed
|
||||
|
||||
- name: Attach disk and cloud-init
|
||||
ansible.builtin.shell: |
|
||||
qm set {{ vm.vmid }} --scsi0 {{ proxmox_storage }}:{{ vm.vmid }}/vm-{{ vm.vmid }}-disk-0.raw --ide2 {{ proxmox_storage }}:cloudinit --boot order=scsi0
|
||||
delegate_to: "{{ vm.node }}"
|
||||
|
||||
- name: Resize scsi0 disk if needed
|
||||
ansible.builtin.shell: |
|
||||
qm resize {{ vm.vmid }} scsi0 {{ vm.disk_size }}G
|
||||
delegate_to: "{{ vm.node }}"
|
||||
|
||||
- name: Start VM
|
||||
community.general.proxmox_kvm:
|
||||
api_user: root@pam
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
node: "{{ vm.node }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
state: started
|
||||
|
||||
- name: Stop VM
|
||||
community.general.proxmox_kvm:
|
||||
api_user: root@pam
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
node: "{{ vm.node }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
state: stopped
|
||||
force: true
|
||||
|
||||
- name: Wait until VM is fully stopped
|
||||
community.general.proxmox_vm_info:
|
||||
api_user: "root@pam"
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
node: "{{ vm.node }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
register: vm_status_check
|
||||
until: vm_status_check.proxmox_vms[0].status == "stopped"
|
||||
retries: 24
|
||||
delay: 5
|
||||
|
||||
- name: Start VM
|
||||
community.general.proxmox_kvm:
|
||||
api_user: root@pam
|
||||
api_password: "{{ vault.pve.aya01.root.sudo }}"
|
||||
api_host: "192.168.20.12"
|
||||
node: "{{ vm.node }}"
|
||||
vmid: "{{ vm.vmid }}"
|
||||
state: started
|
||||
|
||||
- name: Wait for VM to appear on network
|
||||
ansible.builtin.shell: |
|
||||
nmap -sn -n -PR 192.168.20.0/24 | grep -B2 "{{ mac_address }}" | grep "Nmap scan report for"
|
||||
register: vm_nmap_scan
|
||||
retries: 30
|
||||
delay: 5
|
||||
until: vm_nmap_scan.stdout != ""
|
||||
delegate_to: "{{ vm.node }}"
|
||||
|
||||
- name: Extract the IP address from Nmap output
|
||||
ansible.builtin.set_fact:
|
||||
vm_found_ip: "{{ vm_nmap_scan.stdout | regex_search('Nmap scan report for ([0-9\\.]+)', '\\1') | first }}"
|
||||
|
||||
- name: Append new VM to SSH config "{{ vm.name }}"
|
||||
ansible.builtin.blockinfile:
|
||||
path: "{{ ansible_env.HOME }}/.ssh/config_homelab"
|
||||
marker: "# {mark} HOMELAB VM: {{ vm.name }} BLOCK"
|
||||
block: |
|
||||
Host {{ vm.name }}
|
||||
HostName {{ vm_found_ip }}
|
||||
Port 22
|
||||
User {{ user }}
|
||||
IdentityFile {{ pk_path }}
|
||||
IdentityFile ~/.ssh/id_ed25519
|
||||
IdentityFile ~/.ssh/id_ed25519-cert.pub
|
||||
ProxyJump {{ vm.node }}
|
||||
StrictHostKeyChecking no
|
||||
|
||||
- name: Add VM to homelab_vms group in production.ini
|
||||
ansible.builtin.lineinfile:
|
||||
path: "{{ inventory_file }}"
|
||||
line: "{{ vm.name }}"
|
||||
insertafter: '^\[vms\]'
|
||||
create: true
|
||||
state: present
|
||||
delegate_to: localhost
|
||||
11
roles/proxmox/tasks/60_create_containers.yml
Normal file
11
roles/proxmox/tasks/60_create_containers.yml
Normal file
@@ -0,0 +1,11 @@
|
||||
---
|
||||
- name: Load vault variables
|
||||
ansible.builtin.include_vars:
|
||||
file: "{{ proxmox_vault_file }}"
|
||||
name: vm_secrets
|
||||
|
||||
- name: Create vms
|
||||
ansible.builtin.include_tasks: 65_create_container.yml
|
||||
loop: "{{ lxcs }}"
|
||||
loop_control:
|
||||
loop_var: "container"
|
||||
4
roles/proxmox/tasks/65_create_container.yml
Normal file
4
roles/proxmox/tasks/65_create_container.yml
Normal file
@@ -0,0 +1,4 @@
|
||||
---
|
||||
- name: Create Container
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ container.name }}"
|
||||
19
roles/proxmox/tasks/main.yml
Normal file
19
roles/proxmox/tasks/main.yml
Normal file
@@ -0,0 +1,19 @@
---
- name: Prepare Machines
ansible.builtin.include_tasks: 00_setup_machines.yml

- name: Create VM vault
ansible.builtin.include_tasks: 10_create_secrets.yml
when: is_localhost

- name: Prime node for VM
ansible.builtin.include_tasks: 40_prepare_vm_creation.yml
when: is_proxmox_node

- name: Create VMs
ansible.builtin.include_tasks: 50_create_vms.yml
when: is_localhost

- name: Create LXC containers
ansible.builtin.include_tasks: 60_create_containers.yml
when: is_localhost
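The `is_localhost` and `is_proxmox_node` switches used above are expected to come from the inventory or an earlier play; a hypothetical way they could be derived (not shown in this diff):

```yaml
# Hypothetical sketch - the real flags are defined elsewhere in the repository.
- name: Derive role switches
  ansible.builtin.set_fact:
    is_localhost: "{{ inventory_hostname == 'localhost' }}"
    is_proxmox_node: "{{ 'proxmox' in group_names }}"
```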
25
roles/proxmox/vars/main.yml
Normal file
25
roles/proxmox/vars/main.yml
Normal file
@@ -0,0 +1,25 @@
proxmox_author: tuan-dat.tran@tudattr.dev
proxmox_creator: ansible

proxmox_storage: proxmox

proxmox_vault_file: ../group_vars/proxmox/secrets_vm.yml
proxmox_secrets_prefix: secrets_vm
proxmox_cloud_init_images:
debian:
name: debian-12-genericcloud-amd64.qcow2
url: https://cdimage.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.qcow2
ubuntu:
name: noble-server-cloudimg-amd64.img
url: https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.img
proxmox_dirs:
isos: /opt/proxmox/template/iso/

proxmox_tags:
- "{{ proxmox_creator }}"

proxmox_node_dependencies:
- libguestfs-tools
- nmap

proxmox_localhost_dependencies: []
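A hypothetical consumer of the image map and ISO directory defined above (the actual download logic lives in task files not shown in this section):

```yaml
# Hypothetical sketch - downloads every cloud-init image defined in vars/main.yml.
- name: Download cloud-init images
  ansible.builtin.get_url:
    url: "{{ item.value.url }}"
    dest: "{{ proxmox_dirs.isos }}{{ item.value.name }}"
    mode: "0644"
  loop: "{{ proxmox_cloud_init_images | dict2items }}"
  become: true
```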
@@ -1,4 +1,11 @@
---
- name: Restart Caddy
ansible.builtin.command: "{{ caddy_binary }} reload --config {{ caddy_config_path }}"
- name: Restart caddy service
ansible.builtin.systemd:
name: caddy
state: restarted
become: true

- name: Update apt cache
ansible.builtin.apt:
update_cache: true
become: true
23
roles/reverse_proxy/tasks/10_caddy_install.yml
Normal file
23
roles/reverse_proxy/tasks/10_caddy_install.yml
Normal file
@@ -0,0 +1,23 @@
---
- name: Download Caddy GPG Key
ansible.builtin.get_url:
url: https://dl.cloudsmith.io/public/caddy/stable/gpg.key
dest: /usr/share/keyrings/caddy-stable-archive-keyring.asc
mode: "0644"
become: true

- name: Add Caddy repository source list
ansible.builtin.apt_repository:
repo: "{{ item }}"
state: present
become: true
notify: Update apt cache
loop:
- "deb [signed-by=/usr/share/keyrings/caddy-stable-archive-keyring.asc] https://dl.cloudsmith.io/public/caddy/stable/deb/debian any-version main"
- "deb-src [signed-by=/usr/share/keyrings/caddy-stable-archive-keyring.asc] https://dl.cloudsmith.io/public/caddy/stable/deb/debian any-version main"

- name: Install Caddy
ansible.builtin.apt:
name: caddy
state: present
become: true
@@ -24,8 +24,8 @@
state: present
become: true

- name: Install Caddy
ansible.builtin.command: xcaddy build --with github.com/caddy-dns/netcup
- name: Build Custom Caddy with netcup
ansible.builtin.command: xcaddy build --with github.com/caddy-dns/netcup {{ reverse_proxy_caddy_version }}
environment:
PATH: "{{ ansible_env.PATH }}:/usr/local/go/bin"
register: xcaddy_build
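The new `reverse_proxy_caddy_version` argument lets xcaddy pin the Caddy release it builds against; its actual value is defined elsewhere in the role and not shown in this diff. A hypothetical pin:

```yaml
# Hypothetical example value - the real default is not part of this diff.
reverse_proxy_caddy_version: v2.8.4
```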
41
roles/reverse_proxy/tasks/30_custom_caddy.yml
Normal file
41
roles/reverse_proxy/tasks/30_custom_caddy.yml
Normal file
@@ -0,0 +1,41 @@
---
- name: Check current diversion status for {{ reverse_proxy_default_caddy_path }}
ansible.builtin.command:
cmd: dpkg-divert --list {{ reverse_proxy_default_caddy_path }}
register: divert_check_result
changed_when: false # This task only checks state
failed_when: false # Don't fail if diversion doesn't exist (rc=1)
become: true

- name: Divert package manager's caddy binary path
ansible.builtin.command:
cmd: dpkg-divert --divert {{ reverse_proxy_diverted_caddy_path }} --rename {{ reverse_proxy_default_caddy_path }}
# Only run if the diversion isn't already set correctly
when: "reverse_proxy_diverted_caddy_path not in divert_check_result.stdout"
notify: Restart caddy service # Notify restart if diversion happens
become: true

- name: Copy custom Caddy binary to destination path
ansible.builtin.copy:
src: "{{ reverse_proxy_custom_caddy_source_path }}"
dest: "{{ reverse_proxy_custom_caddy_dest_path }}"
owner: root
group: root
mode: "0755"
remote_src: true
notify: Restart caddy service # Notify restart if binary changes
become: true

- name: Install original (diverted) caddy binary alternative
ansible.builtin.command:
# Use --force if the link /usr/bin/caddy might exist but not be managed by alternatives yet
cmd: update-alternatives --install {{ reverse_proxy_alternatives_link }} {{ reverse_proxy_alternatives_name }} {{ reverse_proxy_diverted_caddy_path }} 10
changed_when: false # update-alternatives is idempotent but often reports no change via rc
become: true

- name: Install custom caddy binary alternative with higher priority
ansible.builtin.command:
cmd: update-alternatives --install {{ reverse_proxy_alternatives_link }} {{ reverse_proxy_alternatives_name }} {{ reverse_proxy_custom_caddy_dest_path }} 50
changed_when: false # update-alternatives is idempotent but often reports no change via rc
notify: Restart caddy service
become: true
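A hypothetical verification task (not part of this diff) that shows which binary the alternatives system currently resolves to, useful when debugging the diversion set up above:

```yaml
# Hypothetical check - prints the current update-alternatives status for caddy.
- name: Show which caddy binary the alternatives system points at
  ansible.builtin.command:
    cmd: "update-alternatives --display {{ reverse_proxy_alternatives_name }}"
  register: caddy_alternatives
  changed_when: false
  become: true

- name: Print alternatives status
  ansible.builtin.debug:
    msg: "{{ caddy_alternatives.stdout_lines }}"
```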
14
roles/reverse_proxy/tasks/50_netcup_dns.yml
Normal file
14
roles/reverse_proxy/tasks/50_netcup_dns.yml
Normal file
@@ -0,0 +1,14 @@
---
- name: Setup DNS on Netcup
community.general.netcup_dns:
api_key: "{{ reverse_proxy_netcup_api_key }}"
api_password: "{{ reverse_proxy_netcup_api_password }}"
customer_id: "{{ reverse_proxy_netcup_customer_id }}"
domain: "{{ domain }}"
name: "{{ service.name }}"
type: "A"
value: "{{ hostvars['docker-lb'].ansible_default_ipv4.address }}"
loop: "{{ services }}"
loop_control:
loop_var: service
delegate_to: localhost
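Each entry in `services` only needs a `name` here, which becomes the host part of the A record under `{{ domain }}`. A hypothetical shape of that list (illustrative names; the real list lives in group vars, not in this diff):

```yaml
# Hypothetical example of the services list consumed by 50_netcup_dns.yml.
services:
  - name: grafana
  - name: nextcloud
```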
@@ -13,4 +13,10 @@
mode: "0644"
backup: true
become: true
notify: Restart Caddy
notify: Restart caddy service

- name: Format Caddy configuration file
ansible.builtin.command:
cmd: "caddy fmt --overwrite {{ caddy_config_path }}"
become: true
notify: Restart caddy service
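A hypothetical follow-up step (not part of this diff) that validates the formatted Caddyfile before the restart handler fires, so a broken config never reaches the running service:

```yaml
# Hypothetical validation step using caddy's built-in config check.
- name: Validate Caddy configuration file
  ansible.builtin.command:
    cmd: "caddy validate --config {{ caddy_config_path }}"
  changed_when: false
  become: true
```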
@@ -1,9 +1,18 @@
---
- name: Install Prerequisites
ansible.builtin.include_tasks: prereq.yml
- name: Install Go for Caddy
ansible.builtin.include_tasks: 00_go_install.yml

- name: Install Caddy
ansible.builtin.include_tasks: install.yml
ansible.builtin.include_tasks: 10_caddy_install.yml

- name: Install xCaddy
ansible.builtin.include_tasks: 20_xcaddy_install.yml

- name: Setup Custom Caddy
ansible.builtin.include_tasks: 30_custom_caddy.yml

- name: Setup Netcup DNS
ansible.builtin.include_tasks: 50_netcup_dns.yml

- name: Configure Caddy
ansible.builtin.include_tasks: configure.yml
- name: Start Caddy
ansible.builtin.include_tasks: start.yml
ansible.builtin.include_tasks: 80_configure.yml
@@ -1,4 +0,0 @@
---
- name: Ensure Caddy service is running
ansible.builtin.command: "{{ caddy_binary }} start --config {{ caddy_config_path }}"
become: true
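With start.yml removed, service lifecycle is expected to fall to systemd (the Debian caddy package ships a unit, and the new handler already restarts it via systemd). A hypothetical equivalent of the deleted task under that assumption:

```yaml
# Hypothetical replacement - relies on the systemd unit installed by the caddy package.
- name: Ensure Caddy service is enabled and running
  ansible.builtin.systemd:
    name: caddy
    state: started
    enabled: true
  become: true
```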
Some files were not shown because too many files have changed in this diff