From 6b2804de8283fadd2bf9ab7a19aab7370e6f13b9 Mon Sep 17 00:00:00 2001 From: admin Date: Thu, 6 Feb 2025 00:55:58 +0000 Subject: [PATCH] initial commit --- ansible.cfg | 2 + inventory | 5 + main.yml | 23 ++ tasks/nvidia.yml | 67 +++ tasks/podman.yml | 921 ++++++++++++++++++++++++++++++++++++++++++ tasks/post-podman.yml | 17 + tasks/pre-podman.yml | 7 + tasks/setup.yml | 178 ++++++++ vars | 2 + vault.yml | 14 + 10 files changed, 1236 insertions(+) create mode 100755 ansible.cfg create mode 100755 inventory create mode 100755 main.yml create mode 100755 tasks/nvidia.yml create mode 100755 tasks/podman.yml create mode 100755 tasks/post-podman.yml create mode 100755 tasks/pre-podman.yml create mode 100755 tasks/setup.yml create mode 100755 vars create mode 100644 vault.yml diff --git a/ansible.cfg b/ansible.cfg new file mode 100755 index 0000000..f8fc6cd --- /dev/null +++ b/ansible.cfg @@ -0,0 +1,2 @@ +[defaults] +inventory = inventory diff --git a/inventory b/inventory new file mode 100755 index 0000000..fa88637 --- /dev/null +++ b/inventory @@ -0,0 +1,5 @@ +servers: + hosts: + debian-pods: + ansible_host: 192.168.0.30 + ansible_user: admin diff --git a/main.yml b/main.yml new file mode 100755 index 0000000..f4681b1 --- /dev/null +++ b/main.yml @@ -0,0 +1,23 @@ +--- + +- name: configure host + hosts: all + + vars_files: + - vars + - vault.yml + + tasks: + - import_tasks: tasks/setup.yml + tags: ['setup'] + + - import_tasks: tasks/nvidia.yml + tags: ['nvidia'] + + - import_tasks: tasks/pre-podman.yml + tags: ['podman'] + - import_tasks: tasks/podman.yml + tags: ['podman'] + - import_tasks: tasks/post-podman.yml + tags: ['podman'] + diff --git a/tasks/nvidia.yml b/tasks/nvidia.yml new file mode 100755 index 0000000..74b27a1 --- /dev/null +++ b/tasks/nvidia.yml @@ -0,0 +1,67 @@ + +- name: Create systemd service file for NVIDIA CDI generator + become: true + ansible.builtin.copy: + dest: /etc/systemd/system/nvidia-cdi-generator.service + content: | + [Unit] + 
Description=Generate NVIDIA CDI YAML for Docker at boot
+      After=network.target
+
+      [Service]
+      Type=oneshot
+      ExecStart=/usr/bin/nvidia-ctk cdi generate --output /var/run/cdi/nvidia.yaml
+      RemainAfterExit=true
+
+      [Install]
+      WantedBy=multi-user.target
+
+# The unit above is a *system* service (/etc/systemd/system), so reload the
+# system manager, not the user one (the original used scope: user here).
+- name: reload systemd daemon
+  become: true
+  ansible.builtin.systemd_service:
+    daemon_reload: true
+- name: Enable the NVIDIA CDI generator service
+  become: true
+  ansible.builtin.systemd_service:
+    name: nvidia-cdi-generator.service
+    enabled: true
+
+- name: add contrib and nonfree
+  become: true
+  ansible.builtin.apt_repository:
+    repo: deb http://deb.debian.org/debian {{ ansible_distribution_release }} main contrib non-free non-free-firmware
+    state: present
+
+- name: install nvidia-container-toolkit repo
+  become: true
+  ansible.builtin.shell:
+    cmd: "curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --yes --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg && curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | tee /etc/apt/sources.list.d/nvidia-container-toolkit.list"
+
+- name: Gather current kernel version
+  ansible.builtin.shell: uname -r
+  register: kernel_version
+
+- name: install Nvidia driver
+  become: true
+  ansible.builtin.apt:
+    pkg:
+      - nvidia-container-toolkit
+      - nvidia-driver
+      - firmware-misc-nonfree
+      - "linux-headers-{{ kernel_version.stdout }}"
+    state: present
+    update_cache: true
+  register: updated_driver
+
+- name: Unconditionally reboot the machine with all defaults
+  ansible.builtin.reboot:
+  when: updated_driver.changed
+
+# ignore_errors keeps the play going on hosts without a GPU; the original
+# failed_when referenced a nonexistent var (nvidia_ctk+result) and inverted
+# the condition (!= 1 would fail on success).
+- name: configure nvidia for rootless podman
+  become: true
+  ansible.builtin.shell:
+    cmd: nvidia-ctk cdi generate --output=/etc/cdi/nvidia.yaml
+  register: nvidia_ctk_result
+  ignore_errors: true
+  failed_when: nvidia_ctk_result.rc != 0
+
diff --git
a/tasks/podman.yml b/tasks/podman.yml new file mode 100755 index 0000000..3c6945c --- /dev/null +++ b/tasks/podman.yml @@ -0,0 +1,921 @@ +--- + +- name: mealie + containers.podman.podman_container: + state: quadlet + name: podman_mealie + image: ghcr.io/mealie-recipes/mealie:latest + network: bridge + volumes: + - /home/admin/podman/mealie:/app/data/ + ports: + - 9091:9000 + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: ollama + containers.podman.podman_container: + state: quadlet + name: podman_ollama + image: docker.io/ollama/ollama:latest + network: bridge + device: "nvidia.com/gpu=all" + volumes: + - "/home/admin/podman/ollama:/root/.ollama" + ports: + - "11434:11434" + env: + OLLAMA_KEEP_ALIVE: "-1" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: open-webui + containers.podman.podman_container: + state: quadlet + name: podman_open-webui + image: ghcr.io/open-webui/open-webui:latest + network: bridge + volumes: + - "/home/admin/podman/open-webui:/app/backend/data" + ports: + - "3000:8080" + env: + ENABLE_RAG_WEB_SEARCH: "True" + RAG_WEB_SEARCH_ENGINE: "searxng" + RAG_WEB_SEARCH_RESULT_COUNT: 3 + RAG_WEB_SEARCH_CONCURRENT_REQUESTS: 10 + SEARXNG_QUERY_URL: "http://{{ ansible_ssh_host }}:8880/search?q=" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: searxng + containers.podman.podman_container: + state: quadlet + name: podman_searxng + image: docker.io/searxng/searxng:latest + network: bridge + ports: + - "8880:8080" + volumes: + - "/home/admin/podman/searxng:/etc/searxng" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + 
WantedBy=default.target + +- name: jellyfin + containers.podman.podman_container: + state: quadlet + name: podman_jellyfin + image: docker.io/jellyfin/jellyfin + network: bridge + device: "nvidia.com/gpu=all" + volumes: + - "/home/admin/podman/jellyfin:/config" + - "/mnt/media/video/movies:/movies:ro" + - "/mnt/media/video/tv:/tv:ro" + - "/mnt/media/audio/music/flac:/music:ro" + - "/mnt/media/video/family:/family:ro" + - "/mnt/media/video/anime/tv:/anime-tv:ro" + - "/mnt/media/video/anime/movies:/anime-movies:ro" + - "/mnt/media/video/livetv:/livetv:ro" + ports: + - "8096:8096" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: freshrss + containers.podman.podman_container: + state: quadlet + name: podman_freshrss + image: lscr.io/linuxserver/freshrss:latest + network: bridge + volumes: + - "/home/admin/podman/freshrss:/config" + ports: + - "8555:80" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: eclipse-mosquitto + containers.podman.podman_container: + state: quadlet + name: podman_eclipse-mosquitto + image: docker.io/eclipse-mosquitto + network: bridge + volumes: + - "/home/admin/podman/eclipse-mosquitto:/mosquitto" + ports: + - "1883:1883" + - "9001:9001" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: frigate + containers.podman.podman_container: + state: quadlet + name: podman_frigate + image: "ghcr.io/blakeblackshear/frigate:{{ frigate_version }}" + network: bridge + device: "nvidia.com/gpu=all" + privileged: true + volumes: + - /etc/localtime:/etc/localtime:ro + - /home/admin/podman/frigate:/config + - /mnt/services/cctv:/media/frigate + - 
/usr/lib/x86_64-linux-gnu/libcuda.so:/usr/lib/x86_64-linux-gnu/libcuda.so:ro + ports: + - "5005:5000" + - "5001:8971" + - "1935:1935" + - "8554:8554" + env: + FRIGATE_RTSP_PASSWORD: "{{ rtsp_password }}" + YOLO_MODELS: "yolov7-320" + USE_FP16: "false" + quadlet_options: + - "Tmpfs=/tmp/cache" + - "ShmSize=128mb" + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: nginx-proxy-manager + containers.podman.podman_container: + state: quadlet + name: podman_nginx-proxy-manager + image: docker.io/jc21/nginx-proxy-manager:latest + network: bridge + #ip: 192.168.50.10 + privileged: true + volumes: + - "/home/admin/podman/nginx-proxy-manager:/data" + - "/home/admin/podman/letsencrypt:/etc/letsencrypt" + ports: + - "80:80" + - "443:443" + - "81:81" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: ddclient + containers.podman.podman_container: + state: quadlet + name: podman_ddclient + image: lscr.io/linuxserver/ddclient:latest + network: bridge + env: + TZ: "Europe/London" + volumes: + - "/home/admin/podman/ddclient:/config" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: gitea + containers.podman.podman_container: + state: quadlet + name: podman_gitea + image: docker.io/gitea/gitea:latest + network: bridge + #ip: 192.168.50.30 + env: + TZ: "Europe/London" + DISABLE_REGISTRATION: "true" + volumes: + - "/home/admin/podman/gitea:/data" + - "/etc/timezone:/etc/timezone:ro" + - "/etc/localtime:/etc/localtime:ro" + ports: + - "3001:3000" + - "222:22" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: 
nginx-personal-site
+  containers.podman.podman_container:
+    state: quadlet
+    name: podman_nginx-personal-site
+    image: docker.io/nginx:latest
+    network: bridge
+    #ip: 192.168.50.20
+    volumes:
+      - "/home/admin/podman/nginx/nginx.conf:/etc/nginx/nginx.conf:ro"
+      - "/home/admin/podman/nginx/html:/usr/share/nginx/html"
+    # ports must be a list — the original bare scalar parsed as a string
+    ports:
+      - "888:80"
+    env:
+      NGINX_HOST: "{{ personal_site_host }}"
+      NGINX_PORT: "80"
+    quadlet_options:
+      - "AutoUpdate=registry"
+      - "Pull=newer"
+      - |
+        [Service]
+        Restart=always
+        TimeoutStartSec=900
+        [Install]
+        WantedBy=default.target
+
+- name: joplin
+  containers.podman.podman_container:
+    state: quadlet
+    name: podman_joplin
+    image: docker.io/joplin/server:latest
+    network: bridge
+    ports:
+      - "22300:22300"
+    env:
+      APP_PORT: "22300"
+      APP_BASE_URL: "http://{{ ansible_ssh_host }}:22300"
+      DB_CLIENT: "pg"
+      POSTGRES_PASSWORD: "{{ joplin_password }}"
+      POSTGRES_DATABASE: "joplin-db"
+      POSTGRES_USER: "joplin"
+      POSTGRES_PORT: "5432"
+      POSTGRES_HOST: "{{ ansible_ssh_host }}"
+    quadlet_options:
+      - "AutoUpdate=registry"
+      - "Pull=newer"
+      - |
+        [Service]
+        Restart=always
+        TimeoutStartSec=900
+        [Install]
+        WantedBy=default.target
+
+- name: joplin-db
+  containers.podman.podman_container:
+    state: quadlet
+    name: podman_joplin-db
+    image: docker.io/postgres:15
+    network: bridge
+    volumes:
+      - "/home/admin/podman/joplin-db:/var/lib/postgresql/data"
+    ports:
+      - "5432:5432"
+    env:
+      TZ: "Europe/London"
+      POSTGRES_PASSWORD: "{{ joplin_password }}"
+      POSTGRES_USER: "joplin"
+      POSTGRES_DB: "joplin-db"
+    quadlet_options:
+      - "AutoUpdate=registry"
+      - "Pull=newer"
+      - |
+        [Service]
+        Restart=always
+        TimeoutStartSec=900
+        [Install]
+        WantedBy=default.target
+
+- name: wireguard
+  containers.podman.podman_container:
+    state: quadlet
+    name: podman_wireguard
+    image: lscr.io/linuxserver/wireguard:latest
+    network: bridge
+    privileged: true
+    # single dict (module expects a mapping); the original repeated the
+    # sysctl key twice, so last-wins parsing dropped ip_forward entirely
+    sysctl: {net.ipv4.ip_forward: "1",
+      net.ipv4.conf.all.src_valid_mark: "1"}
+    cap_add:
+      - NET_RAW
+      - NET_ADMIN
+      -
SYS_MODULE + volumes: + - "/home/admin/podman/wireguard:/config" + - "/lib/modules:/lib/modules" #optional + ports: + - "51820:51820/udp" + env: + TZ: "Europe/London" + PEERDNS: "1.1.1.1" + SERVERURL: "{{ public_ip }}" + SERVERPORT: "51820" + PEERS: "FarisIOS,FarisMacbook,SafaPhone" + ALLOWEDIPS: "192.168.0.1/24" + LOG_CONFS: "true" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: vaultwarden + containers.podman.podman_container: + state: quadlet + name: podman_vaultwarden + image: docker.io/vaultwarden/server:latest + network: bridge + volumes: + - "/home/admin/podman/vaultwarden/:/data/" + ports: + - "8000:80" + - "3012:3012" + env: + TZ: "Europe/London" + DOMAIN: "https://{{ personal_site_host }}" + SIGNUPS_ALLOWED: "false" + EXPERIMENTAL_CLIENT_FEATURE_FLAGS: "ssh-key-vault-item,ssh-agent" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: homeassistant + containers.podman.podman_container: + state: quadlet + name: podman_homeassistant + image: ghcr.io/home-assistant/home-assistant:stable + network: bridge + volumes: + - "/home/admin/podman/homeassistant:/config" + - "/etc/localtime:/etc/localtime:ro" + ports: + - "8123:8123" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + + +- name: rutorrent + containers.podman.podman_container: + state: quadlet + name: podman_rutorrent + image: docker.io/crazymax/rtorrent-rutorrent:latest + network: bridge + volumes: + - "/home/admin/podman/rutorrent/passwd:/passwd" + - "/home/admin/podman/rutorrent/data:/data" + - "/mnt/media/torrents:/downloads" + ports: + - "8888:8080" + - "5000:8000" + - "50000:50000" + env: + TZ: "Europe/London" + quadlet_options: + - 
"AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: prowlarr + containers.podman.podman_container: + state: quadlet + name: podman_prowlarr + image: lscr.io/linuxserver/prowlarr:latest + network: bridge + volumes: + - "/home/admin/podman/prowlarr:/config" + ports: + - "9696:9696" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: sonarr + containers.podman.podman_container: + state: quadlet + name: podman_sonarr + image: lscr.io/linuxserver/sonarr:latest + network: bridge + volumes: + - "/home/admin/podman/sonarr:/config" + - "/mnt/media/video/tv:/tv" + - "/mnt/media/torrents:/downloads" + - "/mnt/media/video/anime/tv:/anime-tv" + ports: + - "8989:8989" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: radarr + containers.podman.podman_container: + state: quadlet + name: podman_radarr + image: lscr.io/linuxserver/radarr:latest + network: bridge + volumes: + - "/home/admin/podman/radarr:/config" + - "/mnt/media/video/movies:/movies" + - "/mnt/media/torrents:/downloads" + - "/mnt/media/video/anime/movies:/anime-movies" + ports: + - "7878:7878" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: readarr + containers.podman.podman_container: + state: quadlet + name: podman_readarr + image: lscr.io/linuxserver/readarr:develop + network: bridge + volumes: + - "/home/admin/podman/readarr:/config" + - "/mnt/media/books:/books" + - "/mnt/media/torrents:/downloads" + ports: + - "8787:8787" + env: + TZ: "Europe/London" + quadlet_options: + - 
"AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: lidarr + containers.podman.podman_container: + state: quadlet + name: podman_lidarr + image: lscr.io/linuxserver/lidarr:latest + network: bridge + volumes: + - "/home/admin/podman/lidarr:/config" + - "/mnt/media/audio/music/flac:/music" + - "/mnt/media/torrents:/downloads" + ports: + - "8686:8686" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: bazarr + containers.podman.podman_container: + state: quadlet + name: podman_bazarr + image: lscr.io/linuxserver/bazarr:latest + network: bridge + volumes: + - "/home/admin/podman/lidarr:/config" + - "/mnt/media/video/movies:/movies" + - "/mnt/media/video/tv:/tv" + ports: + - "6767:6767" + env: + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: kiwix + containers.podman.podman_container: + state: quadlet + name: podman_kiwix + image: ghcr.io/kiwix/kiwix-serve:latest + network: bridge + command: "*.zim" + volumes: + - "/mnt/media/kiwix:/data" + ports: + - "8088:8080" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + + +- name: immich-server + containers.podman.podman_container: + state: quadlet + name: podman_immich-server + image: "ghcr.io/immich-app/immich-server:{{ immich_version }}" + network: bridge + device: "nvidia.com/gpu=all" + volumes: + - "/mnt/services/immich:/usr/src/app/upload" + - "/etc/localtime:/etc/localtime:ro" + ports: + - "2283:2283" + env_file: + - "/home/admin/podman/.env" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + 
TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: immich-machine-learning + containers.podman.podman_container: + state: quadlet + name: podman_immich-machine-learning + image: "ghcr.io/immich-app/immich-machine-learning:{{ immich_version }}-cuda" + network: bridge + device: "nvidia.com/gpu=all" + volumes: + - "/home/admin/podman/immich/cache:/cache" + env_file: + - "/home/admin/podman/.env" + ports: + - "3003:3003" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: immich-redis + containers.podman.podman_container: + state: quadlet + name: podman_immich-redis + image: registry.hub.docker.com/library/redis:6.2-alpine + network: bridge + ports: + - "6379:6379" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: immich-db + containers.podman.podman_container: + state: quadlet + name: podman_immich-db + image: registry.hub.docker.com/tensorchord/pgvecto-rs:pg14-v0.2.0 + network: bridge + env: + POSTGRES_PASSWORD: "{{ immich_db_password }}" + POSTGRES_USER: postgres + POSTGRES_DB: immich + volumes: + - "/home/admin/podman/immich/db:/var/lib/postgresql/data" + ports: + - "5433:5432" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: metube + containers.podman.podman_container: + state: quadlet + name: podman_metube + image: ghcr.io/alexta69/metube:latest + network: bridge + volumes: + - "/mnt/media/youtube-dl:/downloads" + - "/mnt/media/audio/music/flac:/music" + ports: + - "8081:8081" + env: + AUDIO_DOWNLOAD_DIR: "/music" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: 
unifi-network-application + containers.podman.podman_container: + state: quadlet + name: podman_unifi-network-application + image: lscr.io/linuxserver/unifi-network-application:latest + network: bridge + volumes: + - "/home/admin/podman/unifi-network-application:/config" + ports: + - "8443:8443" + - "10001:10001/udp" + env: + TZ: "Europe/London" + MONGO_INITDB_ROOT_USERNAME: "root" + MONGO_INITDB_ROOT_PASSWORD: "{{ rtsp_password }}" + MONGO_USER: "unifi" + MONGO_PASS: "{{ rtsp_password }}" + MONGO_HOST: "{{ ansible_ssh_host }}" + MONGO_PORT: "27017" + MONGO_DBNAME: "unifi" + MONGO_AUTHSOURCE: "admin" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: unifi-network-application-db + containers.podman.podman_container: + state: quadlet + name: podman_unifi-network-application-db + image: docker.io/mongo:7.0 + network: bridge + volumes: + - "/home/admin/podman/unifi-network-application-db" + - "/home/admin/init-mongo.sh:/docker-entrypoint-initdb.d/init-mongo.sh:ro" + ports: + - "27017:27017" + env: + MONGO_USER: "unifi" + MONGO_PASS: "{{ rtsp_password }}" + MONGO_HOST: "{{ ansible_ssh_host }}" + MONGO_PORT: "27017" + MONGO_DBNAME: "unifi" + MONGO_AUTHSOURCE: "admin" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: tube-archivist + containers.podman.podman_container: + state: quadlet + name: podman_tube-archivist + image: docker.io/bbilly1/tubearchivist:latest + network: bridge + volumes: + - "/mnt/media/video/youtube:/youtube" + - "/home/admin/podman/tube-archivist/cache" + ports: + - "8001:8000" + env: + ES_URL: "http://{{ ansible_ssh_host }}:9200" + REDIS_HOST: "{{ ansible_ssh_host }}" + REDIS_PORT: "6380" + TA_HOST: "{{ ansible_ssh_host }}" + TA_USERNAME: "admin" + TA_PASSWORD: "{{ rtsp_password }}" + ELASTIC_PASSWORD: "{{ 
rtsp_password }}" + TZ: "Europe/London" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: tube-archivist-es + containers.podman.podman_container: + state: quadlet + name: podman_tube-archivist-es + image: docker.io/bbilly1/tubearchivist-es:latest + network: bridge + volumes: + - "/home/admin/podman/tube-archivist/es:/usr/share/elasticsearch/data" + ports: + - "9200:9200" + env: + ELASTIC_PASSWORD: "{{ rtsp_password }}" # matching Elasticsearch password + ES_JAVA_OPTS: "-Xms1g -Xmx1g" + xpack.security.enabled: "true" + discovery.type: "single-node" + path.repo: "/usr/share/elasticsearch/data/snapshot" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: tube-archivist-redis + containers.podman.podman_container: + state: quadlet + name: podman_tube-archivist-redis + image: docker.io/redis/redis-stack-server + network: bridge + volumes: + - "/home/admin/podman/tube-archivist/redis:/data" + ports: + - "6380:6379" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: archivebox + containers.podman.podman_container: + state: quadlet + name: podman_archivebox + image: docker.io/archivebox/archivebox:latest + network: bridge + volumes: + - "/home/admin/podman/archivebox:/data" + ports: + - "8002:8000" + env: + ADMIN_USERNAME: "admin" + ADMIN_PASSWORD: "{{ rtsp_password }}" + PGID: "1000" + PUID: "1000" + SEARCH_BACKEND_ENGINE: "sonic" + SEARCH_BACKEND_HOST_NAME: "sonic" + SEARCH_BACKEND_PASSWORD: "{{ rtsp_password }}" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target + +- name: zigbee2mqtt + containers.podman.podman_container: 
+ state: quadlet + name: podman_zigbee2mqtt + image: docker.io/koenkk/zigbee2mqtt + network: bridge + device: "/dev/ttyACM0:/dev/ttyACM0" + group_add: "keep-groups" + volumes: + - "/home/admin/podman/zigbee2mqtt:/app/data" + ports: + - "8808:8080" + quadlet_options: + - "AutoUpdate=registry" + - "Pull=newer" + - | + [Service] + Restart=always + TimeoutStartSec=900 + [Install] + WantedBy=default.target diff --git a/tasks/post-podman.yml b/tasks/post-podman.yml new file mode 100755 index 0000000..489d7bf --- /dev/null +++ b/tasks/post-podman.yml @@ -0,0 +1,17 @@ +- name: reload systemd daemon + ansible.builtin.systemd_service: + scope: user + daemon_reload: true +- name: get list of all container services + ansible.builtin.shell: + cmd: "systemctl --user list-units --type=service --state=inactive --no-pager --quiet | grep 'podman_' | awk '{print $1}'" + register: inactive_containers + changed_when: false +- name: reload containers + ansible.builtin.systemd_service: + scope: user + state: started + name: "{{ item }}" + with_items: "{{ inactive_containers.stdout_lines }}" + when: inactive_containers.stdout_lines is defined + diff --git a/tasks/pre-podman.yml b/tasks/pre-podman.yml new file mode 100755 index 0000000..a69a939 --- /dev/null +++ b/tasks/pre-podman.yml @@ -0,0 +1,7 @@ +--- + +- name: cleanup unused containers + ansible.builtin.file: + state: absent + path: /home/admin/.config/containers/systemd/ + diff --git a/tasks/setup.yml b/tasks/setup.yml new file mode 100755 index 0000000..6fe9a2b --- /dev/null +++ b/tasks/setup.yml @@ -0,0 +1,178 @@ +--- + +- name: set localtime (only needed for gitea and frigate) + become: true + ansible.builtin.file: + path: /usr/share/zoneinfo/Europe/London + dest: /etc/localtime + state: link + +- name: set timezone (only neede for gitea) + become: true + ansible.builtin.shell: + cmd: echo "Europe/London" | tee /etc/timezone && dpkg-reconfigure -f noninteractive tzdata + changed_when: false + + +- name: Update apt and install 
required programs + become: true + ansible.builtin.apt: + pkg: + - gpg + - ansible + - cron + - curl + - git + - tmux + - podman + - neovim + - nfs-common + - fzf + - rsync + - restic + state: present + update_cache: true + +- name: create mount directories + become: true + block: + - name: create /mnt/data + ansible.builtin.file: + path: /mnt/data + state: directory + - name: create /mnt/services + ansible.builtin.file: + path: /mnt/services + state: directory + - name: create /mnt/media + ansible.builtin.file: + path: /mnt/media + state: directory + +- name: mount shares + become: true + block: + - name: mount data + ansible.posix.mount: + path: /mnt/data + src: "{{ truenas_ip }}:/mnt/pool/data" + fstype: nfs4 + opts: defaults,auto,rw + state: mounted + - name: mount services + ansible.posix.mount: + path: /mnt/services + src: "{{ truenas_ip }}:/mnt/pool/services" + fstype: nfs4 + opts: defaults,auto,rw + state: mounted + - name: mount media + ansible.posix.mount: + path: /mnt/media + src: "{{ truenas_ip }}:/mnt/pool/media" + fstype: nfs4 + opts: defaults,auto,rw + state: mounted + +- name: setup cronjob to sync podman folder to nfs + become: true + ansible.builtin.cron: + minute: "0" + hour: "4" + name: "backup" + user: "admin" + job: "/bin/bash '/usr/bin/systemctl --user stop podman_* && /usr/bin/rsync -a --delete /home/admin/podman /mnt/services/podman && /usr/bin/systemctl --user start podman_* --all'" + cron_file: backup + +- name: setup cronjob to backup data to b2 + become: true + ansible.builtin.cron: + minute: "40" + hour: "15" + name: "backup-restic" + user: "admin" + job: /bin/bash "env RESTIC_PASSWORD='Outshine-Playmaker-Earthen' B2_ACCOUNT_KEY='004e1d35edc52cd716719a747edd66d5f42111d604' B2_ACCOUNT_ID='257549133968' RESTIC_REPOSITORY='b2:faris-backup' GOGC=20 restic backup --cache-dir=/home/admin/.cache/restic /mnt/services/ /mnt/media/video/family/ /mnt/data/ --exclude /mnt/services/cctv/** --exclude /mnt/services/podman/ollama/models/** --exclude 
/mnt/services/immich/encoded-video/** --exclude /mnt/services/immich/thumbs/** && restic forget --keep-within 1y --prune --cache-dir=/home/admin/.cache/restic"
+
+  cron_file: backup
+
+# NOTE(review): plaintext restic/B2 credentials above and below should move
+# into vault.yml and be referenced as vaulted vars — left as-is to avoid
+# breaking the job against unknown vault contents.
+# env RESTIC_PASSWORD="Outshine-Playmaker-Earthen3" B2_ACCOUNT_KEY="004e1d35edc52cd716719a747edd66d5f42111d604" B2_ACCOUNT_ID="257549133968" RESTIC_REPOSITORY="b2:faris-backup" GOGC=20 restic backup --cache-dir=/home/admin/.cache/restic /mnt/services/ /mnt/media/video/family/ /mnt/data/ --exclude /mnt/services/cctv/** --exclude /mnt/services/podman/ollama/models/** --exclude /mnt/services/immich/encoded-video --exclude /mnt/services/immich/thumbs && restic forget --keep-within 1y --prune --cache-dir=/home/admin/.cache/restic
+- name: Check if /home/admin/podman exists
+  ansible.builtin.stat:
+    path: /home/admin/podman
+  register: podman_dir
+
+- name: create /home/admin/podman if it doesn't exist
+  ansible.builtin.file:
+    path: /home/admin/podman
+    state: directory
+  when: not podman_dir.stat.exists
+
+- name: initialize podman-compose
+  ansible.posix.synchronize:
+    src: /mnt/services/podman/
+    dest: /home/admin/podman
+    archive: true
+  delegate_to: "{{ inventory_hostname }}"
+  when: not podman_dir.stat.exists
+
+- name: change podman to overlayfs (system might need to be reset after this)
+  block:
+    - name: create directory if doesn't exist
+      ansible.builtin.file:
+        state: directory
+        path: /home/admin/.config/containers/
+    - name: create file if doesn't exist
+      ansible.builtin.file:
+        state: touch
+        path: /home/admin/.config/containers/storage.conf
+    # quote "[storage]" — unquoted it is a YAML flow sequence, not a string
+    - name: storage
+      ansible.builtin.lineinfile:
+        path: /home/admin/.config/containers/storage.conf
+        search_string: "[storage]"
+        line: "[storage]"
+    - name: overlay
+      ansible.builtin.lineinfile:
+        path: /home/admin/.config/containers/storage.conf
+        search_string: driver
+        line: driver = 'overlay'
+
+- name: allow rootless podman to access ports below 1000
+  become: true
+  ansible.posix.sysctl:
+    name: net.ipv4.ip_unprivileged_port_start
+
value: 80 + sysctl_file: /etc/sysctl.d/99-ports.conf + +- name: allow rootless wireguard src_valid_mark + become: true + ansible.posix.sysctl: + name: net.ipv4.conf.all.src_valid_mark + value: 1 + sysctl_file: /etc/sysctl.d/99-ports.conf + +- name: allow rootless wireguard forwarding all + become: true + ansible.posix.sysctl: + name: net.ipv4.conf.all.forwarding + value: 1 + sysctl_file: /etc/sysctl.d/99-ports.conf + +- name: allow rootless wireguard ip_forward + become: true + ansible.posix.sysctl: + name: net.ipv4.ip_forward + value: 1 + sysctl_file: /etc/sysctl.d/99-ports.conf + +# this might not be needed, haven't tested +- name: allow rootless podmad (wireguard) to access net src + become: true + ansible.posix.sysctl: + name: net.ipv4.conf.all.src_valid_mark + value: 1 + sysctl_file: /etc/sysctl.d/99-ports.conf + + +- name: enable linger (so user services start without login required) + ansible.builtin.shell: + cmd: loginctl enable-linger diff --git a/vars b/vars new file mode 100755 index 0000000..9291b48 --- /dev/null +++ b/vars @@ -0,0 +1,2 @@ +immich_version: v1.125.7 +frigate_version: 0.15.0-rc1-tensorrt diff --git a/vault.yml b/vault.yml new file mode 100644 index 0000000..014ccfb --- /dev/null +++ b/vault.yml @@ -0,0 +1,14 @@ +$ANSIBLE_VAULT;1.1;AES256 +66383037336532363438336262613162663731646161323137653465663138393532323561663633 +3132393938316133323035663233313534626431343731610a393737393461323530646238316266 +39643135653663343836623030653266643738343638346565373239346637336332616139396633 +3037346663633238660a353533383638666631343565306461623230393364343463346232633836 +34353037313932323130393761633438643437393561636635326233386632613633343261373833 +34643233303862393961643366633735623561363038313137383962313666646333636638356637 +63343163366231623336363030366235653665323961616633633733356437643737343836643337 +37373934643230306264613363343932336130383337336435393536613335663265393739383530 
+37386230333131396337373130633465653733393830306334303333356536636563363366393031 +66646338356132656665663665636335366564346233623539336432323932333238323066633530 +31343364613265616366616433633661353439333438323230366230663939336361613139383235 +32656664323731363334626230613834663864373232396566363137393233376562353564636638 +37343466643562313261323764326638636264666239313061346134346166343831