From 1afd028926e9d91bcbe364618caefc7e6e677eac Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 14:57:59 +0100 Subject: [PATCH 01/16] build(deps): update ansible collection versions --- ansible/requirements.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ansible/requirements.yml b/ansible/requirements.yml index 1e3d1860..07162301 100644 --- a/ansible/requirements.yml +++ b/ansible/requirements.yml @@ -3,10 +3,10 @@ collections: - name: cloud.terraform version: 4.0.0 - name: community.general - version: 10.2.0 + version: 12.0.1 - name: community.docker - version: 4.0.0 + version: 5.0.1 - name: community.postgresql - version: 3.12.0 + version: 4.1.0 - name: ansible.eda - version: 2.8.0 + version: 2.10.0 From 372160346830a12394c163f3aa16d13ffb9a2ea4 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 14:59:39 +0100 Subject: [PATCH 02/16] feat(project): add project variable to rulebook and UI schema Introduce a `project` extra variable in the Ansible rulebook and propagate it in the UI softwares schema payload. This allows the system to identify the project context when operating on SaaS tasks. --- ansible/rulebook.yml | 1 + ui/index.js.map | 4 ++-- ui/schemas/softwares.js | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/ansible/rulebook.yml b/ansible/rulebook.yml index 9eb42898..61aa3a4c 100644 --- a/ansible/rulebook.yml +++ b/ansible/rulebook.yml @@ -91,6 +91,7 @@ name: playbooks/saas/operate.yml extra_vars: hosts_limit: "{{ event.payload.meta.hosts }}" + project: "{{ event.payload.project }}" catalog: "{{ event.payload.catalog }}" domain: "{{ event.payload.domain }}" task: "{{ event.payload.task }}" diff --git a/ui/index.js.map b/ui/index.js.map index 2e07593c..e4be137c 100644 --- a/ui/index.js.map +++ b/ui/index.js.map @@ -249,7 +249,7 @@ "auth": 1, "params": "id:string", "id": "softwares_update", - "input": "*size:String, domain_alias:String, *exposition:String", + "input": "*instance:String, *software:UID, *size:String, *domain:String, domain_alias:String, *exposition:String", "name": "Update software" }, { @@ -522,7 +522,7 @@ { "name": "Softwares/update", "params": "*id:UID", - "input": "*size:String, domain_alias:String, *exposition:String" + "input": "*instance:String, *software:UID, *size:String, *domain:String, domain_alias:String, *exposition:String" }, { "name": "Softwares/update_version", diff --git a/ui/schemas/softwares.js b/ui/schemas/softwares.js index c623c947..9e8a2c4a 100644 --- a/ui/schemas/softwares.js +++ b/ui/schemas/softwares.js @@ -144,7 +144,7 @@ NEWSCHEMA('Softwares', function (schema) { schema.action('update', { name: 'Update software', params: '*id:UID', - input: '*size:String, domain_alias:String, *exposition:String', + input: '*instance:String, *software:UID, *size:String, *domain:String, domain_alias:String, *exposition:String', action: async function ($, model) { const rules = { size: { regex: REGEX_SOFTWARES.size, comment: REGEX_SOFTWARES.size.comment }, @@ -220,6 +220,7 @@ NEWSCHEMA('Softwares', function (schema) { const payload = { meta: { hosts: item.instance }, + project: item.instance.split('.').pop(), type: 'saas-operate', catalog: catalogName, domain: item.domain, From 7bc58fcde2df7a0bdfebc43bb76faa8638572b6c Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 15:00:58 +0100 Subject: [PATCH 03/16] refactor(ui): remove unused fork attribute and related UI fields --- ui/public/forms/catalogs.html | 6 ++---- ui/public/forms/software.html | 10 
---------- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/ui/public/forms/catalogs.html b/ui/public/forms/catalogs.html index 9bdce250..ddd84de3 100644 --- a/ui/public/forms/catalogs.html +++ b/ui/public/forms/catalogs.html @@ -25,7 +25,7 @@ @@ -56,15 +53,9 @@ var model = exports.model; if(typeof model.item.id == 'undefined'){ $('#info').attr('hidden', true); - $('#instance').attr('hidden', false); - $('#software').attr('hidden', false); - $('#domain').attr('hidden', false); } else { $('#info').attr('hidden', false); - $('#instance').attr('hidden', true); - $('#software').attr('hidden', true); - $('#domain').attr('hidden', true); } header.reconfigure({ title: model.item.id ? '@(Update software)' : '@(Add software)' }); @@ -78,7 +69,6 @@ CLRELOAD('cl'); SETTER('notify/success', '@(Done)'); caller.exec('refresh'); - caller.exec('refreshGraph'); }); }; }); From 0456625bef2bdb816f15aa5aed5f56c45c633185 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 15:03:13 +0100 Subject: [PATCH 04/16] feat(freqtrade): add ansible role for freqtrade --- .../playbooks/saas/roles/freqtrade/README.md | 21 ++++ .../saas/roles/freqtrade/defaults/main.yml | 1 + .../saas/roles/freqtrade/tasks/backup.yml | 1 + .../saas/roles/freqtrade/tasks/build.yml | 12 +++ .../saas/roles/freqtrade/tasks/destroy.yml | 0 .../saas/roles/freqtrade/tasks/main.yml | 32 ++++++ .../saas/roles/freqtrade/tasks/restore.yml | 1 + .../saas/roles/freqtrade/templates/nomad.hcl | 91 ++++++++++++++++++ .../saas/roles/freqtrade/vars/main.yml | 11 +++ .../saas/roles/freqtrade/vars/upstream.yml | 2 + ui/public/img/freqtrade.png | Bin 0 -> 10417 bytes 11 files changed, 172 insertions(+) create mode 100644 ansible/playbooks/saas/roles/freqtrade/README.md create mode 100644 ansible/playbooks/saas/roles/freqtrade/defaults/main.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/tasks/backup.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/tasks/build.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/tasks/destroy.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/tasks/main.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/tasks/restore.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/templates/nomad.hcl create mode 100644 ansible/playbooks/saas/roles/freqtrade/vars/main.yml create mode 100644 ansible/playbooks/saas/roles/freqtrade/vars/upstream.yml create mode 100644 ui/public/img/freqtrade.png diff --git a/ansible/playbooks/saas/roles/freqtrade/README.md b/ansible/playbooks/saas/roles/freqtrade/README.md new file mode 100644 index 00000000..f680977e --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/README.md @@ -0,0 +1,21 @@ +# Role: `freqtrade` + +## How to use this Ansible role? + +1. Create your private user_data repository + +2. Configure your software variable on UI + +``` +strategy: MyCustomStrategy +config: myconfig +``` + +3. Configure your secret yaml configuration + +``` +git_user_data: + repo: http://mydomain.com/muser/myrepo.git + version: latest + token: s3cret! 
+``` \ No newline at end of file diff --git a/ansible/playbooks/saas/roles/freqtrade/defaults/main.yml b/ansible/playbooks/saas/roles/freqtrade/defaults/main.yml new file mode 100644 index 00000000..ed97d539 --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/defaults/main.yml @@ -0,0 +1 @@ +--- diff --git a/ansible/playbooks/saas/roles/freqtrade/tasks/backup.yml b/ansible/playbooks/saas/roles/freqtrade/tasks/backup.yml new file mode 100644 index 00000000..ed97d539 --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/tasks/backup.yml @@ -0,0 +1 @@ +--- diff --git a/ansible/playbooks/saas/roles/freqtrade/tasks/build.yml b/ansible/playbooks/saas/roles/freqtrade/tasks/build.yml new file mode 100644 index 00000000..f1029407 --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/tasks/build.yml @@ -0,0 +1,12 @@ +--- +- name: Include upstream variables + ansible.builtin.include_vars: upstream.yml + +- name: Set custom variables + ansible.builtin.set_fact: + image_version: "{{ latest_version }}" + image_definition: "{{ image }}" + +- name: End playbook if no new version + ansible.builtin.meta: end_host + when: softwares[catalog_image_name] is defined and softwares[catalog_image_name].version == image_version diff --git a/ansible/playbooks/saas/roles/freqtrade/tasks/destroy.yml b/ansible/playbooks/saas/roles/freqtrade/tasks/destroy.yml new file mode 100644 index 00000000..e69de29b diff --git a/ansible/playbooks/saas/roles/freqtrade/tasks/main.yml b/ansible/playbooks/saas/roles/freqtrade/tasks/main.yml new file mode 100644 index 00000000..436fa0c5 --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/tasks/main.yml @@ -0,0 +1,32 @@ +--- +- name: Create default directories + ansible.builtin.file: + path: "{{ item }}" + state: directory + owner: 1000 + group: 1000 + mode: '0755' + loop: + - "{{ software_path }}/db" + delegate_to: "{{ software.instance }}" + +- name: Checkout user data configurations + ansible.builtin.git: + repo: "{{ (lookup('simple-stack-ui', type='secret', key=domain, subkey='git_user_data', missing='error') | from_json).repo }}" + dest: "{{ software_path }}/freqtrade" + delegate_to: "{{ software.instance }}" + +- name: Copy nomad job to destination + ansible.builtin.template: + src: nomad.hcl + dest: "/var/tmp/{{ domain }}.nomad" + owner: root + group: root + mode: '0600' + become: true + delegate_to: "{{ software.instance }}" + +- name: Run nomad job + ansible.builtin.include_role: + name: nomad + tasks_from: job_run.yml diff --git a/ansible/playbooks/saas/roles/freqtrade/tasks/restore.yml b/ansible/playbooks/saas/roles/freqtrade/tasks/restore.yml new file mode 100644 index 00000000..ed97d539 --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/tasks/restore.yml @@ -0,0 +1 @@ +--- diff --git a/ansible/playbooks/saas/roles/freqtrade/templates/nomad.hcl b/ansible/playbooks/saas/roles/freqtrade/templates/nomad.hcl new file mode 100644 index 00000000..dd2425c8 --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/templates/nomad.hcl @@ -0,0 +1,91 @@ +job "{{ domain }}" { + region = "{{ fact_instance.region }}" + datacenters = ["{{ fact_instance.datacenter }}"] + type = "service" + +{% if software.constraints is defined and software.constraints.location is defined %} + constraint { + attribute = "${meta.location}" + set_contains = "{{ software.constraints.location }}" + } +{% endif %} + + constraint { + attribute = "${meta.instance}" + set_contains = "{{ software.instance }}" + } + + group "freqtrade" { + + count = 1 + + restart { + attempts = 2 + 
interval = "10m" + delay = "15s" + mode = "fail" + } + + network { + port "http" { + to = 8080 + } + } + + service { + name = "http" + port = "http" + provider = "nomad" + tags = [ + {{ lookup('template', '../../traefik/templates/traefik_tag.j2') | indent(8) }} + ] + check { + name = "traefik" + type = "tcp" + interval = "60s" + timeout = "30s" + check_restart { + limit = 3 + grace = "90s" + ignore_warnings = false + } + } + } + + task "freqtrade" { + + driver = "docker" + + kill_signal = "SIGTERM" + + env { +{% for env in (lookup('simple-stack-ui', type='secret', key=domain, subkey='additional_env', missing='warn') | from_json) | default(litellm_env) %} + {{ env.key }} = "{{ env.value }}" +{% endfor %} + } + config { + image = "freqtradeorg/freqtrade:{{ softwares.freqtrade.version }}" + volumes = [ + "{{ software_path }}/freqtrade/user_data:/freqtrade/user_data:rw", + "{{ software_path }}/db:/db:rw" + ] + ports = ["http"] + + command = "trade" + args = [ + "--config", + "/freqtrade/user_data/{{ software.config }}.json", + "--db-url", + "sqlite:////db/tradesv3{{ software.strategy }}.sqlite", + "--strategy", + "{{ software.strategy }}" + ] + } + + resources { + cpu = {{ size[software.size].cpu }} + memory = {{ size[software.size].memory }} + } + } + } +} diff --git a/ansible/playbooks/saas/roles/freqtrade/vars/main.yml b/ansible/playbooks/saas/roles/freqtrade/vars/main.yml new file mode 100644 index 00000000..af56e93c --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/vars/main.yml @@ -0,0 +1,11 @@ +--- +image: + build: false + forkable: false + upstream: + source: github + user: freqtrade + repo: freqtrade + type: release + labels: {} + name: freqtrade diff --git a/ansible/playbooks/saas/roles/freqtrade/vars/upstream.yml b/ansible/playbooks/saas/roles/freqtrade/vars/upstream.yml new file mode 100644 index 00000000..74d50b0a --- /dev/null +++ b/ansible/playbooks/saas/roles/freqtrade/vars/upstream.yml @@ -0,0 +1,2 @@ +--- +latest_version: "{{ (lookup('url', 'https://api.github.com/repos/' + image.upstream.user + '/' + image.upstream.repo + '/releases/latest', headers={'Accept': 'application/vnd.github+json', 'Authorization': 'Bearer ' + lookup('ansible.builtin.env', 'GITHUB_API_TOKEN') }) | from_json).get('tag_name') | replace('v', '') }}" diff --git a/ui/public/img/freqtrade.png b/ui/public/img/freqtrade.png new file mode 100644 index 0000000000000000000000000000000000000000..644982b5015bfd2c66b007545b2a529db4c98a32 GIT binary patch literal 10417 zcmW-nWk3{N8-|B&r9rww8tGIzm+tQFmXt<9x=Xr2y1N^ZkOt}Qj&I)Y$L{{vncXvU zp17auo(M&VBpNaSG5`Q*(o$l|;Ai=NFGP6o{lmWP2LPb+lotD<>X~`mf#iuT{&@d( zyOI$51j`$w#Q}>*=KSIF7Y)4>mPl{UT;|%UV^Y1Ot~|2H=Hb>y9~@0QH>ZUlw=asY zyva|9DZ!Lfa_=IozVyDV~P)}>X{RzJ-qq#<2-Wq(a{l~;3HSe{dJ z=(y=*N8Ci~0|N4?B4r9kXRRCdpt>1j@4oo2%Z0B^@KbF@Mn_L{_&hihhLu!S+C|31 zOco;^;k|aIp?74!!1mZoAKlQzNb2cjApuKBUA!k}8Az@ov%r~D|BKW8`Q`uy1_r_F zdO|`1Cp|s=KyYn+d{b2l2B!L1&A-_&5y>Vzmz=7CJPo73Wit+Yr7ROF){fx;}t+%mDr-Pl2du_iP*>J2; zstN}N>pX9VzC*GMPss4pR2Lr@0x*CfHhoPrz`C~no-_FRSt%;_W74QBA-DZ_k~6GZ zO1VaLqnWum2`rF+_{|O~pzZm^QB-P3g#uQs%djV~Vsa9a(G{GiU~g})rm49IhXGv< z6QK0!H%#7c!3Su<3O!K6!NE1W;zEej%jaJA^M8i34G_@ z5J(nT$iz6ETQaE%xF^s^JvY8#sh6u=yY^agEYZrP#xm9VK=Nl)vv{0D6h2lrHmbh| zAc& zsC}--D=l?Ns;4&99qx&jPv580_}}uxC#SRF0=S_JIe`k8fC<1c`;Sv3E)(CE!}H_g zV{`6koz}wf@$r8mz3Iux$^Wb#Q2X{HBO`ygK-c}XO&Bq)ukH~77Q@HM7l3YLz!M5M z6_wZ{_xX$uJi`%}xis}1G2uqSE&S!mh(>w;p5;zy6%G$bV@!{CT7ZtIU30jXOL+BC3lG7M= zwrjskhO((whlb-Q6}~>OCjy@fy7>81jevm00CYJcBO?nb;^WUW2}UNWY^bw}Dk`(% 
zQ^H_o3Kp~qJ!~h)K0jA?+!8o_zxbn~K)kcZ!oW~YF7JjPT1vG#)U@vBtra-q9ofY% zkoxmM04Hg8Jk90%pPsC&tbV=RAyWTm#8Yl!V&bpAPiIsV1l9Vdgnb{cU4r@e@Ve|?Q|f)f zN9Fq6(010zk9_A*slJQ=RBcu@G&Zhn;|`m$$7OqDN#ra2W0uR}OSVRZNj})8$ZEC- zBzX@;o%Hugu9bvJcihqmgXKq)=lQ;^S<|st;APn;o*o| zLbm4SMQN^;xw&L9|C&=LCnsan%GFQu@0ZhY`BV>uUZiu%LQwFscYL`bkzCqQ3g`o{ z(&Xl)N{S0BDk?@eS;lIQ!CjIjUul^u*TB0R^FQmjlaYF`7*G8bne#TD_9-cm*GBL} zKbcuCU9xQK*R;z7zsITW@9SU`D#}yC4_Xlgg@tZ}82o&EH91@j+Ld@887L|9V;t%E zI63DYACIanvIMmI&q1>1vxaM)|4?n8(G^Zv8_;-fahtg2aMfuRosXyPqtD_`Zv& z2cy=WZwZZ%Yr1`U<=|}61M`})TyFjaSBkP+g3{LYdL!sP&5HZ7otHjW#B#IKKl%K! z1n}RW+caIm$VEdlr^d&RWykVoEW^1f%QbNt5E5wbMu9EmjN+NLO zXjkHz1^9pqT>cPhbx|+)$zoD~&14XDOW+KQ#RG1$(L^GJv6K;0`LK7yGFYR88nTxI9pd}=ri^#L{6zN(8EfYq9v&!j{WRya8!F+rt?6?sCM~vqb`JbLy z_kVja{pkZ%e?mdi;aFXdF$?^nhs9qempwrU3H8W&C3Dt!HR*CR^Ed0zls4V?`v@I1 z6r_NjCL`{#;D{;v%*WKH(>4}fTH2wiIqT9$e7td%j%(#F5XdA?i!QU~5A8~G8ChA$ ziEP2;gG|lX)YR0i_;}2d0_gz+LiP6?>p=2u$~-O zG1L+AC4a=&+t?7c8xMS!1@#qIUk z>m`-V1cKq3rE6Xoe@x8Rc04SJVQtxB795_Vvk4d%Or!BO*+1{sd1p-TBBmO_(B-E8=6SK+oi=n`x z{wCeGmOx8s!`jD+>O^8)=f)j0ec&qA79Lw8Gu&Xa&^mF$6LacSyy%K@`;`VS%DQ`SI{!&h@(D)y-_zML|NKPG<9q})er@#8P{?f&iz-k zngc2fZ%zXFNf!TyvH8WtMa%7PJ2YT^y!~=JnpaPgGj9ep`f`zchH-`Z5x&Sgva2V1 zaK>{rxA%d`P>dk0DdBX+e}^={av`4Gx6SGy6pk2sp!+yt?60b? zrUr+{qJJFM+OHWgv%&DX+RaAQ1qT>-%>kt7aSVEe3bI3A88#s?vjCNVY$A5Y7Ta)^F(F zy9KFSVcRUiMc1@zAgrgYM^!`)8jGBqoJ6%R*H*tRIdo-ZBQ@*<{a=0H3S)Nm&CdRv zL0rNNiM5>eV`(ue^kWkYy?mDKntCd<>e`tkVlA@0;Pdi3_#dfQE#}CH%Z8S4{HHU5 z=cJ2SPz+j}sY~Vp8>dLHFfgFTl9{eri8?v89muFE?AdJ>Sv~JoJ$%()!B&O(@`&N< z6fR*nNst)mGlp26m;uew^~^5t_I#y`voH`(Eo(TL$s3thFnBU%fP#)6JKfy&(<8asj zf0HGxlQ9_;MSO`Bn5A$5^{4a`SW(;@#PXoQK?(brJaJuB)kI5jH^lo|v$5uRb>ncL zda>DwQr?X-AK~!}W6acxxCno_kCs)?XIi_%Z?o}7PHtm6mjoFTH29glgiFiIEe>5T z$r;=ZB;4lXsrJ2ugoIHcukhA{`pE`Sl*#v;H-| z1e;O~N0=BNYPM_f#?(0yc?2ql!tIzTQ8>k(Kl!8x+|JL>7gcvYe&na15Rr34R}eA2 z6;HNnzO>zk4QqR?2{j(oX1S(x2wTZYg_}(QSQmEI@f0m(Xrl$Ux;WiwbT&9TZ}(w%msd9!r%HGxd^)+fOWjz;0;tFWdnR$E>4~3eJp|(M>k@ZK8ah6)b*6dt&+Ky z)bCNLA+vIFof?JqQHW~=0vl_YlV4j>cO-RPeOfuC5M=lceN_v(N8^N&-k$c_Ezjh6 zBp7sAm+#oT-hxN=j^v>*v1XqCu0#L98VQGobC9Hey()8RNk#<<_SYnV&rw)y52@?w zKk-B`9phe?007VA&|6O5xIdocbEuH8mL>I(!_Rm=J(z&^N>MuWixH)LH;$S_)T*yW z7}o66My|fn-v{~m2%DnsxX6VKe8rGUcHhUx2CmTk>Q0^Z-J?$$)vZ+bhLq3k^1PA? 
zw#kg=%`X2z1ZI6VlwWY<^n7pMZE2>WqJr|f0eR-{^z^jP{`Al91Ce-p#l^*> z7Wkp~)AFW#5!Wfoze!7PbdP@JcC}$D;P>;G)=FP3GSO=4FXB zd*5Ld5d1vZ_qlx#3I?9&2~ytqTVJFLJJ~*G}&x4Fna2nBba; zhu;RaWpgW#Zjlg=tJ%7&F3hj?-={(!(PJH?us-Bm)H_Fn!v{>9B7ajapYVM;X;L`4 zd3+ZEm-t^h)W&1fbQGaUh-ff(kvEXB^pE@EN$(h#7(PSBGfOhs8$s@~qkj46Rutu@ zisklLf93?icX!(Rke4{;1OwyCAMR(o(xYTR_n;d!6&D-Z#r|4{H_J_8z?Pxbd?z^1 z_J)Ut&)Op&|G-=_+<+LgOAjAuqQvGAo))!>3HNPtpL2h5{Jsuf(C!awm$Vgnhy1N6 z<*mr|aVCbHxKvNf&DGS@^mZm+a_oh^0DrMkrwKPSBxEKK`bveO(1YgYM!w4MkMyMTo!Rby zWeO;p{wZqAs0!s5YOj8tt*opZ!^y!BTTuobb9iXom-{7XGW?ijsLrRm&tf0880jMh z&*b`#8oIq$X)=hR6BkT*CY}k@h6A*FY}Enm7qy+y+0^xyH|VZ3`BXd?OA!Us6v&29 z!4-CEPAIrP+2>8?UZbEgd86EExsWWevKJE7z=nl;Wr6kcAlw%62 zHYpmPO+?VRKRo>U`WmrN`JiD_0s?{jTlHAC11q@C3oYrMtp9vpnSM7u!-=SiHo?nG z4DO_+Hv{k-y64$<(eTF0LTdHzpf&du_2IzPClrso+QpbR+$+);$t#Q^D?L@oH!Hl= z0q-g)9ORqU4h{}?u3~lCmB*7={73#TM+P}3v&B5bF)vR7gyf&A(*J(|seC*OW)D>d}79{`%gEN4C1G29tx z6dJ?%1OizYlM>r`?1CwGj9zKBDOcG9=`jy$z)U@AOIMd`BbDp;`lzXvrppU;Iwqe?&4plC@VuEO8(_ZnULW68fB@})3SN3I9<)Au;!fkHz1F*%YADkMI=$|)y2WNQN9^xG*%ue}*GSbN7HHKXTpH|pNP~F!l^d(sZ85us?rsV+ z*8RJtiNiif`;;0c_Rs`v&&P&%Y>;`{%d`f68%YVplh>xQn1vIRB8t=SOa$18g-6&RFI?)*HU8NwT6(P4=rF=ajGrwHvgZ z7R=bSqBZjg;zsI0iKs+rYi4E^*^NOWNFeR-LBfP2@RR0=WAAW-YOdT$pge2G-J17M zzw1`1N^v?BUzpkBZiWLgv6;G>njUyJS0{5Oa)kZ6OkzRUlUz_hwQ^vnkU&mLTdXzS z@gI>AA%yoof%<@iPrDH<9YE;uENm$(c` zogM~#&)~57Ewpe%H00LO{CYjBKt1;j1kEy85ZV%I5SFTQv?h#N_c0p6z?bGX?9cRY zYUIWBW;>J)P#iImld4n>yY1o`_rcq(H2+F4!atN&i1}xDy+0#P5hX0v!z~pN6}4@{ z)jGbp?c7puC0IVk=eW}B1Sj&)+uMsx>UU3Wx6IBS5M**6EGZU)=-{R3PQeHH!l+TG zoP}*7@Si>7f&DvBx_mCVIa%2I_Ly;P*xFtH>G^6Z;?<->n$+j~%g<@wIz}4}I_!EE zy+Xka8R1;?%ixmeUtqF8 znSo;Gd95pE#vQwYgu7*Cs#k}aHRR-sB#-3&Rq58G00p%#@#WN`i;zXN8yu{dfdE4x zOpw;`@*4Le4(;hy%~;l>_OS-Rqt#v=O8q{%sgm?+HLeA<%aDAIkYIH~!5rm3tx0*e;;bZKXM8_^3sXX9-0 zox9_%)q)DB6dDx~z%v`Xj$%*1lWx5{^ZZ}0a(MMy_c<2k=8#HjHA9UYEy^;jTDtf% zepK&PbZ+_m4)MC$Me=K0^ZXa>Yzo;{LZR%Z*c(Ac!T0%1gM87OIPh|G-8|DT?A%1JVFoKrkL_pEM{^r5}IB7!H6 zAjh+!`9?*G=0~m>%$dH0@~1$(yp+ZRGy}{_3AW04Z9qoW0|tD`?qd@UgSsasb~9l} zNQnBi*mk_J6_tSOZt0R?b^wMOnrB2}C)nTF8E`W!d6%v*k;(e*g@ke;5t2?may zLvIW*|922kFO=njwvW=2W}$MaCuBx9uIHWPCO5iCv3bcr zOa|$fX(4mfMkuMemCiq%sxxM3HL$T5SG-z%Jq(D8GdvJa-_mw81r3zDzrKwP4O$Mt zuU%in#DZyABo|29CGz8v5)%sshK4li7?0fcrx!KLkO^YlgWN4uY6nW2V8?YR0lP`6 zX5*M2Kkojf<$0P?Qk0bsV z!y5CiWVtdTBI29v4T<0RM$h>6_V(rO?rxRZL)3>)%K3SD&h1PB<0mpiTq8HfB-hQYfyh794%*u33!^vLWZc{y^JREpNx+qtwJ{T^SJOuKNeJ-os`H=MGjOVo*Ek}#*ZH_t@rlt zy52f+FK!QJi>(d=DI5;Fl6pcGkNH?k+0#nw7pwIM7_iwJj9+k60Y*U(as5u6* z8yUT^*8vtkr$3RG6--P_s(Y>z6BESH#lEx8s|P*bi4Jsry>|yWoOpeV6FMH=-pytw zSW#d0VT>A>b(?FJWv6FmjEwLHP2Pur%(6vf%H-worrr?G?%VASS~4-?=lm`Dc$@RC zmlKDEI*X~IYh>))N?R@+Zj}EFKY|YZ^=-%pnL=f1P*ZGfZqD|jrsXd<0bXF&bodSvI9Iy~jd(Yv7AVqHE!n!>)r0nOsH#eV^MB4J0>7wL zR=xk5-xt!SDWRtd4V7=8B_OYiOq289kao*;I}g&t(cr_swPulu?F>1=mo0N-k7~r6 z)`^DQkwrx`41KJ|CIXuh=e|TkEjSkns2~w9#|NEiD2HHd#TDp;5wVwE9w~YG!nqxJ zTZSAAITv+W9qFOktIJCo&=4cY&(DuoE_ZTr`krFo*Yb6)OpPSVeTV1t_5A!ilU}80 zk%EFEb$D1R60`+S@mW7V3=%rHhDa{8d7SNJ@q4%*_zOSo=POOZ$to!+9f5dM*KcPO zr?I|X)$ieQhc4!yasf~79q56oRnJ(KoPIhwJk(~F8u42>G@l$HUBAC1zPQx_Um^@k2R=X&TbNheS6QEHw(Xfq1Z0oa4v0 zX#7hti@!wmqxn>};3l-hkUsg(mwL@{P+7k?4q57gJ3d8r%%y_*y^9z z&ZJRw>n)WU6`@p$A<_ParNx(p*0HPhXEgWbb!jY>dJCX2RzeFA+yv(fdSW3To(Y6* zx82Ds@4&t2%qiO6jf}y%WL1{FIyyRS(|gkwBSajQidEP-hJKbmK?{s{LmsBJoru|> zQ}_3@NMG1{VTUG^s?z4i$48o_L1bWi-MWY6>vy>NcbNiSm-antZNGLFOJ^(#Qbm{4 zE4AHH+cwo&vyD>i`M{oOL1L;k?{l)$@U=mkDI{b3h#tftfg`4vJ6uA7g1$dKd$~NX zmUiH90@JwC^WZo+X3Y1iwRMRuWo#gr4^O5-m$^$U0_q4gk0B!0nj`&pax!kuKI2L> z*(h&)2t9y}Q0h)+nJRlEmn4(q@Jmz7&0I?>O}|w2>v4Niae{hzhpLW_4X%qrj5tmo 
ztBApmu;It;K=$I7Spo6c?M}u zx);k8)xNsAVi<6*3Hr|rjdS*+|H9r5I$dz5E28S^>T%)TnxF&NHn$%%X^FuvPLq%{ zx3JLEI{O!VkV76c72``9+rb(sy~kqEY3o>gA}%h@2`I8C-X18%qP($c4Oc)4m2tok z59QE655x;AWoqTf+s1;aCi8_F z_4`1H=#)W;@BoN|7H0TGjNIIJ--`o#e$KdqL!aA}z!gX}jg33`jW$8I@_4=L?O`w5 zoB0hiiQTv|CdxMkz?x3?87eU3aP33ImtgNBAxCx~6BbY|k)#a;Z6zGRyA>CbFqb_Q zaIf1PH$`Oa>?&t_SSeG+KzCR@8|=CfKYnP?Qd5^=Vq#KZrGh(RUa8$+oridrvj;xZ z#20~15g-Kxuoa8`MZ%(aUNB{swtqRZL4^;pTC6cL{$XQ)UDJrXX+c!3r*xs6B(pOB z5(r~|lw3_N+eOE3M)<#OQCq&@dQrn>_W++!N{-Tx!QQvDu(()q*TfoMQdMOyCoRpp zJ-9W&M*RK4gEkmlf#Q;vtTD(<0UY+oJMj)A2YJnedWg#k8Vx?petb=ada;r_RaKm_mCWDtyy zJcBNHBv=RfwfViGW-K{CM#wBoPQF=y@Y>=EoPY8c-bVk;h6w~`3dAZu#gdCcs`NWr za~m5QL;mqpU*{u^fs<6ZdX;Y4YPCU^iM`*Uo=P&Jo8I(qotp|{MVRgk;5?LjkO;N! z4;t7_?MD;oS=|m*?Lcbg3UV@2W8*vuX68vNuPJRt+@WwQdyrKzeEi4`(%KISIYM?k zp69vX6ERH06Xk-1Z~!h;cUSCm=zbv&?0}5|U9jnmaUymG*UCrh(l Date: Tue, 23 Dec 2025 15:05:56 +0100 Subject: [PATCH 05/16] feat(ansible): add public IP fact collection for frontend instances --- ansible/playbooks/paas/main.yml | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/ansible/playbooks/paas/main.yml b/ansible/playbooks/paas/main.yml index 00b3e201..4a7f7c1a 100644 --- a/ansible/playbooks/paas/main.yml +++ b/ansible/playbooks/paas/main.yml @@ -16,10 +16,27 @@ ansible.builtin.file: path: /etc/ansible/facts.d state: directory - owner: "root" - group: "root" + owner: root + group: root mode: '0755' + - name: Save Public IP as local fact + when: fact_instance.location == 'frontends' + block: + - name: Get ipinfo.io (if backend instance, delegate_to the first frontend) + ansible.builtin.uri: + url: https://ipinfo.io + http_agent: curl/7.81.0 + register: register_uri + check_mode: false + + - name: Set ipinfo local_fact + ansible.builtin.copy: + content: | + {{ register_uri.json | to_nice_json }} + dest: /etc/ansible/facts.d/ipinfo.fact + mode: '0644' + - name: Install mandatories packages ansible.builtin.apt: pkg: From 2cff3a098f2c6f94dcde9a3e3b60674f9fe14760 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 15:19:28 +0100 Subject: [PATCH 06/16] feat(ansible): dedicated playbook for scan_exporter Remove `scan_exporter` from the metrology playbook list. 
It can be installed anywhere, depending on the playbook limit. --- ansible/playbooks/paas/metrology.yml | 1 - .../roles/scan_exporter/tasks/uninstall.yml | 20 +++++++++++++++++++ .../scan_exporter/templates/config.yml.j2 | 2 +- .../roles/scan_exporter/templates/default.j2 | 2 +- ansible/playbooks/paas/scan_exporter.yml | 10 ++++++++++ 5 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 ansible/playbooks/paas/roles/scan_exporter/tasks/uninstall.yml create mode 100644 ansible/playbooks/paas/scan_exporter.yml diff --git a/ansible/playbooks/paas/metrology.yml b/ansible/playbooks/paas/metrology.yml index ac821230..78b63ab1 100644 --- a/ansible/playbooks/paas/metrology.yml +++ b/ansible/playbooks/paas/metrology.yml @@ -42,7 +42,6 @@ - mongodb_exporter - blackbox_exporter - nginx_exporter - - scan_exporter - dns_exporter - script_exporter - nvidia_gpu_exporter diff --git a/ansible/playbooks/paas/roles/scan_exporter/tasks/uninstall.yml b/ansible/playbooks/paas/roles/scan_exporter/tasks/uninstall.yml new file mode 100644 index 00000000..38e55e99 --- /dev/null +++ b/ansible/playbooks/paas/roles/scan_exporter/tasks/uninstall.yml @@ -0,0 +1,20 @@ +--- +- name: Scan_exporter | Stopped scan_exporter + ansible.builtin.service: + name: scan_exporter + state: stopped + enabled: false + ignore_errors: true + +- name: Scan_exporter | Remove configuration + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: + - path: /etc/default/scan_exporter + - path: /etc/scan_exporter/config.yml + - path: /etc/systemd/system/scan_exporter.service + +- name: Scan_exporter | Daemon reload + ansible.builtin.systemd_service: + daemon_reload: true diff --git a/ansible/playbooks/paas/roles/scan_exporter/templates/config.yml.j2 b/ansible/playbooks/paas/roles/scan_exporter/templates/config.yml.j2 index 23e26b10..81b86a2b 100644 --- a/ansible/playbooks/paas/roles/scan_exporter/templates/config.yml.j2 +++ b/ansible/playbooks/paas/roles/scan_exporter/templates/config.yml.j2 @@ -10,7 +10,7 @@ tcp_period: 6h targets: [] {% else %} targets: -{% for host in groups['infrastructure'] if (fact_instance.location in ['frontends', 'frontends_vrack'] and host != inventory_hostname) %} +{% for host in groups['infrastructure'] if (host.split(".")[1] == 'frontends' and host != inventory_hostname) %} - name: "{{ host }}" ip: "{{ hostvars[host].ansible_local.ipinfo.ip }}" queries_per_sec: 500 diff --git a/ansible/playbooks/paas/roles/scan_exporter/templates/default.j2 b/ansible/playbooks/paas/roles/scan_exporter/templates/default.j2 index d9990808..940faadc 100644 --- a/ansible/playbooks/paas/roles/scan_exporter/templates/default.j2 +++ b/ansible/playbooks/paas/roles/scan_exporter/templates/default.j2 @@ -1 +1 @@ -ARGS="-metric.addr 127.0.0.1:2112 -config /etc/scan_exporter/config.yml" +ARGS="-metric.addr {% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:2112 -config /etc/scan_exporter/config.yml" diff --git a/ansible/playbooks/paas/scan_exporter.yml b/ansible/playbooks/paas/scan_exporter.yml new file mode 100644 index 00000000..cfb1171f --- /dev/null +++ b/ansible/playbooks/paas/scan_exporter.yml @@ -0,0 +1,10 @@ +--- +- name: Uninstall scan_exporter + any_errors_fatal: true + hosts: "{{ hosts_limit | default('infrastructure') }}" + gather_facts: true + become: true + pre_tasks: + - name: Uninstall scan_exporter + ansible.builtin.include_role: + name: scan_exporter \ No newline at end of file
From e0e4a8e085253d3cc28573b33bef04d0dbbbbfdd Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:09:02 +0100 Subject: [PATCH 07/16] feat(prometheus/exporters): add nomad_mode conditional for exporter addresses Update exporter and Prometheus templates to select the listen address based on the `nomad_mode` fact. In single mode the exporters bind to 127.0.0.1, while in cluster mode they use the host's IP address. Applies to blackbox_exporter, dns_exporter, node_exporter, prometheus config, promtail, script_exporter and systemd_exporter. --- .../blackbox_exporter/templates/default.j2 | 2 +- .../roles/dns_exporter/templates/default.j2 | 2 +- .../roles/node_exporter/templates/default.j2 | 2 +- .../paas/roles/prometheus/templates/config.j2 | 70 +++++++++++++++---- .../roles/promtail/templates/config.yaml.j2 | 3 +- .../script_exporter/templates/default.j2 | 2 +- .../systemd_exporter/templates/default.j2 | 2 +- 7 files changed, 62 insertions(+), 21 deletions(-) diff --git a/ansible/playbooks/paas/roles/blackbox_exporter/templates/default.j2 b/ansible/playbooks/paas/roles/blackbox_exporter/templates/default.j2 index c5556fa6..3f0fe429 100644 --- a/ansible/playbooks/paas/roles/blackbox_exporter/templates/default.j2 +++ b/ansible/playbooks/paas/roles/blackbox_exporter/templates/default.j2 @@ -1,2 +1,2 @@ ARGS="--config.file=/etc/blackbox_exporter/config.yaml \ ---web.listen-address=127.0.0.1:9115" +--web.listen-address={% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9115" diff --git a/ansible/playbooks/paas/roles/dns_exporter/templates/default.j2 b/ansible/playbooks/paas/roles/dns_exporter/templates/default.j2 index 03de3a9a..ec64b2ac 100644 --- a/ansible/playbooks/paas/roles/dns_exporter/templates/default.j2 +++ b/ansible/playbooks/paas/roles/dns_exporter/templates/default.j2 @@ -1 +1 @@ -ARGS="-L 127.0.0.1 -c /etc/dns_exporter/config.yml" +ARGS="-L {% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %} -c /etc/dns_exporter/config.yml" diff --git a/ansible/playbooks/paas/roles/node_exporter/templates/default.j2 b/ansible/playbooks/paas/roles/node_exporter/templates/default.j2 index 1289c4c7..389cb76c 100644 --- a/ansible/playbooks/paas/roles/node_exporter/templates/default.j2 +++ b/ansible/playbooks/paas/roles/node_exporter/templates/default.j2 @@ -1,5 +1,5 @@ ARGS="--log.level=info \ ---web.listen-address={{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}:9100 \ +--web.listen-address={% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9100 \ --web.telemetry-path=/metrics \ --collector.diskstats.ignored-devices='^(ram|loop|fd|(h|s|v|xv)d[a-z]|nbd|nvme[0-9]+n[0-9]+p|md|dm-)[0-9]+$' \ --collector.filesystem.mount-points-exclude='^/(dev(/shm)?|proc|run(/.+)?|sys|var/tmp|(var/lib|home)/(docker|kubelet)/.+)($|/)' \ diff --git a/ansible/playbooks/paas/roles/prometheus/templates/config.j2 b/ansible/playbooks/paas/roles/prometheus/templates/config.j2 index 36b81926..500ee307 100644 --- a/ansible/playbooks/paas/roles/prometheus/templates/config.j2 +++ b/ansible/playbooks/paas/roles/prometheus/templates/config.j2 @@ -66,12 +66,19 @@ scrape_configs: regex: "^(__tmp_keep_me)$" static_configs: -{% for item in 
groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project %} +{% if hostvars[inventory_hostname].nomad_mode == 'single' %} + - targets: ['127.0.0.1:9100'] + labels: + instance: "{{ inventory_hostname }}" + project: "{{ prometheus_project }}" +{% elif hostvars[inventory_hostname].nomad_mode == 'cluster' %} +{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project and item != inventory_hostname %} - targets: ['{{ hostvars[item]['ansible_' + hostvars[item].nomad_iface]['ipv4']['address'] }}:9100'] labels: instance: "{{ item }}" project: "{{ prometheus_project }}" {% endfor %} +{% endif %} scrape_interval: 60s scrape_timeout: 59s @@ -91,7 +98,7 @@ scrape_configs: params: script: [speedtest] static_configs: - - targets: ['127.0.0.1:9469'] + - targets: ['{% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9469'] labels: instance: "{{ inventory_hostname }}" project: "{{ prometheus_project }}" @@ -104,7 +111,7 @@ scrape_configs: regex: "^(go_|prometheus_|promhttp_).*" source_labels: [__name__] static_configs: - - targets: ['127.0.0.1:9469'] + - targets: ['{% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9469'] labels: instance: "{{ inventory_hostname }}" project: "{{ prometheus_project }}" @@ -115,7 +122,7 @@ scrape_configs: regex: "^(go_|prometheus_|promhttp_).*" source_labels: [__name__] static_configs: - - targets: ['127.0.0.1:15353'] + - targets: ['{% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:15353'] labels: instance: "{{ inventory_hostname }}" project: "{{ prometheus_project }}" @@ -135,7 +142,7 @@ scrape_configs: - source_labels: ["__address__"] target_label: "instance" - target_label: "__address__" - replacement: "127.0.0.1:15353" + replacement: "{% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:15353" - target_label: "monitor" replacement: "{{ inventory_hostname }}" static_configs: @@ -150,10 +157,19 @@ scrape_configs: regex: "^(go_|prometheus_|promhttp_).*" source_labels: [__name__] static_configs: +{% if hostvars[inventory_hostname].nomad_mode == 'single' %} - targets: ['127.0.0.1:9323'] labels: instance: "{{ inventory_hostname }}" project: "{{ prometheus_project }}" +{% elif hostvars[inventory_hostname].nomad_mode == 'cluster' %} +{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project and item != inventory_hostname %} + - targets: ['{{ hostvars[item]['ansible_' + hostvars[item].nomad_iface]['ipv4']['address'] }}:9323'] + labels: + instance: "{{ item }}" + project: "{{ prometheus_project }}" +{% endfor %} +{% endif %} - job_name: "scan_exporter" metric_relabel_configs: @@ -161,7 +177,7 @@ scrape_configs: regex: "^(go_|prometheus_|promhttp_).*" source_labels: [__name__] static_configs: - - targets: ['127.0.0.1:2112'] + - targets: ['{% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:2112'] labels: instance: "{{ inventory_hostname }}" project: "{{ prometheus_project }}" @@ -178,13 +194,19 @@ 
scrape_configs: tls_config: insecure_skip_verify: true static_configs: -{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project %} +{% if hostvars[inventory_hostname].nomad_mode == 'single' %} + - targets: ['127.0.0.1:9558'] + labels: + instance: "{{ inventory_hostname }}" + project: "{{ prometheus_project }}" +{% elif hostvars[inventory_hostname].nomad_mode == 'cluster' %} +{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project and item != inventory_hostname %} - targets: ['{{ hostvars[item]['ansible_' + hostvars[item].nomad_iface]['ipv4']['address'] }}:9558'] labels: instance: "{{ item }}" project: "{{ prometheus_project }}" {% endfor %} - +{% endif %} - job_name: "nvidia_gpu_exporter" metric_relabel_configs: - action: drop @@ -194,13 +216,19 @@ scrape_configs: format: ['prometheus'] metrics_path: /metrics static_configs: -{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project %} +{% if hostvars[inventory_hostname].nomad_mode == 'single' %} + - targets: ['127.0.0.1:9835'] + labels: + instance: "{{ inventory_hostname }}" + project: "{{ prometheus_project }}" +{% elif hostvars[inventory_hostname].nomad_mode == 'cluster' %} +{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project and item != inventory_hostname %} - targets: ['{{ hostvars[item]['ansible_' + hostvars[item].nomad_iface]['ipv4']['address'] }}:9835'] labels: instance: "{{ item }}" project: "{{ prometheus_project }}" {% endfor %} - +{% endif %} - job_name: "nomad_exporter" metric_relabel_configs: - action: drop @@ -214,13 +242,19 @@ scrape_configs: tls_config: insecure_skip_verify: true static_configs: -{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project %} +{% if hostvars[inventory_hostname].nomad_mode == 'single' %} + - targets: ['127.0.0.1:4646'] + labels: + instance: "{{ inventory_hostname }}" + project: "{{ prometheus_project }}" +{% elif hostvars[inventory_hostname].nomad_mode == 'cluster' %} +{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project and item != inventory_hostname %} - targets: ['{{ hostvars[item]['ansible_' + hostvars[item].nomad_iface]['ipv4']['address'] }}:4646'] labels: instance: "{{ item }}" project: "{{ prometheus_project }}" {% endfor %} - +{% endif %} - job_name: 'mimir_exporter' nomad_sd_configs: - server: "https://{{ nomad_primary_master_address | default(hostvars[inventory_hostname]['ansible_' + hostvars[inventory_hostname].nomad_iface]['ipv4']['address']) }}:4646" @@ -246,16 +280,22 @@ scrape_configs: tls_config: insecure_skip_verify: true static_configs: -{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project %} +{% if hostvars[inventory_hostname].nomad_mode == 'single' %} + - targets: ['127.0.0.1:9080'] + labels: + instance: "{{ inventory_hostname }}" + project: "{{ prometheus_project }}" +{% elif hostvars[inventory_hostname].nomad_mode == 'cluster' %} +{% for item in groups['infrastructure'] | default([]) if item.split('.')[4] == prometheus_project and item != inventory_hostname %} - targets: ['{{ hostvars[item]['ansible_' + hostvars[item].nomad_iface]['ipv4']['address'] }}:9080'] labels: instance: "{{ item }}" project: "{{ prometheus_project }}" {% endfor %} - +{% endif %} - job_name: 'blackbox' static_configs: - - targets: ['127.0.0.1:9115'] + - targets: ['{% if 
hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9115'] labels: instance: "{{ inventory_hostname }}" project: "{{ prometheus_project }}" diff --git a/ansible/playbooks/paas/roles/promtail/templates/config.yaml.j2 b/ansible/playbooks/paas/roles/promtail/templates/config.yaml.j2 index 50a2def9..0bac2b9b 100644 --- a/ansible/playbooks/paas/roles/promtail/templates/config.yaml.j2 +++ b/ansible/playbooks/paas/roles/promtail/templates/config.yaml.j2 @@ -1,5 +1,6 @@ server: - http_listen_address: {{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }} + http_listen_address: {% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %} + http_listen_port: 9080 grpc_listen_port: 0 log_level: warn diff --git a/ansible/playbooks/paas/roles/script_exporter/templates/default.j2 b/ansible/playbooks/paas/roles/script_exporter/templates/default.j2 index 4bfa0fb3..a3fde25f 100644 --- a/ansible/playbooks/paas/roles/script_exporter/templates/default.j2 +++ b/ansible/playbooks/paas/roles/script_exporter/templates/default.j2 @@ -1 +1 @@ -ARGS="--config.files=/etc/script_exporter/config.yml" +ARGS="--config.files=/etc/script_exporter/config.yml --web.listen-address={% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9469" diff --git a/ansible/playbooks/paas/roles/systemd_exporter/templates/default.j2 b/ansible/playbooks/paas/roles/systemd_exporter/templates/default.j2 index f64efe14..53d370e3 100644 --- a/ansible/playbooks/paas/roles/systemd_exporter/templates/default.j2 +++ b/ansible/playbooks/paas/roles/systemd_exporter/templates/default.j2 @@ -1,2 +1,2 @@ -ARGS="--web.listen-address={{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}:9558 \ +ARGS="--web.listen-address={% if hostvars[inventory_hostname].nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}{% endif %}:9558 \ --systemd.collector.unit-include=docker.service|promtail.service|coredns.service|prometheus.service|blackbox_exporter.service|node_exporter.service|scan_exporter.service" From d318ba35e80f898eb10effd08e4da39d2ce5c398 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:09:38 +0100 Subject: [PATCH 08/16] feat(ansible): add configurable alertmanager support Add a task to fetch alertmanager configuration from UI secrets and expose it as a fact. Update the `user.alertmanager.yml.j2` template to render receivers from the fetched configuration, falling back to an empty list. Extend the README with instructions for configuring alertmanager via a secret. 
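For reference, the shape consumed by the new lookup is the one documented in the README added in this patch — a sketch only, with an illustrative receiver name and webhook URL:

```
alertmanager:
  receivers:
    - name: default
      webhook_configs:
        - url: https://www.myreceiver.com/api/webhook/
          send_resolved: true
```

When no receivers are defined for the domain, the template falls back to rendering `receivers: []`.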
--- ansible/playbooks/saas/mimirtools.yml | 4 ++++ ansible/playbooks/saas/roles/mimir/README.md | 14 +++++++++++ .../mimir/templates/user.alertmanager.yml.j2 | 24 ++++---------------- 3 files changed, 23 insertions(+), 19 deletions(-) diff --git a/ansible/playbooks/saas/mimirtools.yml b/ansible/playbooks/saas/mimirtools.yml index c683a0ea..cf617e17 100644 --- a/ansible/playbooks/saas/mimirtools.yml +++ b/ansible/playbooks/saas/mimirtools.yml @@ -23,6 +23,10 @@ state: directory mode: '0755' + - name: Get alertmanager configuration + ansible.builtin.set_fact: + alertmanager: "{{ lookup('simple-stack-ui', type='secret', key=endpoint, subkey='alertmanager', missing='warn') | from_json }}" + tasks: - name: Deploy configuration ansible.builtin.include_role: diff --git a/ansible/playbooks/saas/roles/mimir/README.md b/ansible/playbooks/saas/roles/mimir/README.md index 0c600f91..11baffa9 100644 --- a/ansible/playbooks/saas/roles/mimir/README.md +++ b/ansible/playbooks/saas/roles/mimir/README.md @@ -12,3 +12,17 @@ www.domain.com: domain_alias: domain.com # (string) Primary domain name for the application. ipfilter: [] # (list) List of allowed IPs for access control (empty for unrestricted access). basic_auth: False # (bool) Enable/disable HTTP Basic Authentication (True/False). +``` + +# Configure alertmanager + +If you want to send Alertmanager alerts to a custom receiver, add a secret to your domain in the UI: + +``` +alertmanager: + receivers: + - name: default + webhook_configs: + - url: https://www.myreceiver.com/api/webhook/ + send_resolved: true +``` \ No newline at end of file diff --git a/ansible/playbooks/saas/roles/mimir/templates/user.alertmanager.yml.j2 b/ansible/playbooks/saas/roles/mimir/templates/user.alertmanager.yml.j2 index 8b620f0c..c3422568 100644 --- a/ansible/playbooks/saas/roles/mimir/templates/user.alertmanager.yml.j2 +++ b/ansible/playbooks/saas/roles/mimir/templates/user.alertmanager.yml.j2 @@ -7,14 +7,10 @@ route: repeat_interval: 90m group_interval: 5m group_wait: 60s - receiver: http_remote_api + receiver: default routes: - #- receiver: telegram_receiver - # match_re: - # severity: critical|warning - # continue: true - - receiver: http_remote_api + - receiver: default match_re: severity: critical|warning continue: true @@ -27,17 +23,7 @@ inhibit_rules: # Apply inhibition if the alertname is the same.
equal: ['alertname', 'cluster', 'service'] -receivers: - - name: 'http_remote_api' - webhook_configs: - - url: 'https://remote_api/api/alerts/trigger' - http_config: - authorization: - credentials: "credz" - send_resolved: true +receivers: {% if alertmanager.receivers is not defined %}[]{% endif %} + +{{ alertmanager.receivers | default('') | to_nice_yaml(indent=4) }} -# - name: telegram_receiver -# telegram_configs: -# - api_url: 'https://api.telegram.org' -# bot_token: '{{ notifications.telegram.token | default('') }}' -# chat_id: {{ notifications.telegram.chat_id | default('') }} From eab647c919bd9009335ebcdb9accbcb7e8f93c20 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:10:00 +0100 Subject: [PATCH 09/16] feat(ansible): use private Docker registry for adguard image --- ansible/playbooks/saas/roles/adguard/templates/nomad.hcl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl b/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl index 3bb996f3..26129dc0 100644 --- a/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl +++ b/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl @@ -45,7 +45,7 @@ job "{{ domain }}" { driver = "docker" config { - image = "{{ software }}:{{ softwares.adguard.version }}" + image = "{{ docker_private_registry.url }}/adguard:{{ softwares.adguard.version }}" network_mode = "host" privileged = "true" volumes = [ From ee56c892196d2dd524bb4405997ae3aa63f36f1b Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:10:38 +0100 Subject: [PATCH 10/16] feat(ansible): add forkable flag to image definition Include the `forkable` attribute in the catalog creation payload, defaulting to false when not provided. --- ansible/playbooks/saas/image.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/ansible/playbooks/saas/image.yml b/ansible/playbooks/saas/image.yml index 8cbd717a..cc8bb94a 100644 --- a/ansible/playbooks/saas/image.yml +++ b/ansible/playbooks/saas/image.yml @@ -69,6 +69,7 @@ schema: catalogs_create data: name: "{{ image_definition.name }}" + forkable: "{{ image_definition.forkable | default(false) }}" version: "{{ image_version }}" force_basic_auth: true status_code: 200 From ce6dbd20698abb1dfe2260da84920d10310a0bdf Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:10:49 +0100 Subject: [PATCH 11/16] feat(ansible): add project prompt to operate playbook --- ansible/playbooks/saas/operate.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ansible/playbooks/saas/operate.yml b/ansible/playbooks/saas/operate.yml index d10aa5be..9259fba0 100644 --- a/ansible/playbooks/saas/operate.yml +++ b/ansible/playbooks/saas/operate.yml @@ -7,6 +7,9 @@ - name: catalog prompt: Catalog item private: false + - name: project + prompt: project name + private: false - name: domain prompt: Domain name private: false From 413ae203cb5ff170110cc8bdc4655dd940f8374e Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:11:08 +0100 Subject: [PATCH 12/16] feat(ansible): add privileged mode and caps to homeassistant job Add privileged execution, DBus volume mount, and network capabilities to the Home Assistant Nomad job configuration. This enables Home Assistant to interact with system services (e.g., Bluetooth) that require elevated privileges and specific network capabilities. 
--- .../playbooks/saas/roles/homeassistant/templates/nomad.hcl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl b/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl index 68e101f1..663016dd 100644 --- a/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl +++ b/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl @@ -67,9 +67,14 @@ job "{{ domain }}" { config { image = "homeassistant/home-assistant:{{ softwares.homeassistant.version }}" + privileged = "true" volumes = [ - "/data/{{ domain }}/config:/config:rw" + "/data/{{ domain }}/config:/config:rw", + "/run/dbus:/run/dbus:ro" ] + + cap_add = ["net_admin", "net_raw"] + ports = ["homeassistant"] } From 7b5de91637e1eb2199ffcef5f168c82f0036b10f Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:11:31 +0100 Subject: [PATCH 13/16] feat(ansible): project lookup for restic vars in backup template MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace hard‑coded Restic variables with `lookup('simple-stack-ui', ...)` calls that pull configuration from the project definition. Remove the conditional wrapper around the export task, ensuring the task always resolves its credentials and repository via the centralized project lookups. This centralises Restic configuration and simplifies template maintenance. --- .../saas/roles/nomad/templates/backup.hcl.j2 | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2 b/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2 index 1c0e4b1a..9d1f18ff 100644 --- a/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2 +++ b/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2 @@ -49,17 +49,16 @@ job "{{ nomad_job_name }}" { } } -{% if restic_backup is defined and restic_backup %} task "{{ domain }}-export" { driver = "docker" env { - RESTIC_SERVER = "{{ restic_server }}" - RESTIC_HOST = "{{ restic_host }}" - RESTIC_SNAPSHOTS = "{{ restic_snapshots }}" + RESTIC_SERVER = "{{ lookup('simple-stack-ui', type='project', key=project, subkey='restic_server', missing='error') }}" + RESTIC_HOST = "{{ lookup('simple-stack-ui', type='project', key=project, subkey='restic_host', missing='error') }}" + RESTIC_SNAPSHOTS = "{{ lookup('simple-stack-ui', type='project', key=project, subkey='restic_snapshots', missing='error') }}" RESTIC_REPOSITORY = "{{ domain | lower }}" - RESTIC_PASSWORD = "{{ lookup('simple-stack-ui', type='secret', key=restic_server, subkey='passwd', missing='error') }}" - AWS_ACCESS_KEY_ID = "{{ lookup('simple-stack-ui', type='secret', key=restic_server, subkey='user', missing='error') }}" - AWS_SECRET_ACCESS_KEY = "{{ lookup('simple-stack-ui', type='secret', key=restic_server, subkey='passwd', missing='error') }}" + RESTIC_PASSWORD = "{{ lookup('simple-stack-ui', type='project', key=project, subkey='restic_passwd', missing='error') }}" + AWS_ACCESS_KEY_ID = "{{ lookup('simple-stack-ui', type='project', key=project, subkey='restic_user', missing='error') }}" + AWS_SECRET_ACCESS_KEY = "{{ lookup('simple-stack-ui', type='project', key=project, subkey='restic_passwd', missing='error') }}" } config { image = "alpine:latest" @@ -73,7 +72,6 @@ job "{{ nomad_job_name }}" { memory_max = 1024 } } -{% endif %} task "{{ domain }}-clean" { lifecycle { From 54294f37253a7bf3bfb8eeec481bdf2bd16877ee Mon Sep 17 00:00:00 2001 From: Mathieu Garcia 
Date: Tue, 23 Dec 2025 16:11:40 +0100 Subject: [PATCH 14/16] fix(litellm): correct upstream version parsing Add `from_json` filter to properly decode API response and simplify the regex to match only version tags with optional leading 'v'. This fixes incorrect selection of the latest stable tag. --- ansible/playbooks/saas/roles/litellm/vars/upstream.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ansible/playbooks/saas/roles/litellm/vars/upstream.yml b/ansible/playbooks/saas/roles/litellm/vars/upstream.yml index e8085f91..95b30dd7 100644 --- a/ansible/playbooks/saas/roles/litellm/vars/upstream.yml +++ b/ansible/playbooks/saas/roles/litellm/vars/upstream.yml @@ -1,3 +1,3 @@ --- upstream_versions: "{{ (lookup('url', 'https://api.github.com/repos/' + image.upstream.user + '/' + image.upstream.repo + '/tags', headers={'Accept': 'application/vnd.github+json', 'Authorization': 'Bearer ' + lookup('ansible.builtin.env', 'GITHUB_API_TOKEN') })) }}" -latest_version: "{{ upstream_versions | community.general.json_query('[].name') | select('match', '^(v|r)?\\d{1,4}(\\.\\d{1,4})(\\.\\d{1,4})(-stable)$') | community.general.version_sort | last | replace('v', '') }}" \ No newline at end of file +latest_version: "{{ upstream_versions | from_json | community.general.json_query('[].name') | select('match', '^(v)?\\d{1,4}(\\.\\d{1,4})(\\.\\d{1,4})(-stable)$') | community.general.version_sort | last | replace('v', '') }}" \ No newline at end of file From 7723f3e936cf49eef0f5bc0212b2cbb5e820d46f Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:11:53 +0100 Subject: [PATCH 15/16] feat(traefik): rename task to use domain variable --- ansible/playbooks/saas/roles/traefik/templates/nomad.hcl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl index cf901f86..b083df5f 100644 --- a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl +++ b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl @@ -61,7 +61,7 @@ job "{{ domain }}" { } } - task "traefik" { + task "{{ domain }}" { driver = "docker" From e75da365f5f86aeb74fad3817d5f78161247bc11 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Tue, 23 Dec 2025 16:12:07 +0100 Subject: [PATCH 16/16] feat(wordpress): add automated upgrades for core, plugins, themes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extend the WordPress backup script to also run WP‑CLI commands that update the core, all plugins, and all themes after creating the backup. This ensures the site is kept up‑to‑date automatically during the backup process. --- ansible/playbooks/saas/roles/wordpress/files/backup | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ansible/playbooks/saas/roles/wordpress/files/backup b/ansible/playbooks/saas/roles/wordpress/files/backup index aeade5c1..9012362b 100644 --- a/ansible/playbooks/saas/roles/wordpress/files/backup +++ b/ansible/playbooks/saas/roles/wordpress/files/backup @@ -6,3 +6,12 @@ tar -czf /var/backup/data.tgz -C /var/www/html . echo "Backup database..." /usr/bin/wp-cli db export --allow-root --path=/var/www/html /var/backup/dump.sql + +echo "Upgrade core..." +/usr/bin/wp-cli core update --allow-root --path=/var/www/html + +echo "Upgrade plugins..." +/usr/bin/wp-cli plugin update --all --allow-root --path=/var/www/html + +echo "Upgrade themes..." 
+/usr/bin/wp-cli theme update --all --allow-root --path=/var/www/html \ No newline at end of file