diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 635a09cea3e7777213e84f6b0f95eca08dad5c7b..f9a76e91715d66f0f26033409469cdf4d147b52c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,5 +1,4 @@
 ---
-
 checkpf:
   # https://docs.gitlab.com/ee/ci/pipelines/multi_project_pipelines.html
   trigger:
@@ -7,21 +6,21 @@ checkpf:
     strategy: depend
   # Sends the current branch to the ubicast-environment triggered pipeline as a variable
   variables:
-    ANSIBLE_BRANCH: "$CI_COMMIT_BRANCH"
-    DEPLOY_STD: "$DEPLOY_STD"
-    DEPLOY_HA: "$DEPLOY_HA"
-    DESTROY_STD: "$DESTROY_STD"
-    DESTROY_HA: "$DESTROY_HA"
-    LINT: "$LINT"
+    ANSIBLE_BRANCH: $CI_COMMIT_BRANCH
+    DEPLOY_STD: $DEPLOY_STD
+    DEPLOY_HA: $DEPLOY_HA
+    DESTROY_STD: $DESTROY_STD
+    DESTROY_HA: $DESTROY_HA
+    LINT: $LINT
   # Register the job in a resource group to prevent having multiple running pipelines in parallel
   resource_group: deployment
   rules:
     # Only triggers the pipeline if it's launched manually from the GitLab webinterface
-    - if: '$CI_PIPELINE_SOURCE == "web"'
+    - if: $CI_PIPELINE_SOURCE == "web"
     # Only triggers the pipeline if it's launched by a scheduled job
-    - if: '$CI_PIPELINE_SOURCE == "schedule"'
+    - if: $CI_PIPELINE_SOURCE == "schedule"
     # Only if push on the main branch
-    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "main"'
+    - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "main"
 
 lint:
   # https://docs.gitlab.com/ee/ci/pipelines/multi_project_pipelines.html
@@ -30,12 +29,10 @@ lint:
     strategy: depend
   # Sends the current branch to the ubicast-environment triggered pipeline as a variable
   variables:
-    ANSIBLE_BRANCH: "$CI_COMMIT_BRANCH"
+    ANSIBLE_BRANCH: $CI_COMMIT_BRANCH
     DEPLOY_STD: "false"
     DEPLOY_HA: "false"
   resource_group: deployment
   rules:
     # Only if push in a branch other than main
-    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH != "main"'
-
-...
+    - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH != "main"
diff --git a/.lint/ansible-lint.conf b/.lint/ansible-lint.conf
index 802cfc7b6c6b242dbae858de012743961a76158e..924d005d73b3ffee31c026aec94d6df69da25c79 100644
--- a/.lint/ansible-lint.conf
+++ b/.lint/ansible-lint.conf
@@ -5,9 +5,13 @@ exclude_paths:
   - ${HOME}/.cache/
 
 skip_list:
-  - meta-no-info    # Skip warnings for missing galaxy_info in roles
-  - role-name       # Skip role name pattern verification ("-" should not be used)
-  - package-latest  # Skip warning when package installation state is set to latest
-  - experimental    # Skip all rules tagged as experimental, as schema validation
+  - meta-no-info              # Skip warnings for missing galaxy_info in roles
+  - role-name                 # Skip role name pattern verification ("-" should not be used)
+  - package-latest            # Skip warning when package installation state is set to latest
+  - experimental              # Skip all rules tagged as experimental, as schema validation
+  - name[play]                # Skip the rule dictating that all play should have a name
+  - name[casing]              # Skip the rule dictating that all task name should begin with uppercase
+  - template-instead-of-copy  # Skip forcing the use of templates
+  - name[template]            # Skip forcing to use jinja var at the end of a task name
 
 ...
diff --git a/inventories/example-ha/group_vars/all.yml b/inventories/example-ha/group_vars/all.yml
index a771f99c707c4c1ff9afd08f301e97adecb8b28a..93675cf11e1117a4f8b6606a0b09687ec7698714 100644
--- a/inventories/example-ha/group_vars/all.yml
+++ b/inventories/example-ha/group_vars/all.yml
@@ -1,5 +1,4 @@
 ---
-
 # customer name
 customer_short_name: customer
 
@@ -16,7 +15,7 @@ repmgr_primary_node: "{{ hostvars['pg1']['ansible_default_ipv4']['address'] }}"
 # ha proxy configuration
 hap_config_listen:
   - name: pgsql-primary
-    content: |2
+    content: |
       bind localhost:54321
       default-server inter 2s fall 3 rise 2 on-marked-down shutdown-sessions
       option tcp-check
@@ -24,5 +23,3 @@ hap_config_listen:
       maxconn 500
       server pg1 192.168.122.1:5432 maxconn 500 check port 8543
       server pg2 192.168.122.2:5432 maxconn 500 check port 8543 backup
-
-...
diff --git a/inventories/example-ha/host_vars/cs1.yml b/inventories/example-ha/host_vars/cs1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/cs1.yml
+++ b/inventories/example-ha/host_vars/cs1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/mi1.yml b/inventories/example-ha/host_vars/mi1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/mi1.yml
+++ b/inventories/example-ha/host_vars/mi1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/mm1.yml b/inventories/example-ha/host_vars/mm1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/mm1.yml
+++ b/inventories/example-ha/host_vars/mm1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/mo1.yml b/inventories/example-ha/host_vars/mo1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/mo1.yml
+++ b/inventories/example-ha/host_vars/mo1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/ms1.yml b/inventories/example-ha/host_vars/ms1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/ms1.yml
+++ b/inventories/example-ha/host_vars/ms1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/ms2.yml b/inventories/example-ha/host_vars/ms2.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/ms2.yml
+++ b/inventories/example-ha/host_vars/ms2.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/mv1.yml b/inventories/example-ha/host_vars/mv1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/mv1.yml
+++ b/inventories/example-ha/host_vars/mv1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/mw1.yml b/inventories/example-ha/host_vars/mw1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/mw1.yml
+++ b/inventories/example-ha/host_vars/mw1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/mw2.yml b/inventories/example-ha/host_vars/mw2.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/mw2.yml
+++ b/inventories/example-ha/host_vars/mw2.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example-ha/host_vars/pg1.yml b/inventories/example-ha/host_vars/pg1.yml
index 99e0f90874b1e01a9f179d19aa668c6f2bb07a9b..a0748dde5abcf5c4ad4d0912a98afbc17a27a7a7 100644
--- a/inventories/example-ha/host_vars/pg1.yml
+++ b/inventories/example-ha/host_vars/pg1.yml
@@ -1,8 +1,5 @@
 ---
-
 skyreach_system_key: changeme
 
 db_role: primary
 repmgr_node_id: 1
-
-...
diff --git a/inventories/example-ha/host_vars/pg2.yml b/inventories/example-ha/host_vars/pg2.yml
index 240d32eaf861708cf8e7b7c3eafa030b8a5e3033..662eda582498796eb03c14302d02d6d8b7671033 100644
--- a/inventories/example-ha/host_vars/pg2.yml
+++ b/inventories/example-ha/host_vars/pg2.yml
@@ -1,8 +1,5 @@
 ---
-
 skyreach_system_key: changeme
 
 db_role: standby
 repmgr_node_id: 2
-
-...
diff --git a/inventories/example-ha/host_vars/pg3.yml b/inventories/example-ha/host_vars/pg3.yml
index f703de213c54ce12c2b76e8be6b03b926e19aeee..7026628524a60ff0cad35a12ff164de906f40253 100644
--- a/inventories/example-ha/host_vars/pg3.yml
+++ b/inventories/example-ha/host_vars/pg3.yml
@@ -1,8 +1,5 @@
 ---
-
 skyreach_system_key: changeme
 
 db_role: witness
 repmgr_node_id: 3
-
-...
diff --git a/inventories/example-ha/host_vars/ws1.yml b/inventories/example-ha/host_vars/ws1.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example-ha/host_vars/ws1.yml
+++ b/inventories/example-ha/host_vars/ws1.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example/group_vars/all.yml b/inventories/example/group_vars/all.yml
index fd64295010da14e414f90e362424e61cd2dba0de..0fa94b435ffb9e5517eed06d959ba24e4dd3f795 100644
--- a/inventories/example/group_vars/all.yml
+++ b/inventories/example/group_vars/all.yml
@@ -1,5 +1,4 @@
 ---
-
 # customer name
 customer_short_name: customer
 
@@ -8,5 +7,3 @@ letsencrypt_enabled: false
 
 # update conf.sh
 conf_update: false
-
-...
diff --git a/inventories/example/host_vars/mymediaserver.yml b/inventories/example/host_vars/mymediaserver.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example/host_vars/mymediaserver.yml
+++ b/inventories/example/host_vars/mymediaserver.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example/host_vars/mymediavault.yml b/inventories/example/host_vars/mymediavault.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example/host_vars/mymediavault.yml
+++ b/inventories/example/host_vars/mymediavault.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example/host_vars/mymediaworker.yml b/inventories/example/host_vars/mymediaworker.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example/host_vars/mymediaworker.yml
+++ b/inventories/example/host_vars/mymediaworker.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/example/host_vars/mynetcapture.yml b/inventories/example/host_vars/mynetcapture.yml
index 607026068ab1e5851f9a163629fae52ad5a53787..ada1c36354c2589bbec076996eb3af64b4fbdd18 100644
--- a/inventories/example/host_vars/mynetcapture.yml
+++ b/inventories/example/host_vars/mynetcapture.yml
@@ -1,5 +1,2 @@
 ---
-
 skyreach_system_key: changeme
-
-...
diff --git a/inventories/local-full/host_vars/localhost.dist.yml b/inventories/local-full/host_vars/localhost.dist.yml
index a2fceb242fae619a3f8b0b4ce40a8360e568eaab..64334eceaf738b4e5506792694af16ec28357c80 100644
--- a/inventories/local-full/host_vars/localhost.dist.yml
+++ b/inventories/local-full/host_vars/localhost.dist.yml
@@ -1,5 +1,4 @@
 ---
-
 # customer name
 customer_short_name: customer
 
@@ -12,5 +11,3 @@ conf_update: false
 # activation keys
 skyreach_system_key:
 skyreach_activation_key:
-
-...
diff --git a/inventories/local-mediaimport/host_vars/localhost.dist.yml b/inventories/local-mediaimport/host_vars/localhost.dist.yml
index acc1ed0a4725fb9b4d82ffc6bb48471499d52646..55e2d8a4aadc7dd56064f452a939b59b7e5f2372 100644
--- a/inventories/local-mediaimport/host_vars/localhost.dist.yml
+++ b/inventories/local-mediaimport/host_vars/localhost.dist.yml
@@ -1,7 +1,4 @@
 ---
-
 # activation keys
 skyreach_system_key:
 skyreach_activation_key:
-
-...
diff --git a/inventories/local-mediaserver/host_vars/localhost.dist.yml b/inventories/local-mediaserver/host_vars/localhost.dist.yml
index a2fceb242fae619a3f8b0b4ce40a8360e568eaab..64334eceaf738b4e5506792694af16ec28357c80 100644
--- a/inventories/local-mediaserver/host_vars/localhost.dist.yml
+++ b/inventories/local-mediaserver/host_vars/localhost.dist.yml
@@ -1,5 +1,4 @@
 ---
-
 # customer name
 customer_short_name: customer
 
@@ -12,5 +11,3 @@ conf_update: false
 # activation keys
 skyreach_system_key:
 skyreach_activation_key:
-
-...
diff --git a/inventories/local-mediavault/host_vars/localhost.dist.yml b/inventories/local-mediavault/host_vars/localhost.dist.yml
index d3e5920e6d785a0b78e30060d02dca673653439c..c4a35380be5381a18d5c6a742726492882fc66d6 100644
--- a/inventories/local-mediavault/host_vars/localhost.dist.yml
+++ b/inventories/local-mediavault/host_vars/localhost.dist.yml
@@ -1,6 +1,3 @@
 ---
-
 skyreach_system_key:
 skyreach_activation_key:
-
-...
diff --git a/inventories/local-mediaworker/host_vars/localhost.dist.yml b/inventories/local-mediaworker/host_vars/localhost.dist.yml
index d3e5920e6d785a0b78e30060d02dca673653439c..c4a35380be5381a18d5c6a742726492882fc66d6 100644
--- a/inventories/local-mediaworker/host_vars/localhost.dist.yml
+++ b/inventories/local-mediaworker/host_vars/localhost.dist.yml
@@ -1,6 +1,3 @@
 ---
-
 skyreach_system_key:
 skyreach_activation_key:
-
-...
diff --git a/inventories/offline-mediaserver/host_vars/localhost.dist.yml b/inventories/offline-mediaserver/host_vars/localhost.dist.yml
index 8e7d14d73903a6cebbef5b274a5ee2233e5f8c9d..9e688644ed1d322a5427aeb9a42430c73d008fc8 100644
--- a/inventories/offline-mediaserver/host_vars/localhost.dist.yml
+++ b/inventories/offline-mediaserver/host_vars/localhost.dist.yml
@@ -1,9 +1,6 @@
 ---
-
 # customer name
 customer_short_name: customer
 
 # install in offline environment
 offline_mode: true
-
-...
diff --git a/inventories/offline-mediaworker/host_vars/localhost.dist.yml b/inventories/offline-mediaworker/host_vars/localhost.dist.yml
index f54433c0b32d94b04392fec656542b6fe02446d6..742fb1fa4d53914b6eba0576d39a3d66cc674e88 100644
--- a/inventories/offline-mediaworker/host_vars/localhost.dist.yml
+++ b/inventories/offline-mediaworker/host_vars/localhost.dist.yml
@@ -1,6 +1,3 @@
 ---
-
 # install in offline environment
 offline_mode: true
-
-...
diff --git a/inventories/test-container/host_vars/ansibletest.yml b/inventories/test-container/host_vars/ansibletest.yml
index d4f78ba9ad7067bec2bd224b953d8d8b3e8e31ed..55e2d8a4aadc7dd56064f452a939b59b7e5f2372 100644
--- a/inventories/test-container/host_vars/ansibletest.yml
+++ b/inventories/test-container/host_vars/ansibletest.yml
@@ -2,4 +2,3 @@
 # activation keys
 skyreach_system_key:
 skyreach_activation_key:
-...
diff --git a/playbooks/base.yml b/playbooks/base.yml
index fdb0ed4c0528cdaf66cd63cfbb9c195cd4fd71f7..1f11bd108edc33870c71e22a0b45fa56e5fa5493 100755
--- a/playbooks/base.yml
+++ b/playbooks/base.yml
@@ -1,10 +1,7 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: BASE
   hosts: all
   tags: all
   roles:
     - base
-
-...
diff --git a/playbooks/bench-monitoring.yml b/playbooks/bench-monitoring.yml
index 7784c8b299d7389165ffd80666c9534c7d7d7db8..bd9dd5b7d7e3015322a1c4c78d620f90001c2e7e 100755
--- a/playbooks/bench-monitoring.yml
+++ b/playbooks/bench-monitoring.yml
@@ -1,9 +1,8 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: DEPLOY ELASTIC KIBANA SERVER
   hosts: elastic
-  tags: ['never', 'monbench']
+  tags: [never, monbench]
   vars:
     - es_heap_size: 2g
     - es_config:
@@ -20,12 +19,10 @@
 
 - name: DEPLOY METRICBEAT WORKERS
   hosts: mediaserver,postgres
-  tags: ['never', 'monbench']
+  tags: [never, monbench]
   vars:
     - kibana_server_host: "{{ hostvars[groups['elastic'][0]]['ansible_default_ipv4']['address'] }}"
     - elastic_host: "{{ es_config['network.host'] }}"
     - es_api_host: "{{ es_config['network.host'] }}"
   roles:
     - metricbeat
-
-...
diff --git a/playbooks/bench.yml b/playbooks/bench.yml
index 02602c622dd117de0d2de258fbad498e97dd45dd..a8fe110c39b502d4248c758c62cedea3f2550ccf 100755
--- a/playbooks/bench.yml
+++ b/playbooks/bench.yml
@@ -1,12 +1,11 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: DEPLOY BENCHMARK SERVER
   hosts: bench_server
   pre_tasks:
-    - name: "Fail is benchmark server is not unique"
+    - name: Fail if benchmark server is not unique
       ansible.builtin.fail:
-        msg: "Benchmark server must be unique"
+        msg: Benchmark server must be unique
       when: groups['bench_server'] | length > 1
   tags: bench_server
   roles:
@@ -33,5 +32,3 @@
       ansible.builtin.service:
         name: bench-worker
         state: restarted
-
-...
diff --git a/playbooks/celerity.yml b/playbooks/celerity.yml
index 5094f5b899f2bb0de32ccf05de5d82896d626f22..b32b531b5c24493d2e7be038a56df98b0ff5c984 100755
--- a/playbooks/celerity.yml
+++ b/playbooks/celerity.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: CELERITY SERVER
   hosts: celerity
   tags: celerity
@@ -15,5 +14,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/letsencrypt.yml b/playbooks/letsencrypt.yml
index 571d59282b9a0bc534ad935fa899d4d4cf98938f..46e195295fe883d119e582658dfb13b3dfad7fb1 100755
--- a/playbooks/letsencrypt.yml
+++ b/playbooks/letsencrypt.yml
@@ -1,10 +1,7 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: Let's encrypt
   hosts: all
   tags: all
   roles:
     - letsencrypt
-
-...
diff --git a/playbooks/live/deploy-minimal.yml b/playbooks/live/deploy-minimal.yml
index d2b73cdb586e182bae7af594903262265dc44e3d..dcbcbf39fced87343c88d76c02847de0ecd15b76 100644
--- a/playbooks/live/deploy-minimal.yml
+++ b/playbooks/live/deploy-minimal.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: LIVE
   hosts: live
   gather_facts: false
@@ -8,17 +7,7 @@
     - live
 
 - import_playbook: subplays/standard-case.yml
-  when: groups['live'] | d('') | length >= 1
-        and (
-          hostvars[groups['live'][0]].ip_live is undefined
-          or hostvars[groups['live'][0]].ip_live == "127.0.0.1"
-        )
+  when: groups['live'] | d('') | length >= 1 and ( hostvars[groups['live'][0]].ip_live is undefined or hostvars[groups['live'][0]].ip_live == "127.0.0.1" )
 
 - import_playbook: subplays/ha-case.yml
-  when: groups['live'] | d('') | length >= 1
-        and (
-          hostvars[groups['live'][0]].ip_live is defined
-          and hostvars[groups['live'][0]].ip_live != "127.0.0.1"
-        )
-
-...
+  when: groups['live'] | d('') | length >= 1 and ( hostvars[groups['live'][0]].ip_live is defined and hostvars[groups['live'][0]].ip_live != "127.0.0.1" )
diff --git a/playbooks/live/deploy-standalone.yml b/playbooks/live/deploy-standalone.yml
index 4c7803530bd767ed23814f20b5257f0d0749564c..751da97be759d06447bf6a72944937433746177a 100644
--- a/playbooks/live/deploy-standalone.yml
+++ b/playbooks/live/deploy-standalone.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: LIVE
   hosts: live
   tags: live
@@ -35,5 +34,3 @@
 
 - import_playbook: deploy-minimal.yml
   tags: live
-
-...
diff --git a/playbooks/live/functions/create-live-app.yml b/playbooks/live/functions/create-live-app.yml
index a411f29598693dd7990b1aaea1bd2948ec51da11..eba2859e7c5c95c08fc0293f012da98d34b36fd5 100644
--- a/playbooks/live/functions/create-live-app.yml
+++ b/playbooks/live/functions/create-live-app.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: Checking the live(s) server(s) live configuration state
   hosts: live
   gather_facts: false
@@ -66,8 +65,7 @@
     - name: Comparing the app secrets from MS an live servers with the reference
       ansible.builtin.set_fact:
         app_secret_diff: true
-      when: base_live_secret is defined
-            and hostvars[item].live_secret != base_live_secret
+      when: base_live_secret is defined and hostvars[item].live_secret != base_live_secret
       with_items:
         - "{{ groups['live'] }}"
         - "{{ groups['mediaserver'] }}"
@@ -79,9 +77,8 @@
       register: secret
       args:
         executable: /bin/bash
-      failed_when: false  # Ansible-lint requires pipefail, but the return is then non-null so we have to force this
-      when: base_live_secret is not defined
-            or app_secret_diff
+      failed_when: false  # Ansible-lint requires pipefail, but the return is then non-null so we have to force this
+      when: base_live_secret is not defined or app_secret_diff
 
     - name: Deciding the application secret to use
       ansible.builtin.set_fact:
@@ -110,7 +107,7 @@
         owner: nginx
         group: root
         state: directory
-        mode: '0700'
+        mode: "0700"
 
     - name: Create the nginx RTMP web directory symlink
       notify: Reload nginx
@@ -167,7 +164,7 @@
     - name: Set the RTMP_PLAYBACK_URL in lives configuration
       vars:
         rtmp_playback_line:
-          RTMP_PLAYBACK_URL: null
+          RTMP_PLAYBACK_URL: null
       ansible.builtin.set_fact:
         lives_config: "{{ lives_config | combine(rtmp_playback_line) }}"
 
@@ -182,15 +179,13 @@
       notify: Restart mediaserver
       ansible.builtin.copy:
         content: "{{ lives_config | to_nice_json }}"
-        dest: "/home/{{ live_app_name }}/msinstance/conf/lives.json"
+        dest: /home/{{ live_app_name }}/msinstance/conf/lives.json
         owner: "{{ live_app_name }}"
         group: "{{ live_app_name }}"
-        mode: '0600'
+        mode: "0600"
 
   handlers:
     - name: Restart mediaserver
       ansible.builtin.systemd:
         name: mediaserver
         state: restarted
-
-...
diff --git a/playbooks/live/subplays/ha-case.yml b/playbooks/live/subplays/ha-case.yml
index 1ee19ff95e8ab2ed16d663968d9e3b0e8eb70aad..659b697f7c1238cb87cc5e71e28c3afac8f04477 100644
--- a/playbooks/live/subplays/ha-case.yml
+++ b/playbooks/live/subplays/ha-case.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: Live vhost setup
   hosts: live
   tags: live
@@ -8,15 +7,15 @@
     - name: resolve domain name to localhost
       ansible.builtin.lineinfile:
         path: /etc/hosts
-        line: '127.0.1.1 {{ live_domain }}'
+        line: 127.0.1.1 {{ live_domain }}
         backup: true
 
     - name: fill the vhost file
       notify: Restart nginx
       ansible.builtin.replace:
         path: /etc/nginx/sites-available/live-rtmp.conf
-        regexp: '^(\s+server_name)\s+.*(;)$'
-        replace: '\1 {{ live_domain }}\2'
+        regexp: ^(\s+server_name)\s+.*(;)$
+        replace: \1 {{ live_domain }}\2
 
     - name: Activating the live vhost configuration
       notify: Restart nginx
@@ -34,8 +33,8 @@
 - import_playbook: ../functions/create-live-app.yml
   vars:
     live_app_name: msuser
-    rtmp_hls_url: "https://{{ hostvars[groups['live'][0]].live_domain }}/streaming-rtmp/%(rtmp_name)s/%(stream_id)s.m3u8"
-    rtmp_pub_url: "rtmp://{{ hostvars[groups['live'][0]].live_domain }}/%(rtmp_app)s/%(stream_id)s"
+    rtmp_hls_url: https://{{ hostvars[groups['live'][0]].live_domain }}/streaming-rtmp/%(rtmp_name)s/%(stream_id)s.m3u8
+    rtmp_pub_url: rtmp://{{ hostvars[groups['live'][0]].live_domain }}/%(rtmp_app)s/%(stream_id)s
     deploy_case: ha
 
 - hosts: mediaserver
@@ -52,5 +51,3 @@
       args:
         warn: false
       when: rtmp_conf_dir.stat.exists
-
-...
diff --git a/playbooks/live/subplays/standard-case.yml b/playbooks/live/subplays/standard-case.yml
index 00623dc40acca499c07b6c36340bf2ebd86340ce..b0a637ef2ffc9bca4fd261bf1e2cf0dca52a4238 100644
--- a/playbooks/live/subplays/standard-case.yml
+++ b/playbooks/live/subplays/standard-case.yml
@@ -1,10 +1,7 @@
 ---
-
 - import_playbook: ../functions/create-live-app.yml
   vars:
     live_app_name: msuser
-    rtmp_hls_url: "https://%(ms_host)s/streaming-rtmp/%(stream_id)s.m3u8"
-    rtmp_pub_url: "rtmp://%(ms_host)s/%(rtmp_app)s/%(stream_id)s"
+    rtmp_hls_url: https://%(ms_host)s/streaming-rtmp/%(stream_id)s.m3u8
+    rtmp_pub_url: rtmp://%(ms_host)s/%(rtmp_app)s/%(stream_id)s
     deploy_case: standard
-
-...
diff --git a/playbooks/mediacache.yml b/playbooks/mediacache.yml
index 045b1c3817365b1204321988ea21ed90a5c354ab..e15bc736239b96837c93609f702443e4b2637d29 100755
--- a/playbooks/mediacache.yml
+++ b/playbooks/mediacache.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIACACHE
   hosts: mediacache
   tags: mediacache
@@ -19,5 +18,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/mediacache/deploy-minimal.yml b/playbooks/mediacache/deploy-minimal.yml
index 7caadf517a962b3fd3108ca1a560d6924a2b04a1..0227b8ab8b9ba13b9bcbe2fd8e26bfdc80da688b 100644
--- a/playbooks/mediacache/deploy-minimal.yml
+++ b/playbooks/mediacache/deploy-minimal.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: MEDIACACHE
   hosts: mediacache
   tags: mediacache
@@ -14,8 +13,8 @@
       notify: restart nginx on mediaservers
       ansible.builtin.lineinfile:
         path: /etc/nginx/conf.d/mediaserver-securelink.conf
-        line: "{{'\t'}}{{ securelink_ip }} 1;"  # noqa: no-tabs
-        insertafter: '^geo'
+        line: "{{'\t'}}{{ securelink_ip }} 1;"  # noqa: no-tabs jinja[spacing]
+        insertafter: ^geo
       delegate_to: "{{ item }}"
       delegate_facts: true
       loop: "{{ groups['mediaserver'] }}"
@@ -28,5 +27,3 @@
       delegate_to: "{{ item }}"
       delegate_facts: true
       loop: "{{ groups['mediaserver'] }}"
-
-...
diff --git a/playbooks/mediacache/deploy-standalone.yml b/playbooks/mediacache/deploy-standalone.yml
index 9efd3e7456630f83f20196175c1146dc06a43f80..6621c3d0c37a478fb35b9b6f15c0ec6a7f2af40a 100644
--- a/playbooks/mediacache/deploy-standalone.yml
+++ b/playbooks/mediacache/deploy-standalone.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIACACHE
   hosts: mediacache
   tags: mediacache
@@ -35,5 +34,3 @@
 
 - import_playbook: deploy-minimal.yml
   tags: mediacache
-
-...
diff --git a/playbooks/mediaimport.yml b/playbooks/mediaimport.yml
index 84363dfd7b8db8caa964c97726cce1b21cfd6aeb..a0f8abdbcfef2a14444dfa8c967ec5ee4056d28b 100755
--- a/playbooks/mediaimport.yml
+++ b/playbooks/mediaimport.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIAIMPORT
   hosts: mediaimport
   tags: mediaimport
@@ -15,5 +14,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/mediaserver.yml b/playbooks/mediaserver.yml
index f6c798bc60cf7c0952f5e5a02ae4b96cdbc2b2cb..5ce70e337aa6296d0a6f0ad889f7a0d8acb13b65 100755
--- a/playbooks/mediaserver.yml
+++ b/playbooks/mediaserver.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIASERVER
   hosts: mediaserver
   tags: mediaserver
@@ -19,5 +18,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/mediavault/add_backup.yml b/playbooks/mediavault/add_backup.yml
index 62e2ed43d65211a7309ad5dd96284aa0da0ebf60..7aba526674f3517902b10bb4f14598f2bdb73853 100755
--- a/playbooks/mediavault/add_backup.yml
+++ b/playbooks/mediavault/add_backup.yml
@@ -1,11 +1,8 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIAVAULT
   hosts: mediavault
   tags: mediavault
   tasks:
-  - include_tasks: ressources/add_backup_task.yml
-    loop: "{{ mvt_backups }}"
-
-...
+    - include_tasks: ressources/add_backup_task.yml
+      loop: "{{ mvt_backups }}"
diff --git a/playbooks/mediavault/deploy.yml b/playbooks/mediavault/deploy.yml
index a18f2914af1f11dd96719881fc86580c23ef0ae9..37e555938e5b049738a1c1eb86448c0f6bb8edc8 100755
--- a/playbooks/mediavault/deploy.yml
+++ b/playbooks/mediavault/deploy.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIAVAULT
   hosts: mediavault
   tags: mediavault
@@ -15,5 +14,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/mediavault/ressources/add_backup_task.yml b/playbooks/mediavault/ressources/add_backup_task.yml
index eebe94c78a62e6eb4f2d50cf89cd9915d2c791e7..3c930f6230b8496836ab4b26c75ea94e51d3c959 100644
--- a/playbooks/mediavault/ressources/add_backup_task.yml
+++ b/playbooks/mediavault/ressources/add_backup_task.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: get {{ item.name  }} backup information
   stat: path={{ item.dest }}/backup.marker
   register: backup_marker
@@ -7,5 +6,3 @@
 - name: create {{ item.name }} backup
   ansible.builtin.shell: mediavaultctl add --backup-name "{{ item.name }}" --source-folder "{{ item.source  }}" --dest-folder "{{ item.dest }}"
   when: not backup_marker.stat.exists
-
-...
diff --git a/playbooks/mediaworker.yml b/playbooks/mediaworker.yml
index 0922e95a2019918fc17712b6f4f17908363ca721..07712b6cc4d3cad63f32020716df33b5740cadfc 100755
--- a/playbooks/mediaworker.yml
+++ b/playbooks/mediaworker.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MEDIAWORKER
   hosts: mediaworker
   tags: mediaworker
@@ -15,5 +14,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/mirismanager.yml b/playbooks/mirismanager.yml
index 0cab6e08f7e6e3a219b394787e1b1ab9c8a65dba..6ba1ce6bb48499ca3bfed095746d2e3119f66a6b 100755
--- a/playbooks/mirismanager.yml
+++ b/playbooks/mirismanager.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: MIRIS MANAGER
   hosts: mirismanager
   tags: mirismanager
@@ -19,5 +18,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/munin/all.yml b/playbooks/munin/all.yml
index 0e46d98d456f9d9a2476705ca2575bffa0c418c3..1de5d53f17ea8d5751215ab09316f7592f112b21 100644
--- a/playbooks/munin/all.yml
+++ b/playbooks/munin/all.yml
@@ -1,11 +1,8 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - import_playbook: msmonitor.yml
   tags: monitor
 - import_playbook: munin-node.yml
   tags: monitor
 - import_playbook: munin-server.yml
   tags: monitor
-
-...
diff --git a/playbooks/munin/msmonitor.yml b/playbooks/munin/msmonitor.yml
index 7b066cd2a18517ae5303a7a3296447c8533153a7..b0036f1a67d4e4072a797b106a5da69138ae4863 100644
--- a/playbooks/munin/msmonitor.yml
+++ b/playbooks/munin/msmonitor.yml
@@ -1,10 +1,7 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: msmonitor
   hosts: msmonitor
   tags: munin
   roles:
     - munin/msmonitor
-
-...
diff --git a/playbooks/munin/munin-node.yml b/playbooks/munin/munin-node.yml
index fd3de0664613d652bf616a59ec68bb9f8addf6e4..a395c88509600d75d69907f5099870406afbc0b8 100644
--- a/playbooks/munin/munin-node.yml
+++ b/playbooks/munin/munin-node.yml
@@ -1,10 +1,7 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: Munin node
   hosts: munin_node
   tags: munin
   roles:
     - munin/munin-node
-
-...
diff --git a/playbooks/munin/munin-server.yml b/playbooks/munin/munin-server.yml
index 9408dcb017789e6a1e500596a462c9a167a2ea3a..ff26894a4e0ff7d4223a8eeedae0adaaf9a2eca4 100644
--- a/playbooks/munin/munin-server.yml
+++ b/playbooks/munin/munin-server.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: Munin server
   hosts: munin_server
   tags: munin
@@ -12,5 +11,3 @@
       with_items: "{{ groups['munin_node'] }}"
   roles:
     - munin/munin-server
-
-...
diff --git a/playbooks/netcapture.yml b/playbooks/netcapture.yml
index 29c756d942a9fc6c270d6d80d134881b385fab08..b622c77b28eb592d9d9d3efb0ab7020d3a5655f3 100755
--- a/playbooks/netcapture.yml
+++ b/playbooks/netcapture.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: NETCAPTURE
   hosts: netcapture
   tags: netcapture
@@ -15,5 +14,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/netcapture/deploy-minimal.yml b/playbooks/netcapture/deploy-minimal.yml
index be79c389c76f774f3f204874ad526c82cf752ddb..1dc1c0e33869cb64a1d90a335ee2117d1bcdc221 100644
--- a/playbooks/netcapture/deploy-minimal.yml
+++ b/playbooks/netcapture/deploy-minimal.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: NETCAPTURE
   hosts: netcapture
   gather_facts: false
@@ -7,5 +6,3 @@
   roles:
     - lxc
     - netcapture
-
-...
diff --git a/playbooks/netcapture/deploy-standalone.yml b/playbooks/netcapture/deploy-standalone.yml
index f9b55731471ecaae307b03b2b9b39f9a49c031f9..4d94cfbe55c5d2b55c00603f1fbb338d5da06b84 100644
--- a/playbooks/netcapture/deploy-standalone.yml
+++ b/playbooks/netcapture/deploy-standalone.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: NETCAPTURE
   hosts: netcapture
   tags: netcapture
@@ -15,8 +14,7 @@
     server_ferm_global_settings:
   tasks:
     - name: firewall
-      when: ((server_firewall_enabled is defined) and server_firewall_enabled)
-            or (server_firewall_enabled is undefined)
+      when: ((server_firewall_enabled is defined) and server_firewall_enabled) or (server_firewall_enabled is undefined)
       vars:
         ferm_rules_filename: "{{ server_ferm_rules_filename }}"
         ferm_input_rules: "{{ server_ferm_input_rules }}"
@@ -27,5 +25,3 @@
 
 - import_playbook: deploy-minimal.yml
   tags: netcapture
-
-...
diff --git a/playbooks/postfix.yml b/playbooks/postfix.yml
index 719fba051f559046933342f416bab88ee0a94377..606eeccf493226315b8b9467df6ff45ed4499ebb 100755
--- a/playbooks/postfix.yml
+++ b/playbooks/postfix.yml
@@ -1,11 +1,8 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: POSTFIX
   hosts: all
   tags: all
   roles:
     - conf
     - postfix
-
-...
diff --git a/playbooks/postgres-ha.yml b/playbooks/postgres-ha.yml
index b7fc0a0ab4066aba886ff713e25be3f4665c0b69..858fad5592f65da951c02570adf8a422d5481929 100755
--- a/playbooks/postgres-ha.yml
+++ b/playbooks/postgres-ha.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: POSTGRES HA
   hosts: postgres
   tags: postgres
@@ -26,7 +25,7 @@
 
 - name: POSTGRES HA CLIENTS
   hosts: mediaserver
-  tags: ['postgres', 'mediaserver']
+  tags: [postgres, mediaserver]
   pre_tasks:
     - name: check that haproxy is configured
       ansible.builtin.assert:
@@ -34,5 +33,3 @@
         quiet: true
   roles:
     - haproxy
-
-...
diff --git a/playbooks/postgres-maintenance.yml b/playbooks/postgres-maintenance.yml
index 4e0a4c9517f3a497506f26bbc0d1b5c375502c84..65ac99cf63efbdd637d1fcc4e8c400801cd5b569 100755
--- a/playbooks/postgres-maintenance.yml
+++ b/playbooks/postgres-maintenance.yml
@@ -1,16 +1,10 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - import_playbook: postgres-maintenance/rephacheck_status.yml
-  tags: ['always']
-
+  tags: [always]
 - import_playbook: postgres-maintenance/fenced_to_standby.yml
-  tags: ['never', 'fenced-to-standby']
-
+  tags: [never, fenced-to-standby]
 - import_playbook: postgres-maintenance/standby_to_primary.yml
-  tags: ['never', 'standby-to-primary']
-
+  tags: [never, standby-to-primary]
 - import_playbook: postgres-maintenance/restart_repmgrd.yml
-  tags: ['never', 'restart-repmgrd', 'standby-to-primary']
-
-...
+  tags: [never, restart-repmgrd, standby-to-primary]
diff --git a/playbooks/postgres-maintenance/fenced_to_standby.yml b/playbooks/postgres-maintenance/fenced_to_standby.yml
index bbd813bcda90578934cde9871ea1e91af2edfc66..a13c8e290933bf02048c839d6d5b1d9e57253023 100644
--- a/playbooks/postgres-maintenance/fenced_to_standby.yml
+++ b/playbooks/postgres-maintenance/fenced_to_standby.yml
@@ -5,7 +5,7 @@
   tasks:
     - name: fail if node status if not fenced
       ansible.builtin.fail:
-        msg: "Current status {{ rephacheck['stdout'] }} must be fenced."
+        msg: Current status {{ rephacheck['stdout'] }} must be fenced.
       when: rephacheck['stdout'] != "fenced"
 
     - name: stop postgresql
@@ -39,9 +39,7 @@
       when: copy_from_primary is succeeded
 
     - name: register node as standby
-      ansible.builtin.command: "repmgr -f /etc/postgresql/13/main/repmgr.conf --force --verbose standby register"
+      ansible.builtin.command: repmgr -f /etc/postgresql/13/main/repmgr.conf --force --verbose standby register
       become: true
       become_user: postgres
       when: copy_from_primary is succeeded
-
-...
diff --git a/playbooks/postgres-maintenance/rephacheck_status.yml b/playbooks/postgres-maintenance/rephacheck_status.yml
index 4984ead9f2595631429d5e9543934611956c3000..c0ad7448c7acd9a7e67e1ed5b0084be3a1206e31 100644
--- a/playbooks/postgres-maintenance/rephacheck_status.yml
+++ b/playbooks/postgres-maintenance/rephacheck_status.yml
@@ -4,13 +4,11 @@
   hosts: postgres_primary:postgres_standby:postgres_fenced
   tasks:
     - name: get cluster state
-      ansible.builtin.command: "rephacheck"
+      ansible.builtin.command: rephacheck
       register: rephacheck
       changed_when: false
 
     - name: show status for each node
       ansible.builtin.debug:
-        msg: "Current node {{ ansible_hostname }} status {{ rephacheck['stdout'] }}"
+        msg: Current node {{ ansible_hostname }} status {{ rephacheck['stdout'] }}
       when: rephacheck['stdout'] | length > 0
-
-...
diff --git a/playbooks/postgres-maintenance/restart_repmgrd.yml b/playbooks/postgres-maintenance/restart_repmgrd.yml
index 68d0da2db2ab6dd61e21132171e3da117f07ae6c..eca93c4a4516afafd166698cefdf295fa0be162e 100644
--- a/playbooks/postgres-maintenance/restart_repmgrd.yml
+++ b/playbooks/postgres-maintenance/restart_repmgrd.yml
@@ -4,7 +4,7 @@
   hosts: postgres
   tasks:
     - name: kill repmgrd
-      ansible.builtin.command: "pkill repmgrd"
+      ansible.builtin.command: pkill repmgrd
       # TOFIX: implement a proper verification
       changed_when: false
       failed_when: false
@@ -13,5 +13,3 @@
       ansible.builtin.systemd:
         name: repmgrd
         state: restarted
-
-...
diff --git a/playbooks/postgres-maintenance/standby_to_primary.yml b/playbooks/postgres-maintenance/standby_to_primary.yml
index a4be1c4bcd84791d9e603995bd13f05a35862c95..ca7b31356c8e6216f3a90e96e8585cc3a322f578 100644
--- a/playbooks/postgres-maintenance/standby_to_primary.yml
+++ b/playbooks/postgres-maintenance/standby_to_primary.yml
@@ -5,20 +5,18 @@
   tasks:
     - name: fail if node status if not standby
       ansible.builtin.fail:
-        msg: "Current status {{ rephacheck['stdout'] }} must be standby."
+        msg: Current status {{ rephacheck['stdout'] }} must be standby.
       when: rephacheck['stdout'] != "standby"
     - name: check if node is currently in standby
-      ansible.builtin.command: "repmgr standby switchover -f /etc/postgresql/13/main/repmgr.conf --siblings-follow --dry-run"
+      ansible.builtin.command: repmgr standby switchover -f /etc/postgresql/13/main/repmgr.conf --siblings-follow --dry-run
       become: true
       become_user: postgres
       when: rephacheck['stdout'] == "standby"
       register: standby_dry_run
     - name: switch standby node to primary
-      ansible.builtin.command: "repmgr standby switchover -f /etc/postgresql/13/main/repmgr.conf --siblings-follow"
+      ansible.builtin.command: repmgr standby switchover -f /etc/postgresql/13/main/repmgr.conf --siblings-follow
       become: true
       become_user: postgres
       when:
         - standby_dry_run is succeeded
         - rephacheck['stdout'] == "standby"
-
-...
diff --git a/playbooks/postgres.yml b/playbooks/postgres.yml
index 3d873561d335bcbfa554b4cb01d708bffe8e6cdc..73ebd6e1f2a0a23684bbd38901aa63b5d99c7cb3 100755
--- a/playbooks/postgres.yml
+++ b/playbooks/postgres.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: POSTGRESQL
   hosts: postgres
   tags: postgres
@@ -15,5 +14,3 @@
       when: proxy_apply | d(false)
       ansible.builtin.include_role:
         name: proxy
-
-...
diff --git a/playbooks/site.yml b/playbooks/site.yml
index 9092d56176326c7f6bf58b29f5b724036073aa7c..9f18d8eeea0cd12e812821375140667d88b8f7af 100755
--- a/playbooks/site.yml
+++ b/playbooks/site.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: PYTHON
   hosts: all
   gather_facts: false
@@ -50,5 +49,3 @@
 
 - import_playbook: tester.yml
   tags: tester
-
-...
diff --git a/playbooks/tester.yml b/playbooks/tester.yml
index 1b9dcae29bcb4b460566084e5479324ae4a4d88b..f68bad3984a0fbf85ddb63c187da247019f63009 100755
--- a/playbooks/tester.yml
+++ b/playbooks/tester.yml
@@ -1,10 +1,7 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: Install UbiCast tester
   hosts: mediaserver:mediaworker:mirismanager:postgres:msmonitor:live:celerity:mediaimport:mediacache:mediavault
   tags: all
   roles:
     - tester
-
-...
diff --git a/playbooks/tests/data-partition.yml b/playbooks/tests/data-partition.yml
index cbd9cac0dc429a3e4f9148d43f2eb9f901d18870..b6e479864650fec08c5860ce5def2f6d76b2aa7c 100755
--- a/playbooks/tests/data-partition.yml
+++ b/playbooks/tests/data-partition.yml
@@ -1,11 +1,9 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: TEST DATA PARTITION
   hosts: mediaserver
   gather_facts: false
   tasks:
-
     - name: verify /data partition existence
       ansible.builtin.shell: findmnt /data
       register: data_exist
@@ -14,7 +12,6 @@
 
     # /data exist
     - block:
-
         - name: get /data size
           ansible.builtin.shell: df -BG /data --output=size | tail -n1 | grep -o '[0-9]*'
           register: data_size
@@ -23,13 +20,13 @@
 
         - name: print size
           ansible.builtin.debug:
-            msg: "/data size is {{ data_size.stdout }}G"
+            msg: /data size is {{ data_size.stdout }}G
 
         - name: create a test directory in /data
           ansible.builtin.file:
             path: /data/test
             state: directory
-            mode: '0755'
+            mode: "0755"
             owner: nobody
             group: nogroup
           ignore_errors: true
@@ -39,7 +36,7 @@
           ansible.builtin.file:
             state: touch
             path: /data/test/file
-            mode: '0644'
+            mode: "0644"
             owner: nobody
             group: nogroup
           ignore_errors: true
@@ -52,7 +49,6 @@
 
     # /data missing
     - block:
-
         - name: get /home size
           ansible.builtin.shell: df -BG /home --output=size | tail -n1 | grep -o '[0-9]*'
           register: home_size
@@ -61,11 +57,9 @@
 
         - name: verify size
           ansible.builtin.debug:
-            msg: "/home size is too short ({{ home_size.stdout }}G < 200G)"
+            msg: /home size is too short ({{ home_size.stdout }}G < 200G)
           when: home_size.stdout | int < 200
           ignore_errors: true
           failed_when: true
 
       when: data_exist.rc != 0
-
-...
diff --git a/playbooks/tests/exec-tester.yml b/playbooks/tests/exec-tester.yml
index 8c22df2f3f4df02368e9b47026b74057ef7e6114..2ac3069189887a5be1127e19545944e1f33a63a0 100755
--- a/playbooks/tests/exec-tester.yml
+++ b/playbooks/tests/exec-tester.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: RUN TESTER
   hosts: all
   tags: tester
@@ -20,5 +19,3 @@
           python3 /root/envsetup/tests/tester.py 2>&1 | tee /root/envsetup/tests/logs/tester_pb.log
         creates: /root/envsetup/tests/logs/tester_pb.log
         executable: /bin/bash
-
-...
diff --git a/playbooks/tests/firewall-rules.yml b/playbooks/tests/firewall-rules.yml
index 9165a3586491dbc78d831a345539e3ff55d8d130..b22cfaf088cbe5881212dc800dac97add5f69edf 100755
--- a/playbooks/tests/firewall-rules.yml
+++ b/playbooks/tests/firewall-rules.yml
@@ -1,6 +1,5 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: GATHER ALL FACTS
   hosts: all
   tasks:
@@ -14,7 +13,6 @@
   hosts: all
   gather_facts: false
   tasks:
-
     - include_vars:
         file: ressources/firewall/rules.yml
 
@@ -30,7 +28,6 @@
   hosts: all
   gather_facts: false
   tasks:
-
     - include_vars:
         file: ressources/firewall/rules.yml
 
@@ -42,5 +39,3 @@
         loop_var: outer_item
       # execute loop only when group exists and host is in listen.groupname_src
       when: (outer_item.groupname_src in groups) and (inventory_hostname in groups[outer_item.groupname_src])
-
-...
diff --git a/playbooks/tests/ressources/firewall/listen.yml b/playbooks/tests/ressources/firewall/listen.yml
index 997e084aada933bec6110a89ae047a05e627de0a..3094bcb1824f2ad447920d37f18d2a69057e8927 100644
--- a/playbooks/tests/ressources/firewall/listen.yml
+++ b/playbooks/tests/ressources/firewall/listen.yml
@@ -1,11 +1,8 @@
 ---
-
 - debug:
-    msg: "On {{ outer_item.groupname }} server(s) put {{ outer_item.ports }} port(s) in listen mode"
+    msg: On {{ outer_item.groupname }} server(s) put {{ outer_item.ports }} port(s) in listen mode
 
-- ansible.builtin.shell: "nohup timeout 300 nc -lp {{ item }} >/dev/null 2>&1 &"
+- ansible.builtin.shell: nohup timeout 300 nc -lp {{ item }} >/dev/null 2>&1 &
   ignore_errors: true
   loop: "{{ outer_item.ports }}"
   changed_when: false
-
-...
diff --git a/playbooks/tests/ressources/firewall/rules.yml b/playbooks/tests/ressources/firewall/rules.yml
index 63fb6ac0f0908232c675782b5ca0fb5d8c97b7af..44ed151182671140e8eece746619d44d64feb6e8 100644
--- a/playbooks/tests/ressources/firewall/rules.yml
+++ b/playbooks/tests/ressources/firewall/rules.yml
@@ -1,69 +1,49 @@
 ---
-
 listen:
-
   - groupname: mediaserver
     ports: ["80", "443"]
-
   - groupname: celerity
     ports: ["6200"]
-
   - groupname: wowza
     ports: ["1935"]
-
   - groupname: mirismanager
     ports: ["22", "443"]
-
   - groupname: mediaimport
     ports: ["20", "22"]
-
   - groupname: all
     ports: ["4949"]
-
   - groupname: postgres
     ports: ["5432", "22"]
 
 
 test:
-
   - groupname_src: mediaworker
     groupname_dst: mediaserver
     ports: ["80", "443"]
-
   - groupname_src: mediaworker
     groupname_dst: celerity
     ports: ["6200"]
-
   - groupname_src: mediaserver
     groupname_dst: celerity
     ports: ["6200"]
-
   - groupname_src: mediaserver
     groupname_dst: mediacache
     ports: ["22", "443"]
-
   - groupname_src: mediacache
     groupname_dst: mediaserver
     ports: ["80", "443"]
-
   - groupname_src: mediaserver
-    hosts_dst: ["mirismanager.ubicast.eu"]
+    hosts_dst: [mirismanager.ubicast.eu]
     ports: ["80", "443"]
-
   - groupname_src: mediaserver
     groupname_dst: netcapture
     ports: ["22"]
-
   - groupname_src: netcapture
     groupname_dst: mediaserver
     ports: ["443", "1935"]
-
   - groupname_src: mediaserver
-    hosts_dst: ["git.ubicast.net"]
+    hosts_dst: [git.ubicast.net]
     ports: ["22"]
-
   - groupname_src: localhost
     groupname_dst: mediaserver
     ports: ["80", "443"]
-
-...
diff --git a/playbooks/tests/ressources/firewall/test-rule.yml b/playbooks/tests/ressources/firewall/test-rule.yml
index 7ca6414c7c2bc9aebe6d3686436e827252800a2b..3b4edefb10e0446461b813ad2986aaa3cfa44d41 100644
--- a/playbooks/tests/ressources/firewall/test-rule.yml
+++ b/playbooks/tests/ressources/firewall/test-rule.yml
@@ -1,17 +1,16 @@
 ---
-
 # test rules with direct hosts destination
 - block:
     - debug:
-        msg: "Test rule from {{ outer_item.groupname_src }} to {{ outer_item.hosts_dst }} on {{ outer_item.ports }} port(s)"
+        msg: Test rule from {{ outer_item.groupname_src }} to {{ outer_item.hosts_dst }} on {{ outer_item.ports }} port(s)
 
-    - shell: "nc -zv {{ item.0 }} {{ item.1 }}"
+    - shell: nc -zv {{ item.0 }} {{ item.1 }}
       ignore_errors: true
       loop: "{{ outer_item.hosts_dst | product(outer_item.ports) | list }}"
       when: proxy is not defined
       changed_when: false
 
-    - shell: "nc -x {{ proxy }} -X Connect -zv {{ item.0 }} {{ item.1 }}"
+    - shell: nc -x {{ proxy }} -X Connect -zv {{ item.0 }} {{ item.1 }}
       ignore_errors: true
       loop: "{{ outer_item.hosts_dst | product(outer_item.ports) | list }}"
       when: proxy is defined
@@ -21,21 +20,17 @@
 # test rules with ansible group destination
 - block:
     - debug:
-        msg: "Test rule from {{ outer_item.groupname_src }} to {{ outer_item.groupname_dst }} on {{ outer_item.ports }} port(s)"
+        msg: Test rule from {{ outer_item.groupname_src }} to {{ outer_item.groupname_dst }} on {{ outer_item.ports }} port(s)
 
-    - shell: "nc -zv {{ item.0 }} {{ item.1 }}"
+    - shell: nc -zv {{ item.0 }} {{ item.1 }}
       ignore_errors: true
-      loop: "{{ groups[outer_item.groupname_dst] | map('extract', hostvars, ['ansible_default_ipv4', 'address']) | list
-                | product(outer_item.ports) | list }}"
+      loop: "{{ groups[outer_item.groupname_dst] | map('extract', hostvars, ['ansible_default_ipv4', 'address']) | list | product(outer_item.ports) | list }}"
       when: proxy is not defined
       changed_when: false
 
-    - shell: "nc -x {{ proxy }} -X Connect -zv {{ item.0 }} {{ item.1 }}"
+    - shell: nc -x {{ proxy }} -X Connect -zv {{ item.0 }} {{ item.1 }}
       ignore_errors: true
-      loop: "{{ groups[outer_item.groupname_dst] | map('extract', hostvars, ['ansible_default_ipv4', 'address']) | list
-                | product(outer_item.ports) | list }}"
+      loop: "{{ groups[outer_item.groupname_dst] | map('extract', hostvars, ['ansible_default_ipv4', 'address']) | list | product(outer_item.ports) | list }}"
       when: proxy is defined
       changed_when: false
   when: outer_item.groupname_dst is defined
-
-...
diff --git a/playbooks/upgrade.yml b/playbooks/upgrade.yml
index 4aac96cfbef0c0d069071ad82c46ebcccd231f38..c456648e83e8cc4ff17d0fcacbce1043457e11a3 100755
--- a/playbooks/upgrade.yml
+++ b/playbooks/upgrade.yml
@@ -1,10 +1,8 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: UPGRADE SERVERS
   hosts: all
   tasks:
-
     - name: apt-get dist-upgrade
       when: ansible_os_family == "Debian"
       ansible.builtin.apt:
@@ -21,5 +19,3 @@
       ansible.builtin.yum:
         name: "*"
         state: latest
-
-...
diff --git a/playbooks/users.yml b/playbooks/users.yml
index 0972b6100f02f87dbb85a54c6669bc9c4b2e2e29..74b9bf0eec619bce2f864f0125934f23a58d8ac8 100755
--- a/playbooks/users.yml
+++ b/playbooks/users.yml
@@ -1,11 +1,8 @@
 #!/usr/bin/env ansible-playbook
 ---
-
 - name: USERS
   hosts: all
   tags: all
   roles:
     - conf
     - users
-
-...
diff --git a/requirements.yml b/requirements.yml
index 6b1fe0558ef539bd67aa2ea23d2c4a539ffd9dc1..69f4b8f2428a100398e04abf540b9a3cdfe404fd 100644
--- a/requirements.yml
+++ b/requirements.yml
@@ -1,5 +1,3 @@
 ---
 - src: elastic.elasticsearch
   version: 7.9.0
-
-...
diff --git a/roles/base/meta/main.yml b/roles/base/meta/main.yml
index ea5745729fa6af9e0c1b02bc804a60545d1e8657..7c8be6e0fad0b8faa9b8a1869a469207f4763abd 100644
--- a/roles/base/meta/main.yml
+++ b/roles/base/meta/main.yml
@@ -1,5 +1,4 @@
 ---
-
 dependencies:
   - role: conf
   - role: init
@@ -9,5 +8,3 @@ dependencies:
   - role: ferm-install
   - role: ferm-configure
   - role: fail2ban
-
-...
diff --git a/roles/bench-server/defaults/main.yml b/roles/bench-server/defaults/main.yml
index feccdd5a25d91ced913e75b888c8dca74ec4ac2f..175757889e335bb9bde89a869fce4d362c48d517 100644
--- a/roles/bench-server/defaults/main.yml
+++ b/roles/bench-server/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 bench_server_packages:
   - ubicast-benchmark
 
@@ -14,5 +13,3 @@ bench_dl_streams: false
 
 bench_stream_repo: https://git.ubicast.net/mediaserver/ms-testing-suite.git
 bench_host_api_key: "{{ envsetup_ms_api_key | d() }}"
-
-...
diff --git a/roles/bench-server/meta/main.yml b/roles/bench-server/meta/main.yml
index 91d0a5d794147e734592ab2aab6005487dfd9bbc..152fcf1fab309ff8196530b1e4914a71fafef7d8 100644
--- a/roles/bench-server/meta/main.yml
+++ b/roles/bench-server/meta/main.yml
@@ -1,8 +1,5 @@
 ---
-
 dependencies:
   - role: conf
   - role: init
   - role: sysconfig
-
-...
diff --git a/roles/bench-server/tasks/main.yml b/roles/bench-server/tasks/main.yml
index 38923fdfc2bca0cebab29ade2c357bf280dcf76f..361765abfe198fe4811970a4f29909223413f5a6 100644
--- a/roles/bench-server/tasks/main.yml
+++ b/roles/bench-server/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install bench-server packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -15,7 +14,7 @@
   ansible.builtin.file:
     path: /etc/mediaserver
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: benchmark configuration settings
   ansible.builtin.copy:
@@ -30,7 +29,7 @@
       "DL_STREAMS":{{ bench_dl_streams }},
       "TIME_STATS":{{ bench_time_stat }}
       }
-    mode: '644'
+    mode: "644"
 
 - name: reload systemd daemon
   ansible.builtin.systemd:
@@ -45,7 +44,7 @@
   ansible.builtin.template:
     src: bench-streaming.conf.j2
     dest: /etc/mediaserver/bench-streaming.conf
-    mode: '644'
+    mode: "644"
 
 - name: clone ms-testing-suite repository
   ansible.builtin.git:
@@ -60,7 +59,7 @@
     src: /etc/mediaserver/bench-streaming.conf
     dest: /usr/share/ms-testing-suite/config.json
     remote_src: true
-    mode: '644'
+    mode: "644"
 
 - name: add docker key
   when:
@@ -75,7 +74,7 @@
     - not offline_mode | d(false)
     - not in_docker | d(false)
   ansible.builtin.apt_repository:
-    repo: "deb https://download.docker.com/linux/debian buster stable"
+    repo: deb https://download.docker.com/linux/debian buster stable
     state: present
     update_cache: true
 
@@ -101,5 +100,3 @@
     cmd: make build_docker_img
     chdir: /usr/share/ms-testing-suite
   run_once: true
-
-...
diff --git a/roles/bench-worker/defaults/main.yml b/roles/bench-worker/defaults/main.yml
index 8e51cf19d09a0ab404db26c568c3a268de6a92f6..f8e06f1f077e190aa4e1f672b1ca0124647c80fe 100644
--- a/roles/bench-worker/defaults/main.yml
+++ b/roles/bench-worker/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 bench_worker_packages:
   - ubicast-benchmark
 
@@ -11,5 +10,3 @@ bench_user: admin
 bench_password: "{{ envsetup_ms_admin_pwd | d() }}"
 bench_oid:
 bench_dl_streams: false
-
-...
diff --git a/roles/bench-worker/meta/main.yml b/roles/bench-worker/meta/main.yml
index 5f2a4de7e5fcd35ce77b6aac53633d3871e95335..152fcf1fab309ff8196530b1e4914a71fafef7d8 100644
--- a/roles/bench-worker/meta/main.yml
+++ b/roles/bench-worker/meta/main.yml
@@ -1,7 +1,5 @@
 ---
-
 dependencies:
   - role: conf
   - role: init
   - role: sysconfig
-...
diff --git a/roles/bench-worker/tasks/main.yml b/roles/bench-worker/tasks/main.yml
index 56423bd06ccd987fd4bfd8b711536e1cb95fb998..5f5f46de613e688cb03cd51fd3ac25427b15571d 100644
--- a/roles/bench-worker/tasks/main.yml
+++ b/roles/bench-worker/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install bench-worker packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -15,7 +14,7 @@
   ansible.builtin.file:
     path: /etc/mediaserver
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: benchmark configuration settings
   ansible.builtin.copy:
@@ -30,7 +29,7 @@
       "DL_STREAMS":{{ bench_dl_streams }},
       "TIME_STATS":{{ bench_time_stat }}
       }
-    mode: '644'
+    mode: "644"
 
 - name: reload systemd daemon
   ansible.builtin.systemd:
@@ -40,5 +39,3 @@
   ansible.builtin.systemd:
     name: bench-worker
     state: restarted
-
-...
diff --git a/roles/celerity/defaults/main.yml b/roles/celerity/defaults/main.yml
index a257c13ee866ad5c8b10db0db709a42dfd8fa7dd..377595af195484af31ab20015c5e1345068c9a63 100644
--- a/roles/celerity/defaults/main.yml
+++ b/roles/celerity/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 celerity_signing_key: "{{ envsetup_celerity_signing_key | d('change-me', true) }}"
 celerity_server: "{{ envsetup_celerity_server | d(envsetup_ms_server_name, true) | d('127.0.0.1', true) }}"
 
@@ -20,5 +19,3 @@ celerity_ferm_input_rules:
       - 6200
 celerity_ferm_output_rules: []
 celerity_ferm_global_settings:
-
-...
diff --git a/roles/celerity/handlers/main.yml b/roles/celerity/handlers/main.yml
index fd32b76eb927e63c7f350cc4ff0825aaa0f99e54..77b9d81951cc9d33c219c2dc200d63b4fc8b8488 100644
--- a/roles/celerity/handlers/main.yml
+++ b/roles/celerity/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart celerity-server
   ansible.builtin.service:
     name: celerity-server
     state: restarted
-
-...
diff --git a/roles/celerity/meta/main.yml b/roles/celerity/meta/main.yml
index e45d692ae3567f856967cd6f66c91d13e2e94e4e..471eb65e53129fb19aa41af0c049719b93c0ef91 100644
--- a/roles/celerity/meta/main.yml
+++ b/roles/celerity/meta/main.yml
@@ -1,6 +1,3 @@
 ---
-
 dependencies:
   - role: base
-
-...
diff --git a/roles/celerity/tasks/main.yml b/roles/celerity/tasks/main.yml
index 28783ff8b9fee3c414bfc53aaadca444562f2f4d..6595e32af2a2318dfc36c625422181b19815c1ca 100644
--- a/roles/celerity/tasks/main.yml
+++ b/roles/celerity/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: celerity server install
   ansible.builtin.apt:
     force_apt_get: true
@@ -14,7 +13,7 @@
   ansible.builtin.template:
     src: celerity-config.py.j2
     dest: /etc/celerity/config.py
-    mode: '644'
+    mode: "644"
 
 - name: ensure celerity server is running
   ansible.builtin.service:
@@ -36,5 +35,3 @@
 
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
-
-...
diff --git a/roles/conf/defaults/main.yml b/roles/conf/defaults/main.yml
index 35c19dd4188ef4823c4c25fb4f75d882d2695e2c..7f3df0ade59248aa7d7caa6dfd532f912a1a8953 100644
--- a/roles/conf/defaults/main.yml
+++ b/roles/conf/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 conf_req_packages:
   - ca-certificates
   - openssh-client
@@ -18,5 +17,3 @@ skyreach_system_key: "{{ lookup('env', 'SKYREACH_SYSTEM_KEY') }}"
 conf_update: false
 
 conf_debug: false
-
-...
diff --git a/roles/conf/tasks/main.yml b/roles/conf/tasks/main.yml
index 2089992b53de4441dae2731ea692efd5731ade21..0497da29719296b3394ef75cb149f4c9c43f4b6c 100644
--- a/roles/conf/tasks/main.yml
+++ b/roles/conf/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: proxy
   when:
     - proxy_http | d()
@@ -83,7 +82,7 @@
     return_content: true
     validate_certs: "{{ conf_valid_cert }}"
 
-- name: save generated conf  # noqa no-handler
+- name: save generated conf # noqa no-handler
   loop:
     - "{{ conf_dl_ak }}"
     - "{{ conf_dl_sk }}"
@@ -93,7 +92,7 @@
     dest: "{{ conf_dir }}/auto-generated-conf.sh"
     force: true
     backup: true
-    mode: '644'
+    mode: "644"
 
 - name: check if auto-generated-conf.sh exists
   check_mode: false
@@ -129,5 +128,3 @@
   when: conf_debug
   ansible.builtin.debug:
     var: ansible_facts
-
-...
diff --git a/roles/docker/tasks/main.yml b/roles/docker/tasks/main.yml
index 5334f82e6aea6dc80f8094ab5354695c05d408e4..a5e3ed7322cef804e9abe452a71a2ebb8b1b2e8a 100644
--- a/roles/docker/tasks/main.yml
+++ b/roles/docker/tasks/main.yml
@@ -58,5 +58,3 @@
   register: apt_status
   retries: 60
   until: apt_status is success or ('Failed to lock apt for exclusive operation' not in apt_status.msg and '/var/lib/dpkg/lock' not in apt_status.msg)
-
-...
diff --git a/roles/elastic/defaults/main.yml b/roles/elastic/defaults/main.yml
index b8be8f11ecee8857764d8db5d79bfdef925ba950..2aa7f00ede6f7a3e549262f65cf026b7ebe2c5a4 100644
--- a/roles/elastic/defaults/main.yml
+++ b/roles/elastic/defaults/main.yml
@@ -1,5 +1,3 @@
 ---
 kibana_default_port: 5601
 kibana_server_host: localhost
-
-...
diff --git a/roles/elastic/handlers/main.yml b/roles/elastic/handlers/main.yml
index c40d88041f8e35a0ba692a8c582ec79654605d32..ca0aed386a224646013f166ae48380f2928f1bea 100644
--- a/roles/elastic/handlers/main.yml
+++ b/roles/elastic/handlers/main.yml
@@ -8,5 +8,3 @@
   ansible.builtin.service:
     name: apm-server
     state: restarted
-
-...
diff --git a/roles/elastic/tasks/main.yml b/roles/elastic/tasks/main.yml
index 1a8c6ceeee8fadb0e61cb26b1fe1e5e07a164c50..a0cece5d8be421d7b655ae68a3e6f7f1fa037378 100644
--- a/roles/elastic/tasks/main.yml
+++ b/roles/elastic/tasks/main.yml
@@ -13,7 +13,7 @@
   ansible.builtin.template:
     src: kibana.yml.j2
     dest: /etc/kibana/kibana.yml
-    mode: '644'
+    mode: "644"
   notify: restart kibana
 
 - name: install apm-server package
@@ -30,7 +30,5 @@
   ansible.builtin.template:
     src: apm-server.yml.j2
     dest: /etc/apm-server/apm-server.yml
-    mode: '644'
+    mode: "644"
   notify: restart apm-server
-
-...
diff --git a/roles/fail2ban/defaults/main.yml b/roles/fail2ban/defaults/main.yml
index c5221a36e10da3dff0f59f7924c861965c6085b3..5cdd83796e1ae03f00408a8b3649c9f500993408 100644
--- a/roles/fail2ban/defaults/main.yml
+++ b/roles/fail2ban/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 f2b_packages:
   - fail2ban
   - rsyslog
@@ -28,5 +27,3 @@ f2b_destemail_admins: >
   {% endif %}"
 
 f2b_action: "{% if envsetup_fail2ban_send_email | default(false) %}action_mwl{% else %}action_{% endif %}"
-
-...
diff --git a/roles/fail2ban/handlers/main.yml b/roles/fail2ban/handlers/main.yml
index 191b0cc20b4f9a73bc30da5921c9a6c6f950adfd..6f6780a1d6d7ce7674b0334f22144ad34db9376a 100644
--- a/roles/fail2ban/handlers/main.yml
+++ b/roles/fail2ban/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart fail2ban
   ansible.builtin.systemd:
     name: fail2ban
     state: restarted
-
-...
diff --git a/roles/fail2ban/tasks/main.yml b/roles/fail2ban/tasks/main.yml
index f64a9161730e232e2191338a18a6c1df5ff7fa7f..097142d649312f502046533d853ff0df1e570fa5 100644
--- a/roles/fail2ban/tasks/main.yml
+++ b/roles/fail2ban/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -15,12 +14,10 @@
   ansible.builtin.template:
     src: jail.local.j2
     dest: /etc/fail2ban/jail.local
-    mode: '644'
+    mode: "644"
 
 - name: service
   ansible.builtin.systemd:
     name: fail2ban
     enabled: true
     state: started
-
-...
diff --git a/roles/ferm-configure/defaults/main.yml b/roles/ferm-configure/defaults/main.yml
index 947f9f5b836d280a446994e0ed3642774eeeeac2..133033a39a1099009770bc827000fdd736a53726 100644
--- a/roles/ferm-configure/defaults/main.yml
+++ b/roles/ferm-configure/defaults/main.yml
@@ -1,19 +1,13 @@
 ---
-
 # filename into which rules will be written
 # /etc/ferm/{ferm|input|output|forward}.d/<filename>.conf
 ferm_rules_filename: default
 
 # input rule
 ferm_input_rules: []
-
 # ouput rule
 ferm_output_rules: []
-
 # forward rule
 ferm_forward_rules: []
-
 # global settings to be put in ferm.d directory
 ferm_global_settings:
-
-...
diff --git a/roles/ferm-configure/handlers/main.yml b/roles/ferm-configure/handlers/main.yml
index 13856b0370876966bbcd8ac4a5625ff53e26bd9d..da492c2d05d150b0765392b8fcff32813d02db62 100644
--- a/roles/ferm-configure/handlers/main.yml
+++ b/roles/ferm-configure/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: restart ferm
   when: ansible_facts.services['ferm.service'] is defined
   ansible.builtin.systemd:
@@ -11,4 +10,3 @@
   ansible.builtin.systemd:
     name: fail2ban
     state: started
-...
diff --git a/roles/ferm-configure/tasks/main.yml b/roles/ferm-configure/tasks/main.yml
index eb141341250c586cc1329a9cf297ce188e54a8cd..0fc98bf57df2e9759fa7f70d3025706e6c5bcf59 100644
--- a/roles/ferm-configure/tasks/main.yml
+++ b/roles/ferm-configure/tasks/main.yml
@@ -1,8 +1,6 @@
 ---
-
 - name: populate service facts
   ansible.builtin.service_facts:
-
 - name: directories
   loop:
     - /etc/ferm/ferm.d
@@ -12,7 +10,7 @@
   ansible.builtin.file:
     path: "{{ item }}"
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: global
   when: ferm_global_settings | d(false)
@@ -22,7 +20,7 @@
   ansible.builtin.copy:
     dest: /etc/ferm/ferm.d/{{ ferm_rules_filename }}.conf
     content: "{{ ferm_global_settings }}"
-    mode: '644'
+    mode: "644"
 
 - name: input
   when: ferm_input_rules | length > 0
@@ -32,7 +30,7 @@
   ansible.builtin.template:
     src: ferm_rules_input.conf.j2
     dest: /etc/ferm/input.d/{{ ferm_rules_filename }}.conf
-    mode: '644'
+    mode: "644"
 
 - name: output
   when: ferm_output_rules | length > 0
@@ -42,7 +40,7 @@
   ansible.builtin.template:
     src: ferm_rules_output.conf.j2
     dest: /etc/ferm/output.d/{{ ferm_rules_filename }}.conf
-    mode: '644'
+    mode: "644"
 
 - name: forward
   when: ferm_forward_rules | length > 0
@@ -52,6 +50,4 @@
   ansible.builtin.template:
     src: ferm_rules_forward.conf.j2
     dest: /etc/ferm/forward.d/{{ ferm_rules_filename }}.conf
-    mode: '644'
-
-...
+    mode: "644"
diff --git a/roles/ferm-install/defaults/main.yml b/roles/ferm-install/defaults/main.yml
index fc3d06fb67fb0d3a1e787489a6f907e0d274b26e..c1a2181eb47ee452b599acb657f8e2ec495473cc 100644
--- a/roles/ferm-install/defaults/main.yml
+++ b/roles/ferm-install/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 # packages to install
 ferm_packages:
   - ferm
@@ -21,5 +20,3 @@ ferm_forward_log_prefix: "{{ ferm_forward_policy }} FORWARD "
 
 # enable anti-lockout rule
 ferm_antilockout_enabled: true
-
-...
diff --git a/roles/ferm-install/handlers/main.yml b/roles/ferm-install/handlers/main.yml
index cd10766d5ab5821f8baeee93e742eadd9ef0d6ab..956196934a5b26c10ace07b475411f8a78415f1c 100644
--- a/roles/ferm-install/handlers/main.yml
+++ b/roles/ferm-install/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart ferm
   ansible.builtin.systemd:
     name: ferm
     state: restarted
-
-...
diff --git a/roles/ferm-install/tasks/main.yml b/roles/ferm-install/tasks/main.yml
index 5ec0a4f68cbea2ccf4152629b2ea8b1dc21d98fd..309839233414d99686f1cf7a2970e75be33b8b59 100644
--- a/roles/ferm-install/tasks/main.yml
+++ b/roles/ferm-install/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -22,7 +21,7 @@
     src: ferm.conf.j2
     dest: /etc/ferm/ferm.conf
     backup: true
-    mode: '644'
+    mode: "644"
 
 - name: service
   ansible.builtin.systemd:
@@ -30,5 +29,3 @@
     enabled: true
     masked: false
     state: started
-
-...
diff --git a/roles/haproxy/defaults/main.yml b/roles/haproxy/defaults/main.yml
index 8057e778a2357f77e860c751d647a43c4cc51f46..0cc08fbe01e9a7ce220a191f20d9417f208ddd4b 100644
--- a/roles/haproxy/defaults/main.yml
+++ b/roles/haproxy/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 hap_packages:
   - haproxy
   - rsyslog
@@ -7,49 +6,47 @@ hap_packages:
 
 hap_config: /etc/haproxy/haproxy.cfg
 
-hap_config_global: |2
-    log /dev/log    local0 warning
-    log /dev/log    local1 warning
-    chroot /var/lib/haproxy
-    stats socket /run/haproxy/admin.sock mode 664 level admin
-    stats timeout 30s
-    user haproxy
-    group haproxy
-    daemon
+hap_config_global: |
+  log /dev/log    local0 warning
+  log /dev/log    local1 warning
+  chroot /var/lib/haproxy
+  stats socket /run/haproxy/admin.sock mode 664 level admin
+  stats timeout 30s
+  user haproxy
+  group haproxy
+  daemon
 
-hap_config_defaults: |2
-    log global
-    mode tcp
-    balance leastconn
-    retries 2
-    timeout tunnel 30m
-    timeout client 30m
-    timeout connect 5s
-    timeout server 30m
-    timeout check 15s
-    option tcplog
-    option tcpka
-    option clitcpka
-    option srvtcpka
+hap_config_defaults: |
+  log global
+  mode tcp
+  balance leastconn
+  retries 2
+  timeout tunnel 30m
+  timeout client 30m
+  timeout connect 5s
+  timeout server 30m
+  timeout check 15s
+  option tcplog
+  option tcpka
+  option clitcpka
+  option srvtcpka
 
 hap_stats_enabled: false
-hap_config_stats: |2
-    bind :9000
-    mode http
-    stats enable
-    stats uri /
-    stats auth admin:password
+hap_config_stats: |
+  bind :9000
+  mode http
+  stats enable
+  stats uri /
+  stats auth admin:password
 
 hap_config_listen:
   - name: pgsql-primary
-    content: |2
-        bind :54321
-        default-server inter 2s fall 3 rise 2 on-marked-down shutdown-sessions
-        option tcp-check
-        tcp-check expect string primary
-        maxconn 500
-        server {{ groups['postgres'][0] }} {{ hostvars[groups['postgres'][0]]['ansible_default_ipv4']['address'] }}:5432 maxconn 500 check port 8543
-        server {{ groups['postgres'][1] }} {{ hostvars[groups['postgres'][1]]['ansible_default_ipv4']['address'] }}:5432 maxconn 500 check port 8543 backup
+    content: |
+      bind :54321
+      default-server inter 2s fall 3 rise 2 on-marked-down shutdown-sessions
+      option tcp-check
+      tcp-check expect string primary
+      maxconn 500
+      server {{ groups['postgres'][0] }} {{ hostvars[groups['postgres'][0]]['ansible_default_ipv4']['address'] }}:5432 maxconn 500 check port 8543
+      server {{ groups['postgres'][1] }} {{ hostvars[groups['postgres'][1]]['ansible_default_ipv4']['address'] }}:5432 maxconn 500 check port 8543 backup
 # hap_config_listen: []
-
-...
diff --git a/roles/haproxy/handlers/main.yml b/roles/haproxy/handlers/main.yml
index 8c0e406861b2c586a396f64d18f3a4d00ca20f04..f7073fe4a8af1e38d27b46592a694f7fc38cac56 100644
--- a/roles/haproxy/handlers/main.yml
+++ b/roles/haproxy/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: reload haproxy
   ansible.builtin.systemd:
     name: haproxy
     state: reloaded
-
-...
diff --git a/roles/haproxy/tasks/main.yml b/roles/haproxy/tasks/main.yml
index f610bcc37b2d1b8eb705899d8d52f0f635221cb6..d39595fb0601c6d916b4f3307e28da17baed2dfb 100644
--- a/roles/haproxy/tasks/main.yml
+++ b/roles/haproxy/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -15,8 +14,6 @@
     src: haproxy.cfg.j2
     dest: /etc/haproxy/haproxy.cfg
     backup: true
-    mode: '644'
-
-- ansible.builtin.meta: flush_handlers  # noqa unnamed-task
+    mode: "644"
 
-...
+- ansible.builtin.meta: flush_handlers # noqa name[missing]
diff --git a/roles/init/defaults/main.yml b/roles/init/defaults/main.yml
index d140d408264d82ff19e5703844a0e95a76e715db..51d63db7d93611705a3119fc5ca0ecf60cdc6859 100644
--- a/roles/init/defaults/main.yml
+++ b/roles/init/defaults/main.yml
@@ -1,9 +1,6 @@
 ---
-
 init_packages:
   - apt-utils
   - gnupg
   - ssh-client
   - sudo
-
-...
diff --git a/roles/init/tasks/main.yml b/roles/init/tasks/main.yml
index 5f4c3895a3216e64cf00118d6d4f4be7d79480c3..bf0cede5de41ff009a3e8acf55e6a48d010e206d 100644
--- a/roles/init/tasks/main.yml
+++ b/roles/init/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install initial packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -14,5 +13,3 @@
   ansible.builtin.include_role:
     name: proxy
     allow_duplicates: true
-
-...
diff --git a/roles/letsencrypt/defaults/main.yml b/roles/letsencrypt/defaults/main.yml
index f38c2f2dd5c4d484ad695896b24d698442bc145d..adf9796366aabc4493536c34aa28db5e5bd89b9f 100644
--- a/roles/letsencrypt/defaults/main.yml
+++ b/roles/letsencrypt/defaults/main.yml
@@ -1,8 +1,5 @@
 ---
-
 letsencrypt_domains: []
 letsencrypt_webroot: /var/www/letsencrypt
 letsencrypt_email: sysadmin@ubicast.eu
 letsencrypt_testing: false
-
-...
diff --git a/roles/letsencrypt/handlers/main.yml b/roles/letsencrypt/handlers/main.yml
index 8a97cff137ee67aeb5dd79f2058fa75ba8a28f00..06947324ccfdfd28f5795805cbd51f9526f16624 100644
--- a/roles/letsencrypt/handlers/main.yml
+++ b/roles/letsencrypt/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart nginx
   ansible.builtin.service:
     name: nginx
     state: restarted
-
-...
diff --git a/roles/letsencrypt/tasks/main.yml b/roles/letsencrypt/tasks/main.yml
index 97bb2fceb32e96971eb3403dbe106243f9399629..ee110a4f9f79e6dadd185b18d075a5ba6fa4f8a3 100644
--- a/roles/letsencrypt/tasks/main.yml
+++ b/roles/letsencrypt/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install certbot
   ansible.builtin.package:
     force_apt_get: true
@@ -29,19 +28,19 @@
       {% for domain in letsencrypt_domains %}
       {{ domain }}
       {% endfor %}
-    mode: '644'
+    mode: "644"
 
 - name: create webroot directory
   ansible.builtin.file:
     path: "{{ letsencrypt_webroot }}"
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: create pre hook directory
   ansible.builtin.file:
     path: /etc/letsencrypt/renewal-hooks/pre
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: create pre hook script
   ansible.builtin.copy:
@@ -57,7 +56,7 @@
   ansible.builtin.file:
     path: /etc/letsencrypt/renewal-hooks/deploy
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: create deploy hook script
   ansible.builtin.copy:
@@ -65,7 +64,6 @@
     mode: 0755
     content: |
       #!/usr/bin/env bash
-      nginx -t > /dev/null 2>&1
       systemctl reload nginx
 
 - name: test generate certificates
@@ -92,7 +90,6 @@
 - name: exit in case of failure
   when: letsencrypt_dry_run is failed
   ansible.builtin.fail:
-
 - name: generate certificates
   notify: restart nginx
   when:
@@ -116,8 +113,8 @@
   notify: restart nginx
   ansible.builtin.lineinfile:
     path: /etc/nginx/conf.d/ssl_certificate.conf
-    regexp: 'ssl_certificate\s+([\w/\-\_\.]+);'
-    line: 'ssl_certificate /etc/letsencrypt/live/{{ letsencrypt_domains[0] }}/fullchain.pem;'
+    regexp: ssl_certificate\s+([\w/\-\_\.]+);
+    line: ssl_certificate /etc/letsencrypt/live/{{ letsencrypt_domains[0] }}/fullchain.pem;
 
 - name: update nginx certificate key configuration
   when:
@@ -127,7 +124,5 @@
   notify: restart nginx
   ansible.builtin.lineinfile:
     path: /etc/nginx/conf.d/ssl_certificate.conf
-    regexp: 'ssl_certificate_key\s+([\w/\-\_\.]+);'
-    line: 'ssl_certificate_key /etc/letsencrypt/live/{{ letsencrypt_domains[0] }}/privkey.pem;'
-
-...
+    regexp: ssl_certificate_key\s+([\w/\-\_\.]+);
+    line: ssl_certificate_key /etc/letsencrypt/live/{{ letsencrypt_domains[0] }}/privkey.pem;
diff --git a/roles/live/defaults/main.yml b/roles/live/defaults/main.yml
index 8f2c2a8f326f10e86059208a7369805bcc6902a9..4fddcbaa4f120d39580f42e80867d81fcc82b4e1 100644
--- a/roles/live/defaults/main.yml
+++ b/roles/live/defaults/main.yml
@@ -1,8 +1,5 @@
 ---
-
 debian_packages:
   - ubicast-live
 
 role_tmpfs_size: "{{ tmpfs_size | d('2048m') }}"
-
-...
diff --git a/roles/live/handlers/main.yml b/roles/live/handlers/main.yml
index b0abf18b3894fe1af147e0ca063f8e9e9a913d58..abb7b115ab9c7d835c04a6320b219dd71fe3f789 100644
--- a/roles/live/handlers/main.yml
+++ b/roles/live/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart nginx
   ansible.builtin.systemd:
     name: nginx
     state: restarted
-
-...
diff --git a/roles/live/tasks/main.yml b/roles/live/tasks/main.yml
index 3d3c1d1c9591d597c6dcb3ff7c35f9f697e6f6b7..743d5e92560a21717d8521870f1e5d6af7a7e002 100644
--- a/roles/live/tasks/main.yml
+++ b/roles/live/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: Live packages installation
   ansible.builtin.apt:
     force_apt_get: true
@@ -14,9 +13,9 @@
   notify: restart nginx
   mount:
     path: /var/tmp/nginx-rtmp
-    src: 'tmpfs'
+    src: tmpfs
     fstype: tmpfs
-    opts: 'defaults,size={{ role_tmpfs_size }}'
+    opts: defaults,size={{ role_tmpfs_size }}
     state: mounted
 
 - name: Changing the rights on the TMPFS directory
@@ -25,6 +24,4 @@
     path: /var/tmp/nginx-rtmp
     owner: nginx
     group: root
-    mode: '0700'
-
-...
+    mode: "0700"
diff --git a/roles/lxc/defaults/main.yml b/roles/lxc/defaults/main.yml
index 79c76c6d54abde0fc369b920884eb625447d8387..d77c199eba13ee6a2f30f0ad26fabb6ffbccf654 100644
--- a/roles/lxc/defaults/main.yml
+++ b/roles/lxc/defaults/main.yml
@@ -1,8 +1,5 @@
 ---
-
 # lxc_network_type possible value:
 #  - masquerade_bridge => independent private bridge
 #  - host_bridge => host shared network bridge
-lxc_network_type: 'masquerade_bridge'
-
-...
+lxc_network_type: masquerade_bridge
diff --git a/roles/lxc/handlers/main.yml b/roles/lxc/handlers/main.yml
index a237a6dbb4186d659b0a05546ba1bc5201a077ef..f5bddc8da042bca2d144ef8e30bd6f917d5e7eee 100644
--- a/roles/lxc/handlers/main.yml
+++ b/roles/lxc/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: restart lxc
   ansible.builtin.systemd:
     name: lxc
@@ -11,5 +10,3 @@
   ansible.builtin.systemd:
     name: lxc-net
     state: restarted
-
-...
diff --git a/roles/lxc/tasks/main.yml b/roles/lxc/tasks/main.yml
index 64e69613709438f10a58145d3e1427711b3d321b..168a3607ba2c674de556e284ae131183198837a9 100644
--- a/roles/lxc/tasks/main.yml
+++ b/roles/lxc/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: Masquerade bridge configuration
   block:
     - name: Ask confirmation
@@ -15,10 +14,9 @@
       register: confirm_continue
       no_log: true
 
-    - name: 'check parm is null or invalid'
+    - name: check parm is null or invalid
       ansible.builtin.fail: msg='Installation aborted'
-      when: not ((confirm_continue.user_input | bool)
-            or (confirm_continue.user_input | length == 0))
+      when: not ((confirm_continue.user_input | bool) or (confirm_continue.user_input | length == 0))
   when: lxc_network_type == 'host_bridge'
 
 - name: LXC packages installation
@@ -38,7 +36,7 @@
   ansible.builtin.template:
     src: lxc-default.j2
     dest: /etc/lxc/default.conf
-    mode: '644'
+    mode: "644"
 
 - name: Masquerade bridge configuration
   block:
@@ -47,7 +45,5 @@
       ansible.builtin.template:
         src: lxc-net.j2
         dest: /etc/default/lxc-net
-        mode: '644'
+        mode: "644"
   when: lxc_network_type == 'masquerade_bridge'
-
-...
diff --git a/roles/mediacache/defaults/main.yml b/roles/mediacache/defaults/main.yml
index fce7af4ed375d7f5c5be8055f06f4f5149e731d9..f4e4fa060158a0cffc5dda8b9b6d98beac055b07 100644
--- a/roles/mediacache/defaults/main.yml
+++ b/roles/mediacache/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 # MediaCache data folder - for VOD
 role_mc_vod_folder: "{{ mediacache_vod_folder | d('/var/cache/nginx/mediacache-vod') }}"
 # MediaCache size in Gb - for VOD
@@ -9,5 +8,3 @@ role_mc_vod_size: "{{ mediacache_vod_size }}"
 role_mc_live_folder: "{{ mediacache_live_folder | d('/var/cache/nginx/mediacache-live') }}"
 # MediaCache size in Gb - for live
 role_mc_live_size: "{{ mediacache_live_size | d('1') }}"
-
-...
diff --git a/roles/mediacache/handlers/main.yml b/roles/mediacache/handlers/main.yml
index b0abf18b3894fe1af147e0ca063f8e9e9a913d58..abb7b115ab9c7d835c04a6320b219dd71fe3f789 100644
--- a/roles/mediacache/handlers/main.yml
+++ b/roles/mediacache/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart nginx
   ansible.builtin.systemd:
     name: nginx
     state: restarted
-
-...
diff --git a/roles/mediacache/meta/main.yml b/roles/mediacache/meta/main.yml
index 9c7711bb55dd2f0f0059aad32d2ac8d0f2050997..dffb451ec1574450eec462a7f3d53003c2137d22 100644
--- a/roles/mediacache/meta/main.yml
+++ b/roles/mediacache/meta/main.yml
@@ -1,7 +1,4 @@
 ---
-
 dependencies:
   - role: base
   - role: nginx
-
-...
diff --git a/roles/mediacache/tasks/main.yml b/roles/mediacache/tasks/main.yml
index c4c7a0665ce5ca2883a82798d409557d82ba1a81..0517815229945a1ab043efc0134cab72aa9effb4 100644
--- a/roles/mediacache/tasks/main.yml
+++ b/roles/mediacache/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: MediaCache packages installation
   ansible.builtin.apt:
     force_apt_get: true
@@ -13,73 +12,71 @@
 - name: resolve domain name to localhost
   ansible.builtin.lineinfile:
     path: /etc/hosts
-    line: '127.0.1.1 {{ mediacache_url }}'
+    line: 127.0.1.1 {{ mediacache_url }}
     backup: true
 
 - name: create mediacache VOD data directory
   ansible.builtin.file:
-    dest: '{{ role_mc_vod_folder }}'
+    dest: "{{ role_mc_vod_folder }}"
     state: directory
     owner: nginx
     group: root
-    mode: '0700'
+    mode: "0700"
 
 - name: create mediacache live data directory
   ansible.builtin.file:
-    dest: '{{ role_mc_live_folder }}'
+    dest: "{{ role_mc_live_folder }}"
     state: directory
     owner: nginx
     group: root
-    mode: '0700'
+    mode: "0700"
   when: live_url is defined
 
 - name: fill the vhost file
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/nginx/sites-available/mediacache.conf
-    regexp: '^(\s+server_name)\s+.*(;)$'
-    replace: '\1 {{ mediacache_url }}\2'
+    regexp: ^(\s+server_name)\s+.*(;)$
+    replace: \1 {{ mediacache_url }}\2
 
 - name: fill the mediacache zones file - VOD folder
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/mediacache/nginx-zones.conf
-    regexp: '/var/cache/nginx/mediacache-vod'
-    replace: '{{ role_mc_vod_folder }}'
+    regexp: /var/cache/nginx/mediacache-vod
+    replace: "{{ role_mc_vod_folder }}"
 
 - name: fill the mediacache zones file - Live folder
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/mediacache/nginx-zones.conf
-    regexp: '/var/cache/nginx/mediacache-live'
-    replace: '{{ role_mc_live_folder }}'
+    regexp: /var/cache/nginx/mediacache-live
+    replace: "{{ role_mc_live_folder }}"
 
 - name: fill the mediacache zones file - VOD folder size
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/mediacache/nginx-zones.conf
-    regexp: '(?P<key>keys_zone=mediacache-vod.*max_size=).*(?P<unit>g)'
-    replace: '\g<key>{{ role_mc_vod_size }}\g<unit>'
+    regexp: (?P<key>keys_zone=mediacache-vod.*max_size=).*(?P<unit>g)
+    replace: \g<key>{{ role_mc_vod_size }}\g<unit>
 
 - name: fill the mediacache zones file - Live folder size
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/mediacache/nginx-zones.conf
-    regexp: '(?P<key>keys_zone=mediacache-live.*max_size=).*(?P<unit>g)'
-    replace: '\g<key>{{ role_mc_live_size }}\g<unit>'
+    regexp: (?P<key>keys_zone=mediacache-live.*max_size=).*(?P<unit>g)
+    replace: \g<key>{{ role_mc_live_size }}\g<unit>
 
 - name: fill the nginx VOD proxypass
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/mediacache/nginx-proxy-mediaserver.conf
-    regexp: '^(proxy_pass)\s+.*(;)$'
-    replace: '\1 https://{{ ms_url }}\2'
+    regexp: ^(proxy_pass)\s+.*(;)$
+    replace: \1 https://{{ ms_url }}\2
 
 - name: fill the nginx Live proxypass
   notify: restart nginx
   ansible.builtin.replace:
     path: /etc/mediacache/nginx-proxy-live.conf
-    regexp: '^(proxy_pass)\s+.*(;)$'
-    replace: '\1 https://{{ live_url }}\2'
-
-...
+    regexp: ^(proxy_pass)\s+.*(;)$
+    replace: \1 https://{{ live_url }}\2
diff --git a/roles/mediacache/vars/main.yml b/roles/mediacache/vars/main.yml
index dc62c20bdd01e84c20e66b16efb00a0767671957..80fb5049e456f3164423ffc851ce96a715373932 100644
--- a/roles/mediacache/vars/main.yml
+++ b/roles/mediacache/vars/main.yml
@@ -1,6 +1,3 @@
 ---
-
 debian_packages:
   - ubicast-mediacache
-
-...
diff --git a/roles/mediaimport/defaults/main.yml b/roles/mediaimport/defaults/main.yml
index 6e90ebc7e208eaa66fa5e3e63fadedc352d99d9a..b5a32e37856b0d4d2ebdb2eb4869871595663b73 100644
--- a/roles/mediaimport/defaults/main.yml
+++ b/roles/mediaimport/defaults/main.yml
@@ -1,14 +1,13 @@
 ---
-
 mediaimport_users:
   - name: "{{ envsetup_mediaimport_user | d() }}"
     passwd: "{{ envsetup_mediaimport_password | d() }}"
 
 mediaimport_packages:
   - openssh-server
-  - mysecureshell  # unreleased version that includes CallbackUpload, packaged by UbiCast
-  - pure-ftpd  # must be installed before ubicast-mediaimport so that config can be tweaked by the postinst
-  - ssl-cert  # for optionnal FTPS support (the mediaimport postinst will will use the snakeoil certificate for pure-ftpd)
+  - mysecureshell # unreleased version that includes CallbackUpload, packaged by UbiCast
+  - pure-ftpd # must be installed before ubicast-mediaimport so that config can be tweaked by the postinst
+  - ssl-cert # for optional FTPS support (the mediaimport postinst will use the snakeoil certificate for pure-ftpd)
   - ubicast-mediaimport
 
 mediaimport_ms_api_key: "{{ envsetup_ms_api_key | d() }}"
@@ -28,5 +27,3 @@ mediaimport_ferm_input_rules:
     helper: ftp
 mediaimport_ferm_output_rules: []
 mediaimport_ferm_global_settings:
-
-...
diff --git a/roles/mediaimport/handlers/main.yml b/roles/mediaimport/handlers/main.yml
index 60e664ba04be31637362603611e5959415a35aff..3da4ab3289c646dd1aa2f778239882d0915d9cac 100644
--- a/roles/mediaimport/handlers/main.yml
+++ b/roles/mediaimport/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: reload systemd
   ansible.builtin.systemd:
     daemon_reload: true
@@ -28,5 +27,3 @@
   ansible.builtin.systemd:
     name: fail2ban
     state: restarted
-
-...
diff --git a/roles/mediaimport/meta/main.yml b/roles/mediaimport/meta/main.yml
index e45d692ae3567f856967cd6f66c91d13e2e94e4e..471eb65e53129fb19aa41af0c049719b93c0ef91 100644
--- a/roles/mediaimport/meta/main.yml
+++ b/roles/mediaimport/meta/main.yml
@@ -1,6 +1,3 @@
 ---
-
 dependencies:
   - role: base
-
-...
diff --git a/roles/mediaimport/tasks/main.yml b/roles/mediaimport/tasks/main.yml
index e432c46903bf514622cfc2bbfe35e0f9615811ed..12cb50c3a9586ae7981e7b6161ab010371ef6bb0 100644
--- a/roles/mediaimport/tasks/main.yml
+++ b/roles/mediaimport/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install packages
   ansible.builtin.package:
     force_apt_get: true
@@ -12,7 +11,7 @@
   notify: restart sshd
   ansible.builtin.replace:
     dest: /etc/ssh/sshd_config
-    regexp: "^PasswordAuthentication no"
+    regexp: ^PasswordAuthentication no
     replace: "#PasswordAuthentication yes"
 
 - name: enable mediaimport service
@@ -61,5 +60,3 @@
 
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
-
-...
diff --git a/roles/mediaserver/defaults/main.yml b/roles/mediaserver/defaults/main.yml
index ba8a4045e04af981f751a497019a07a6a39c93fb..40c0d198401f8687d57ee22a2c7d8fb1703e53d0 100644
--- a/roles/mediaserver/defaults/main.yml
+++ b/roles/mediaserver/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 server_packages:
   - postgresql-client
   - cron
@@ -8,7 +7,7 @@ server_packages:
   - postfix
   - ubicast-mediaserver
 
-server_default_email_sender: "noreply@{{ server_hostname }}"
+server_default_email_sender: noreply@{{ server_hostname }}
 server_email_sender: "{{ envsetup_email_sender | default(server_default_email_sender, true) }}"
 
 server_id: "{{ envsetup_ms_id | d() }}"
@@ -42,7 +41,4 @@ server_ferm_input_rules:
       - 443
 server_ferm_output_rules: []
 server_ferm_global_settings:
-
-real_ip_from: ""  # default for OVH is 10.108.0.0/14
-
-...
+real_ip_from: "" # default for OVH is 10.108.0.0/14
diff --git a/roles/mediaserver/handlers/main.yml b/roles/mediaserver/handlers/main.yml
index 7013e6027ecc094f6e42a6c5b309d3be380a8633..93ab5aeaa3365ff2dcec9f40450b8b6360775dbd 100644
--- a/roles/mediaserver/handlers/main.yml
+++ b/roles/mediaserver/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: mscontroller restart
   ansible.builtin.command:
     cmd: mscontroller.py restart
@@ -18,5 +17,3 @@
   ansible.builtin.systemd:
     name: systemd-sysusers
     state: restarted
-
-...
diff --git a/roles/mediaserver/meta/main.yml b/roles/mediaserver/meta/main.yml
index 0cab502e411a1f22fa6c250879e80586ff6c5698..4d40ad38c5820de6e0de9766693fa0cd21f0a1fe 100644
--- a/roles/mediaserver/meta/main.yml
+++ b/roles/mediaserver/meta/main.yml
@@ -1,9 +1,6 @@
 ---
-
 dependencies:
   - role: base
   - role: nginx
-  - when: "groups['postgres'] | length > 1"
+  - when: groups['postgres'] | length > 1
     role: haproxy
-
-...
diff --git a/roles/mediaserver/tasks/main.yml b/roles/mediaserver/tasks/main.yml
index e7b3beefce4478dd7555a8be7fb74e0dbf1e3016..22518d385ebe4ef70d153281abe1ba0bceca3def 100644
--- a/roles/mediaserver/tasks/main.yml
+++ b/roles/mediaserver/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: mediaserver install
   ansible.builtin.apt:
     force_apt_get: true
@@ -30,26 +29,26 @@
 - name: Update the MS configuration with the celerity server IP
   ansible.builtin.lineinfile:
     path: /etc/mediaserver/msconf.py
-    regexp: '^CELERITY_SERVER_URL = '
-    line: "CELERITY_SERVER_URL = 'https://{{ server_celerity_server_url }}:6200'"
+    regexp: "^CELERITY_SERVER_URL = "
+    line: CELERITY_SERVER_URL = 'https://{{ server_celerity_server_url }}:6200'
     create: true
     owner: root
     group: root
     # 644 as all the instances must reach this file
     # The instances cannot be in a common group as of now => https://redmine.ubicast.net/issues/33046
-    mode: '0644'
+    mode: "0644"
 
 - name: Update the MS configuration with the celerity server secret
   ansible.builtin.lineinfile:
     path: /etc/mediaserver/msconf.py
-    regexp: '^CELERITY_SIGNING_KEY = '
-    line: "CELERITY_SIGNING_KEY = '{{ server_celerity_signing_key }}'"
+    regexp: "^CELERITY_SIGNING_KEY = "
+    line: CELERITY_SIGNING_KEY = '{{ server_celerity_signing_key }}'
     create: true
     owner: root
     group: root
     # 644 as all the instances must reach this file
     # The instances cannot be in a common group as of now => https://redmine.ubicast.net/issues/33046
-    mode: '0644'
+    mode: "0644"
 
 - name: create instances
   loop: "{{ server_instances }}"
@@ -69,9 +68,9 @@
     creates: /etc/nginx/sites-available/mediaserver-{{ item.name }}.conf
   throttle: 1
 
-- name: synchronize configuration between servers  # noqa 303
+- name: synchronize configuration between servers # noqa command-instead-of-module
   # Cannot use the ansible synchronization module, cause there is no way to set a destination IP intead of the destination ansible hostname
-  # noqa 303 = warn to use the synchronization module instead of rsync in the command module
+  # noqa command-instead-of-module = warn to use the synchronization module instead of rsync in the command module
   when:
     - groups['mediaserver'] | length > 1
     - inventory_hostname != groups['mediaserver'][0]
@@ -96,9 +95,9 @@
   changed_when: false
   tags: mediaserver-synchronize
 
-- name: synchronize letsencrypt configuration between servers  # noqa 303
+- name: synchronize letsencrypt configuration between servers # noqa command-instead-of-module
   # Cannot use the ansible synchronization module, cause there is no way to set a destination IP intead of the destination ansible hostname
-  # noqa 303 = warn to use the synchronization module instead of rsync in the command module
+  # noqa command-instead-of-module = warn to use the synchronization module instead of rsync in the command module
   when:
     - groups['mediaserver'] | length > 1
     - inventory_hostname != groups['mediaserver'][0]
@@ -128,9 +127,9 @@
     group: root
     # 644 as all the instances must reach this file
     # The instances cannot be in a common group as of now => https://redmine.ubicast.net/issues/33046
-    mode: '0644'
-    regexp: '^#? ?DEFAULT_FROM_EMAIL.*'
-    line: "DEFAULT_FROM_EMAIL = '{{ server_email_sender }}'"
+    mode: "0644"
+    regexp: ^#? ?DEFAULT_FROM_EMAIL.*
+    line: DEFAULT_FROM_EMAIL = '{{ server_email_sender }}'
     validate: python3 -m py_compile %s
 
 - name: configure domain name in nginx conf
@@ -138,8 +137,8 @@
   loop: "{{ server_instances }}"
   ansible.builtin.replace:
     path: /etc/nginx/sites-available/mediaserver-{{ item.name }}.conf
-    regexp: '^(\s*server_name).*;$'
-    replace: '\1 {{ item.ms_server_name }};'
+    regexp: ^(\s*server_name).*;$
+    replace: \1 {{ item.ms_server_name }};
     backup: true
 
 - name: configure domain name in database
@@ -168,7 +167,7 @@
   ansible.builtin.template:
     src: realip.conf.j2
     dest: /etc/nginx/conf.d/realip.conf
-    mode: '644'
+    mode: "644"
 
 - name: ensure mediaserver is running
   ansible.builtin.service:
@@ -190,5 +189,3 @@
 
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
-
-...
diff --git a/roles/mediavault/defaults/main.yml b/roles/mediavault/defaults/main.yml
index 3333bb11890723ef2facb82f26e3c2a657a3237d..62505f147c614e47628d289de48fb85e6994daf5 100644
--- a/roles/mediavault/defaults/main.yml
+++ b/roles/mediavault/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 mvt_packages:
   - ubicast-mediavault
 
@@ -20,5 +19,3 @@ mvt_ferm_output_rules:
     dport:
       - 22
 mvt_ferm_global_settings:
-
-...
diff --git a/roles/mediavault/meta/main.yml b/roles/mediavault/meta/main.yml
index e45d692ae3567f856967cd6f66c91d13e2e94e4e..471eb65e53129fb19aa41af0c049719b93c0ef91 100644
--- a/roles/mediavault/meta/main.yml
+++ b/roles/mediavault/meta/main.yml
@@ -1,6 +1,3 @@
 ---
-
 dependencies:
   - role: base
-
-...
diff --git a/roles/mediavault/tasks/mailer.yml b/roles/mediavault/tasks/mailer.yml
index 37ffc09ca4871eb2191a382d70cf6af22bb3e7e2..0bdfc28d622ff129ad98de655bb605c747904d05 100644
--- a/roles/mediavault/tasks/mailer.yml
+++ b/roles/mediavault/tasks/mailer.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: create mailer script
   when: mvt_mailer_enabled
   ansible.builtin.template:
@@ -13,6 +12,4 @@
   ansible.builtin.template:
     src: systemd-mailer-service.j2
     dest: "{{ mvt_mailer_service_path }}"
-    mode: '644'
-
-...
+    mode: "644"
diff --git a/roles/mediavault/tasks/main.yml b/roles/mediavault/tasks/main.yml
index 76d6020b360082d5b37953dbefd0947a7f6f287f..fcb7fe4ded173077f29cbb99e38370472f0ea558 100644
--- a/roles/mediavault/tasks/main.yml
+++ b/roles/mediavault/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install packages
   ansible.builtin.package:
     force_apt_get: true
@@ -28,6 +27,4 @@
   ansible.builtin.include_role:
     name: ferm-configure
 
-- ansible.builtin.meta: flush_handlers  # noqa unnamed-task
-
-...
+- ansible.builtin.meta: flush_handlers # noqa name[missing]
diff --git a/roles/mediaworker/defaults/main.yml b/roles/mediaworker/defaults/main.yml
index 3a35f11096680aae3c9527395f029fb63b24bc2e..6dfb346b6ff0d1ebcc8ce97aa2ad16c31489180e 100644
--- a/roles/mediaworker/defaults/main.yml
+++ b/roles/mediaworker/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 worker_celerity_signing_key: "{{ envsetup_celerity_signing_key | d('change-me', true) }}"
 worker_celerity_server: "{{ envsetup_celerity_server | d(envsetup_ms_server_name, true) | d('127.0.0.1', true) }}"
 
@@ -25,5 +24,3 @@ worker_ferm_output_rules:
     dport:
       - 6200
 worker_ferm_global_settings:
-
-...
diff --git a/roles/mediaworker/handlers/main.yml b/roles/mediaworker/handlers/main.yml
index 41c27f7ea08c5d4bb8dd25d6680cd02a9d477ad5..3e54611b77aaf83afac5e9990767cdd580125ebe 100644
--- a/roles/mediaworker/handlers/main.yml
+++ b/roles/mediaworker/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart celerity-workers
   ansible.builtin.service:
     name: celerity-workers
     state: restarted
-
-...
diff --git a/roles/mediaworker/meta/main.yml b/roles/mediaworker/meta/main.yml
index e45d692ae3567f856967cd6f66c91d13e2e94e4e..471eb65e53129fb19aa41af0c049719b93c0ef91 100644
--- a/roles/mediaworker/meta/main.yml
+++ b/roles/mediaworker/meta/main.yml
@@ -1,6 +1,3 @@
 ---
-
 dependencies:
   - role: base
-
-...
diff --git a/roles/mediaworker/tasks/main.yml b/roles/mediaworker/tasks/main.yml
index d4282ed4524851fd805dbc0d51160d0770c83579..059bc8c7d6a1f9303758ea07c1c0f1060512a21c 100644
--- a/roles/mediaworker/tasks/main.yml
+++ b/roles/mediaworker/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install celerity worker
   ansible.builtin.apt:
     force_apt_get: true
@@ -14,7 +13,7 @@
   ansible.builtin.template:
     src: celerity-config.py.j2
     dest: /etc/celerity/config.py
-    mode: '644'
+    mode: "644"
 
 - name: ensure celerity worker is running
   ansible.builtin.service:
@@ -36,5 +35,3 @@
 
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
-
-...
diff --git a/roles/metricbeat/defaults/main.yml b/roles/metricbeat/defaults/main.yml
index f3a01583e783f8e6d5b8617f9646486e79fb3713..5d388213b6a2e5240688a53ba5ef0bf0d4e49a95 100644
--- a/roles/metricbeat/defaults/main.yml
+++ b/roles/metricbeat/defaults/main.yml
@@ -2,5 +2,3 @@
 elastic_host: localhost
 elastic_port: 9200
 kibana_server_host: localhost
-
-...
diff --git a/roles/metricbeat/handlers/main.yml b/roles/metricbeat/handlers/main.yml
index 273514a5571eb631a32969e2b565d31ceef7ddee..5236104415be8b4c7a18d28b310504c462eb9549 100644
--- a/roles/metricbeat/handlers/main.yml
+++ b/roles/metricbeat/handlers/main.yml
@@ -3,5 +3,3 @@
   ansible.builtin.service:
     name: metricbeat
     state: restarted
-
-...
diff --git a/roles/metricbeat/tasks/main.yml b/roles/metricbeat/tasks/main.yml
index 95d966913771b8e3723d8ec1cc352b1d3dba2fd3..169e9b30d39c22e77de67614a47bb3ba5fd84434 100644
--- a/roles/metricbeat/tasks/main.yml
+++ b/roles/metricbeat/tasks/main.yml
@@ -32,7 +32,7 @@
   ansible.builtin.template:
     src: metricbeat.yml.j2
     dest: /etc/metricbeat/metricbeat.yml
-    mode: '644'
+    mode: "644"
   notify: restart metricbeat
 
 - name: enable metricbeat dashboard
@@ -43,7 +43,7 @@
   ansible.builtin.template:
     src: postgresql.yml.j2
     dest: /etc/metricbeat/modules.d/postgresql.yml
-    mode: '644'
+    mode: "644"
   when: "'postgres' in group_names"
   notify: restart metricbeat
 
@@ -52,5 +52,3 @@
     name: metricbeat
     enabled: true
     state: started
-
-...
diff --git a/roles/mirismanager/defaults/main.yml b/roles/mirismanager/defaults/main.yml
index 2f144db52681691d2869af195cca0d6de98e668f..17e07d35392635e07ce3fc9e5f8ef22e2c21954b 100644
--- a/roles/mirismanager/defaults/main.yml
+++ b/roles/mirismanager/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 # ubicast-skyreach-runtime packages: todo: manage the database in inventory (/!\ can be idenpendent of the MS database)
 dependencies_packages:
   - apt-cacher-ng
@@ -13,7 +12,7 @@ manager_packages:
 
 manager_testing: false
 manager_hostname: "{{ envsetup_cm_server_name | d('mirismanager', true) }}"
-manager_default_email_sender: "noreply@{{ manager_hostname }}"
+manager_default_email_sender: noreply@{{ manager_hostname }}
 manager_email_sender: "{{ envsetup_email_sender | default(manager_default_email_sender, true) }}"
 manager_proxy_http: "{{ envsetup_proxy_http | d() }}"
 
@@ -31,5 +30,3 @@ manager_ferm_input_rules:
       - 3142
 manager_ferm_output_rules: []
 manager_ferm_global_settings:
-
-...
diff --git a/roles/mirismanager/handlers/main.yml b/roles/mirismanager/handlers/main.yml
index 90192c9b7a29fbd47b74cb3f6522433505e13028..b84c28ab94874e8bac3cc52e4af671167add1c39 100644
--- a/roles/mirismanager/handlers/main.yml
+++ b/roles/mirismanager/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: restart nginx
   ansible.builtin.service:
     name: nginx
@@ -14,5 +13,3 @@
   ansible.builtin.service:
     name: apt-cacher-ng
     state: restarted
-
-...
diff --git a/roles/mirismanager/meta/main.yml b/roles/mirismanager/meta/main.yml
index c4cc4780bdd8beed15375c59565a39585cff6b22..9ab21e04b40be9b7a81c45464ae84dba2afe4d34 100644
--- a/roles/mirismanager/meta/main.yml
+++ b/roles/mirismanager/meta/main.yml
@@ -1,9 +1,6 @@
 ---
-
 dependencies:
   - role: base
   - role: nginx
   - when: "'postgres' in group_names"
     role: postgres
-
-...
diff --git a/roles/mirismanager/tasks/main.yml b/roles/mirismanager/tasks/main.yml
index 6e9506b90b7a8cc4a8a96734acf74b6786fef3ca..9e8c9f5e9aaebcb6315db39bfebf7096926ccec5 100644
--- a/roles/mirismanager/tasks/main.yml
+++ b/roles/mirismanager/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: mirismanager dependencies install
   ansible.builtin.apt:
     force_apt_get: true
@@ -29,8 +28,8 @@
   notify: restart skyreach
   ansible.builtin.lineinfile:
     path: /home/skyreach/skyreach_data/private/settings_override.py
-    regexp: '^#? ?DEFAULT_FROM_EMAIL.*'
-    line: "DEFAULT_FROM_EMAIL = '{{ manager_email_sender }}'"
+    regexp: ^#? ?DEFAULT_FROM_EMAIL.*
+    line: DEFAULT_FROM_EMAIL = '{{ manager_email_sender }}'
     backup: true
 
 - name: ensure skyreach is running
@@ -51,8 +50,8 @@
   notify: restart apt-cacher-ng
   ansible.builtin.lineinfile:
     path: /etc/apt-cacher-ng/acng.conf
-    regexp: '^Proxy: .*'
-    line: 'Proxy: {{ manager_proxy_http }}'
+    regexp: "^Proxy: .*"
+    line: "Proxy: {{ manager_proxy_http }}"
 
 - name: ensure apt-cacher-ng is running
   ansible.builtin.service:
@@ -74,5 +73,3 @@
 
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
-
-...
diff --git a/roles/munin/msmonitor/defaults/main.yml b/roles/munin/msmonitor/defaults/main.yml
index 47019bc9ff066808741c1de15fd1bf2fdc81e125..f47297ab5b3e4db3fe9460cc97e2e4780b70568d 100644
--- a/roles/munin/msmonitor/defaults/main.yml
+++ b/roles/munin/msmonitor/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 monitor_shell_pwd: "{{ envsetup_monitor_shell_pwd | d() }}"
 monitor_admin_pwd: "{{ envsetup_monitor_admin_pwd | d() }}"
 monitor_superuser_pwd: "{{ envsetup_monitor_superuser_pwd | d() }}"
@@ -16,5 +15,3 @@ monitor_ferm_input_rules:
       - 443
 monitor_ferm_output_rules: []
 monitor_ferm_global_settings:
-
-...
diff --git a/roles/munin/msmonitor/handlers/main.yml b/roles/munin/msmonitor/handlers/main.yml
index 79ac1936cc627c8e403355aa0459de373f6f5cd8..06947324ccfdfd28f5795805cbd51f9526f16624 100644
--- a/roles/munin/msmonitor/handlers/main.yml
+++ b/roles/munin/msmonitor/handlers/main.yml
@@ -3,4 +3,3 @@
   ansible.builtin.service:
     name: nginx
     state: restarted
-...
diff --git a/roles/munin/msmonitor/meta/main.yml b/roles/munin/msmonitor/meta/main.yml
index 531dba176fd1cedcdbca88a1fd3d4495af8c8412..c56295711751b70094d2d899b083d9a1801d0742 100644
--- a/roles/munin/msmonitor/meta/main.yml
+++ b/roles/munin/msmonitor/meta/main.yml
@@ -1,11 +1,8 @@
 ---
-
 dependencies:
-  - role: conf           # get conf.sh
-  - role: init           # setup keys
-  - role: sysconfig      # setup repos
+  - role: conf # get conf.sh
+  - role: init # setup keys
+  - role: sysconfig # setup repos
   - role: ferm-install
   - role: ferm-configure
   - role: nginx
-
-...
diff --git a/roles/munin/msmonitor/tasks/main.yml b/roles/munin/msmonitor/tasks/main.yml
index 3251fd361ee65a84fc3e9780d1689fba6bd62127..c0daa5ad17ff182ddab2152979c7d697ac151522 100644
--- a/roles/munin/msmonitor/tasks/main.yml
+++ b/roles/munin/msmonitor/tasks/main.yml
@@ -1,11 +1,10 @@
 ---
-
 - name: install ubicast msmonitor
   environment:
     MONITOR_SERVER_NAME: "{{ monitor_hostname }}"
     MONITOR_SHELL_PWD: "{{ monitor_shell_pwd | password_hash('sha512', 'monitor') }}"
     MONITOR_ADMIN_PWD: "{{ monitor_admin_pwd | password_hash('sha512', 'monitor') }}"
-    MONITOR_SUPERUSER_PWD: "{{ monitor_superuser_pwd  }}"
+    MONITOR_SUPERUSER_PWD: "{{ monitor_superuser_pwd }}"
     SSH_MAINTENANCE_PORT: "{{ ssh_maintenance_port }}"
   ansible.builtin.apt:
     force_apt_get: true
@@ -35,5 +34,3 @@
     ferm_global_settings: "{{ monitor_ferm_global_settings }}"
   ansible.builtin.include_role:
     name: ferm-configure
-
-...
diff --git a/roles/munin/munin-node/defaults/main.yml b/roles/munin/munin-node/defaults/main.yml
index 57776454f2c2b02ff3248390d30252e1c0d2d2be..57b76bedc49714660da1b3efed2c4b16bb9e1cb9 100644
--- a/roles/munin/munin-node/defaults/main.yml
+++ b/roles/munin/munin-node/defaults/main.yml
@@ -1,6 +1,3 @@
 ---
-
 munin_node_logfile: /var/log/munin/munin-node.log
 munin_node_pidfile: /var/run/munin/munin-node.pid
-
-...
diff --git a/roles/munin/munin-node/handlers/main.yml b/roles/munin/munin-node/handlers/main.yml
index 04737382a43c73eee1fbf9d0305bfe2527bf1912..5ca5295a86983fb9374c74c8aff1aa34bb9de7fb 100644
--- a/roles/munin/munin-node/handlers/main.yml
+++ b/roles/munin/munin-node/handlers/main.yml
@@ -3,4 +3,3 @@
   ansible.builtin.service:
     name: munin-node
     state: restarted
-...
diff --git a/roles/munin/munin-node/meta/main.yml b/roles/munin/munin-node/meta/main.yml
index f58686c2c01077136bd9a0cc83e0d5216a743efb..abeb01c5ebe3bb839d52b380fe06093a2911ad7b 100644
--- a/roles/munin/munin-node/meta/main.yml
+++ b/roles/munin/munin-node/meta/main.yml
@@ -1,14 +1,11 @@
 ---
-
 dependencies:
-  - role: conf           # get conf.sh
-  - role: init           # setup keys pkgs
-  - role: sysconfig      # setup repos
+  - role: conf # get conf.sh
+  - role: init # setup keys pkgs
+  - role: sysconfig # setup repos
 
 # TODO:
 # - remove all uneeded dependencies
 # - only need :
 #     * ubicast repo
 #     * firewall input 4949
-
-...
diff --git a/roles/munin/munin-node/tasks/main.yml b/roles/munin/munin-node/tasks/main.yml
index d001c1a560082ec98a7357d83cc634515f5ec3bc..963103195b22ea1ff51b43dab280b3fcd2abf118 100644
--- a/roles/munin/munin-node/tasks/main.yml
+++ b/roles/munin/munin-node/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install required packages for munin-node
   ansible.builtin.apt:
     force_apt_get: true
@@ -17,7 +16,7 @@
   ansible.builtin.template:
     src: munin-node.conf.j2
     dest: /etc/munin/munin-node.conf
-    mode: '644'
+    mode: "644"
 
 - name: setup munin-node plugins link
   notify: restart munin-node
@@ -26,5 +25,3 @@
   # sh -x print executed cmd to stderr
   register: munin_plugin_linked
   changed_when: munin_plugin_linked.stderr | length > 0
-
-...
diff --git a/roles/munin/munin-server/handlers/main.yml b/roles/munin/munin-server/handlers/main.yml
index 9a7279bde041ef9e7569289a7c47355478c0d9a4..6bdfb0cad5f908aed5139a8e3d3577c0932c8fab 100644
--- a/roles/munin/munin-server/handlers/main.yml
+++ b/roles/munin/munin-server/handlers/main.yml
@@ -3,4 +3,3 @@
   ansible.builtin.service:
     name: munin
     state: restarted
-...
diff --git a/roles/munin/munin-server/tasks/main.yml b/roles/munin/munin-server/tasks/main.yml
index e9868e9bccbc6617b1a961f232d27846969abd0b..743e81a4191c23e2075b86306eeffb9d6acb4f0a 100644
--- a/roles/munin/munin-server/tasks/main.yml
+++ b/roles/munin/munin-server/tasks/main.yml
@@ -1,6 +1,5 @@
 ---
-
-- name: "install required packages for munin-server"
+- name: install required packages for munin-server
   ansible.builtin.apt:
     force_apt_get: true
     install_recommends: false
@@ -11,20 +10,18 @@
   retries: 60
   until: apt_status is success or ('Failed to lock apt for exclusive operation' not in apt_status.msg and '/var/lib/dpkg/lock' not in apt_status.msg)
 
-- name: "copy munin-server configuration"
+- name: copy munin-server configuration
   notify: restart munin-server
   ansible.builtin.template:
     src: munin.conf.j2
     dest: /etc/munin/munin.conf
-    mode: '644'
+    mode: "644"
 
 # Force munin-server restart to avoid default localdomain graph creation after remove
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
 
-- name: "remove default localdomain files"
+- name: remove default localdomain files
   ansible.builtin.file:
     path: /var/cache/munin/www/localdomain
     state: absent
-
-...
diff --git a/roles/netcapture/defaults/main.yml b/roles/netcapture/defaults/main.yml
index 9eab36d358b136a77e40741b7dccedf9f38513a1..4faa6797e4d85eaab06f8073d81911437646dd84 100644
--- a/roles/netcapture/defaults/main.yml
+++ b/roles/netcapture/defaults/main.yml
@@ -1,6 +1,5 @@
 ---
-
-netcapture_mm_url: "https://{{ envsetup_cm_server_name | default('mirismanager.ubicast.eu', true) }}"
+netcapture_mm_url: https://{{ envsetup_cm_server_name | default('mirismanager.ubicast.eu', true) }}
 netcapture_mm_ssl: true
 netcapture_conf_folder: /etc/miris/conf
 netcapture_media_folder: /data/netcapture/media
@@ -8,5 +7,3 @@ netcapture_pkg_folder: /data/netcapture/packages
 netcapture_hw_acceleration: false
 netcapture_miris_user_pwd: "{{ lookup('password', '/tmp/passwordfile length=12 chars=ascii_letters,digits') }}"
 netcapture_miris_auth: true
-
-...
diff --git a/roles/netcapture/tasks/main.yml b/roles/netcapture/tasks/main.yml
index 0e797c9695c6ec3f3fc3dc063dbc7a3957eddfe6..41f207241e72f4862c14540d3722f12129e927c0 100644
--- a/roles/netcapture/tasks/main.yml
+++ b/roles/netcapture/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: netcapture install
   ansible.builtin.apt:
     force_apt_get: true
@@ -13,13 +12,13 @@
   ansible.builtin.template:
     src: netcapture.json.j2
     dest: /etc/miris/netcapture.json
-    mode: '644'
+    mode: "644"
 
 - name: netcapture miris
   ansible.builtin.template:
     src: miris-api.json.j2
     dest: /etc/miris/conf/api.json
-    mode: '644'
+    mode: "644"
 
 - name: netcapture config dir
   ansible.builtin.file:
@@ -41,5 +40,3 @@
     path: "{{ netcapture_pkg_folder }}"
     mode: u=rwX,g=rwX,o=rx
     state: directory
-
-...
diff --git a/roles/network/defaults/main.yml b/roles/network/defaults/main.yml
index 8bb78b758d30ac8515ad091d073567ed7071bf98..ffd1d4cfb23ce97dac84708b7a6485f428590157 100644
--- a/roles/network/defaults/main.yml
+++ b/roles/network/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 network_apply: false
 
 network_packages:
@@ -15,5 +14,3 @@ network_ip_mask: "{{ network_ip }}/{{ network_mask }}"
 network_ip_mask_cidr: "{{ network_ip_mask | ipaddr }}"
 network_gateway: "{{ envsetup_network_gateway | d() }}"
 network_dns: "{{ envsetup_network_dns.split(',') | d() }}"
-
-...
diff --git a/roles/network/tasks/main.yml b/roles/network/tasks/main.yml
index a0dec2a1cbe042f0aed410a0e1de0c12c16e9ff9..8f46cded4723b3a4e21da7d46dbaf5cdc1817d55 100644
--- a/roles/network/tasks/main.yml
+++ b/roles/network/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: if network settings are set
   when:
     - network_apply | bool
@@ -8,7 +7,6 @@
     - network_gateway | d(false)
     - network_dns | d(false)
   block:
-
     - name: packages
       ansible.builtin.apt:
         force_apt_get: true
@@ -24,7 +22,7 @@
       ansible.builtin.copy:
         dest: /etc/network/interfaces
         backup: true
-        mode: '644'
+        mode: "644"
         content: |
           # This file describes the network interfaces available on your system
           # and how to activate them. For more information, se interfaces(5).
@@ -44,7 +42,7 @@
 
     - name: interface
       nmcli:
-        conn_name: "envsetup-{{ ansible_default_ipv4.interface }}"
+        conn_name: envsetup-{{ ansible_default_ipv4.interface }}
         type: ethernet
         ifname: "{{ ansible_default_ipv4.interface }}"
         ip4: "{{ network_ip_mask_cidr | ipv4 }}"
@@ -53,5 +51,3 @@
         autoconnect: true
         activate: false
         state: present
-
-...
diff --git a/roles/nginx/defaults/main.yml b/roles/nginx/defaults/main.yml
index ce0f4ececd1b81ff4d24311549a0de482a898c38..f1ac547180fd5365484bfa5a922e6e0846d2322e 100644
--- a/roles/nginx/defaults/main.yml
+++ b/roles/nginx/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 nginx_packages:
   - nginx
   - uwsgi
@@ -7,5 +6,3 @@ nginx_packages:
 
 nginx_ssl_certificate: /etc/ssl/certs/ssl-cert-snakeoil.pem
 nginx_ssl_certificate_key: /etc/ssl/private/ssl-cert-snakeoil.key
-
-...
diff --git a/roles/nginx/handlers/main.yml b/roles/nginx/handlers/main.yml
index b0abf18b3894fe1af147e0ca063f8e9e9a913d58..abb7b115ab9c7d835c04a6320b219dd71fe3f789 100644
--- a/roles/nginx/handlers/main.yml
+++ b/roles/nginx/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart nginx
   ansible.builtin.systemd:
     name: nginx
     state: restarted
-
-...
diff --git a/roles/nginx/tasks/main.yml b/roles/nginx/tasks/main.yml
index fd7de4cab5e7f5ff639b64824e4481497cb7cc13..357dbb13255953dd25b87f452758482d06e9a6d2 100644
--- a/roles/nginx/tasks/main.yml
+++ b/roles/nginx/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: nginx install
   ansible.builtin.apt:
     force_apt_get: true
@@ -45,8 +44,8 @@
   notify: restart nginx
   ansible.builtin.lineinfile:
     path: /etc/nginx/conf.d/ssl_certificate.conf
-    regexp: 'ssl_certificate\s+([\w/\-\_\.]+);'
-    line: 'ssl_certificate {{ nginx_ssl_certificate }};'
+    regexp: ssl_certificate\s+([\w/\-\_\.]+);
+    line: ssl_certificate {{ nginx_ssl_certificate }};
 
 - name: nginx update ssl certificate key conf
   when:
@@ -55,7 +54,5 @@
   notify: restart nginx
   ansible.builtin.lineinfile:
     path: /etc/nginx/conf.d/ssl_certificate.conf
-    regexp: 'ssl_certificate_key\s+([\w/\-\_\.]+);'
-    line: 'ssl_certificate_key {{ nginx_ssl_certificate_key }};'
-
-...
+    regexp: ssl_certificate_key\s+([\w/\-\_\.]+);
+    line: ssl_certificate_key {{ nginx_ssl_certificate_key }};
diff --git a/roles/postfix/defaults/main.yml b/roles/postfix/defaults/main.yml
index 7a901669844bf40deb96c0772111146429689ed0..e31952669f8403785d91ce1ac49d9a6f71f20060 100644
--- a/roles/postfix/defaults/main.yml
+++ b/roles/postfix/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 postfix_packages:
   - postfix
   - bsd-mailx
@@ -11,5 +10,3 @@ postfix_relay_host: "{{ envsetup_email_smtp_server | d() }}"
 postfix_relay_user: "{{ envsetup_email_smtp_user | d() }}"
 postfix_relay_pass: "{{ envsetup_email_smtp_pwd | d() }}"
 postfix_admin: sysadmin@ubicast.eu
-
-...
diff --git a/roles/postfix/handlers/main.yml b/roles/postfix/handlers/main.yml
index ef16bd5ac889dd4f632385b76e10c30cfa7a6591..56cbbb41b15a09681b7f6a87403742579e2de467 100644
--- a/roles/postfix/handlers/main.yml
+++ b/roles/postfix/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: postmap sasl
   ansible.builtin.command: postmap hash:/etc/postfix/sasl-passwords
 
@@ -16,5 +15,3 @@
   ansible.builtin.service:
     name: postfix
     state: restarted
-
-...
diff --git a/roles/postfix/meta/main.yml b/roles/postfix/meta/main.yml
index e8c55ae416ea2a478accac6868dbe270825cf1b2..13aee63ee90c4187d44ad6da8185d0468e43f6d3 100644
--- a/roles/postfix/meta/main.yml
+++ b/roles/postfix/meta/main.yml
@@ -1,6 +1,3 @@
 ---
-
 dependencies:
   - role: conf
-
-...
diff --git a/roles/postfix/tasks/main.yml b/roles/postfix/tasks/main.yml
index a65ace761ffb32d5274239bba057f93bed234a6e..aee1a72c70b7af7a479a399fcf9934774fe20676 100644
--- a/roles/postfix/tasks/main.yml
+++ b/roles/postfix/tasks/main.yml
@@ -1,10 +1,9 @@
 ---
-
 - name: create postfix dir
   ansible.builtin.file:
     path: /etc/postfix
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: postfix main config
   notify: restart postfix
@@ -12,7 +11,7 @@
     backup: true
     src: main.cf.j2
     dest: /etc/postfix/main.cf
-    mode: '644'
+    mode: "644"
 
 - name: postfix mailname
   notify: restart postfix
@@ -20,7 +19,7 @@
     backup: true
     dest: /etc/mailname
     content: "{{ postfix_mailname }}"
-    mode: '644'
+    mode: "644"
 
 - name: postfix local aliases
   notify:
@@ -29,7 +28,7 @@
   ansible.builtin.copy:
     backup: true
     dest: /etc/aliases
-    mode: '644'
+    mode: "644"
     content: |
       devnull: /dev/null
       clamav: root
@@ -42,7 +41,7 @@
   ansible.builtin.copy:
     backup: true
     dest: /etc/postfix/virtual
-    mode: '644'
+    mode: "644"
     content: |
       postmaster@{{ postfix_mailname }} root
       bounces@{{ postfix_mailname }} root
@@ -55,7 +54,7 @@
   ansible.builtin.copy:
     backup: true
     dest: /etc/postfix/generic
-    mode: '644'
+    mode: "644"
     content: |
       root@localhost {{ postfix_email_sender }}
       root@{{ postfix_mailname }} {{ postfix_email_sender }}
@@ -74,7 +73,7 @@
   ansible.builtin.copy:
     backup: true
     dest: /etc/postfix/sasl-passwords
-    mode: '644'
+    mode: "644"
     content: "{{ postfix_relay_host }} {{ postfix_relay_user }}:{{ postfix_relay_pass }}"
 
 - name: install postfix
@@ -92,5 +91,3 @@
     name: postfix
     enabled: true
     state: started
-
-...
diff --git a/roles/postgres-ha/defaults/main.yml b/roles/postgres-ha/defaults/main.yml
index dcdd831a34ffc4c8cbd98c49aaff540755a71ccb..304c9db92aaeb006dfee2df48985e7e37a1e8c96 100644
--- a/roles/postgres-ha/defaults/main.yml
+++ b/roles/postgres-ha/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 repmgr_packages:
   - repmgr
   # rephacheck:
@@ -22,7 +21,7 @@ repmgr_primary_node: "{{ hostvars[groups['postgres'][0]]['ansible_default_ipv4']
 
 repmgr_timeout: 5
 
-repmgr_node_id: "{{ (groups['postgres'].index(inventory_hostname))+1 | int }}"
+repmgr_node_id: "{{ (groups['postgres'].index(inventory_hostname)) + 1 | int }}"
 repmgr_node_name: "{{ ansible_hostname }}"
 repmgr_conninfo: host={{ ansible_default_ipv4.address }} dbname={{ repmgr_db }} user={{ repmgr_user }} connect_timeout={{ repmgr_timeout }}
 
@@ -43,5 +42,3 @@ pg_ferm_output_rules:
       - 54321
       - 54322
 pg_ferm_global_settings:
-
-...
diff --git a/roles/postgres-ha/handlers/main.yml b/roles/postgres-ha/handlers/main.yml
index 8e43f565efc01e13ec4aca133934d3b3a39502a0..8d59175c43fce4b4292263dfc69ce2dc50b24367 100644
--- a/roles/postgres-ha/handlers/main.yml
+++ b/roles/postgres-ha/handlers/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: reload systemd
   ansible.builtin.systemd:
     daemon_reload: true
@@ -18,5 +17,3 @@
   ansible.builtin.systemd:
     name: rephacheck.socket
     state: restarted
-
-...
diff --git a/roles/postgres-ha/tasks/main.yml b/roles/postgres-ha/tasks/main.yml
index 15e2d51d7b6f66c6e500faabde9e6894e88e68ac..40345d22f9a8f4fe1f2b7e05870bea8b0fe3016f 100644
--- a/roles/postgres-ha/tasks/main.yml
+++ b/roles/postgres-ha/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 # INSTALLATION
 
 - name: install packages
@@ -73,25 +72,25 @@
     dest: "{{ repmgr_config }}"
     owner: postgres
     group: postgres
-    mode: '644'
+    mode: "644"
 
 - name: configure debian default
   notify: restart repmgrd
   loop:
     - key: REPMGRD_ENABLED
-      value: 'yes'
+      value: "yes"
     - key: REPMGRD_CONF
       value: "{{ repmgr_config }}"
   ansible.builtin.replace:
     path: /etc/default/repmgrd
-    regexp: '^#?{{ item.key }}=.*$'
-    replace: '{{ item.key }}={{ item.value }}'
+    regexp: ^#?{{ item.key }}=.*$
+    replace: "{{ item.key }}={{ item.value }}"
 
 - name: configure sudo
   ansible.builtin.copy:
     dest: /etc/sudoers.d/postgres
     validate: visudo -cf %s
-    mode: '440'
+    mode: "440"
     content: |
       Defaults:postgres !requiretty
       postgres ALL=NOPASSWD: \
@@ -129,7 +128,7 @@
     dest: ~postgres/.ssh/config
     owner: postgres
     group: postgres
-    mode: '640'
+    mode: "640"
     content: |
       IdentityFile ~/.ssh/id_ed25519
       StrictHostKeyChecking no
@@ -140,7 +139,6 @@
 - name: setup primary
   when: (db_role is defined and db_role == "primary") or (db_role is undefined and inventory_hostname == groups['postgres'][0])
   block:
-
     - name: check if primary already joined
       become: true
       become_user: postgres
@@ -157,14 +155,13 @@
       ansible.builtin.command:
         cmd: repmgr --config-file={{ repmgr_config }} primary register
 
-- ansible.builtin.meta: flush_handlers  # noqa unnamed-task
+- ansible.builtin.meta: flush_handlers # noqa name[missing]
 
 # REGISTER STANDBY
 
 - name: setup standby
   when: (db_role is defined and db_role == "standby") or (db_role is undefined and inventory_hostname == groups['postgres'][1])
   block:
-
     - name: check if standby already joined
       become: true
       become_user: postgres
@@ -239,14 +236,13 @@
       ansible.builtin.command:
         cmd: repmgr --config-file={{ repmgr_config }} standby register
 
-- ansible.builtin.meta: flush_handlers  # noqa unnamed-task
+- ansible.builtin.meta: flush_handlers # noqa name[missing]
 
 # REGISTER WITNESS
 
 - name: setup witness
   when: (db_role is defined and db_role == "witness") or (db_role is undefined and inventory_hostname == groups['postgres'][2])
   block:
-
     - name: check if witness already joined
       become: true
       become_user: postgres
@@ -263,7 +259,7 @@
       ansible.builtin.command:
         cmd: repmgr --config-file={{ repmgr_config }} --host={{ repmgr_primary_node }} witness register
 
-- ansible.builtin.meta: flush_handlers  # noqa unnamed-task
+- ansible.builtin.meta: flush_handlers # noqa name[missing]
 
 # REPHACHECK
 
@@ -292,7 +288,7 @@
     - restart rephacheck
   ansible.builtin.copy:
     dest: /etc/systemd/system/rephacheck.socket
-    mode: '644'
+    mode: "644"
     content: |
       [Unit]
       Description=RepHACheck socket
@@ -310,7 +306,7 @@
     - restart rephacheck
   ansible.builtin.copy:
     dest: /etc/systemd/system/rephacheck@.service
-    mode: '644'
+    mode: "644"
     content: |
       [Unit]
       Description=RepHACheck - Health check for PostgreSQL cluster managed by repmgr
@@ -336,5 +332,3 @@
     ferm_global_settings: "{{ pg_ferm_global_settings }}"
   ansible.builtin.include_role:
     name: ferm-configure
-
-...
diff --git a/roles/postgres/defaults/main.yml b/roles/postgres/defaults/main.yml
index c9938a2fa65567b3850548d03b65a5e2436b23bc..a23a4b63adf9b291cd8ab137d0177e9f7d468e54 100644
--- a/roles/postgres/defaults/main.yml
+++ b/roles/postgres/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 pg_packages:
   - acl
   - postgresql
@@ -33,9 +32,7 @@ pg_hba:
     address: ::1/128
 
 pg_users: []
-
 pg_databases: []
-
 pg_firewall_enabled: true
 pg_ferm_rules_filename: postgres
 pg_ferm_input_rules:
@@ -45,5 +42,3 @@ pg_ferm_input_rules:
       - 5432
 pg_ferm_output_rules: []
 pg_ferm_global_settings:
-
-...
diff --git a/roles/postgres/handlers/main.yml b/roles/postgres/handlers/main.yml
index 6a5616ec10107967826013510b384228909a6faa..0d04643c9c1ead30c100dfa9f5094ab968846b9e 100644
--- a/roles/postgres/handlers/main.yml
+++ b/roles/postgres/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart postgresql
   ansible.builtin.systemd:
     name: postgresql@{{ pg_version }}-{{ pg_cluster }}
     state: restarted
-
-...
diff --git a/roles/postgres/meta/main.yml b/roles/postgres/meta/main.yml
index e45d692ae3567f856967cd6f66c91d13e2e94e4e..471eb65e53129fb19aa41af0c049719b93c0ef91 100644
--- a/roles/postgres/meta/main.yml
+++ b/roles/postgres/meta/main.yml
@@ -1,6 +1,3 @@
 ---
-
 dependencies:
   - role: base
-
-...
diff --git a/roles/postgres/tasks/main.yml b/roles/postgres/tasks/main.yml
index fcf23ded110885133c7fd654f19d88b9e713b33b..42564f3641d08e5355507be3f758f08b63e8bc9f 100644
--- a/roles/postgres/tasks/main.yml
+++ b/roles/postgres/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: ansible postgresql requirements install
   ansible.builtin.apt:
     force_apt_get: true
@@ -23,11 +22,11 @@
 - name: update logrotate config
   ansible.builtin.copy:
     src: logrotate-postgresql
-    dest: "/etc/logrotate.d/postgresql-common"
+    dest: /etc/logrotate.d/postgresql-common
     owner: root
     group: root
     backup: false
-    mode: '644'
+    mode: "644"
 
 - name: ensure conf directory exists
   ansible.builtin.file:
@@ -35,21 +34,21 @@
     owner: postgres
     group: postgres
     state: directory
-    mode: '755'
+    mode: "755"
 
 - name: ensure conf directory is included
   ansible.builtin.replace:
     path: "{{ pg_conf_dir }}/postgresql.conf"
     backup: true
-    regexp: "^#?include_dir = '[A-Za-z\\.]+'(\\s+.*)$"
-    replace: "include_dir = 'conf.d'\\1"
+    regexp: ^#?include_dir = '[A-Za-z\.]+'(\s+.*)$
+    replace: include_dir = 'conf.d'\1
 
 - name: change max connections value
   ansible.builtin.replace:
     path: "{{ pg_conf_dir }}/postgresql.conf"
     backup: true
-    regexp: "^#?max_connections = [0-9]+"
-    replace: "max_connections = {{ pg_conf_max_connections }}"
+    regexp: ^#?max_connections = [0-9]+
+    replace: max_connections = {{ pg_conf_max_connections }}
   when: pg_conf_max_connections is defined
 
 - name: configure custom settings
@@ -62,7 +61,7 @@
     group: postgres
     backup: true
     content: "{{ item.content }}"
-    mode: '644'
+    mode: "644"
 
 - name: configure authentication
   notify: restart postgresql
@@ -105,7 +104,7 @@
 - name: set .pgpass to allow passwordless connection
   loop: "{{ query('nested', ['root', 'postgres'], pg_users) }}"
   ansible.builtin.blockinfile:
-    path: "~{{ item.0 }}/.pgpass"
+    path: ~{{ item.0 }}/.pgpass
     block: "*:*:*:{{ item.1.name }}:{{ item.1.password }}"
     marker: "# {mark} {{ item.1.name }}"
     create: true
@@ -137,5 +136,3 @@
 
 - name: flush handlers
   ansible.builtin.meta: flush_handlers
-
-...
diff --git a/roles/proxy/defaults/main.yml b/roles/proxy/defaults/main.yml
index 50957cb1baa86268f3a3d12219299fb744350d97..77cb8136d9678e2b4444ae8d02f8e081c3f1682e 100644
--- a/roles/proxy/defaults/main.yml
+++ b/roles/proxy/defaults/main.yml
@@ -1,14 +1,11 @@
 ---
-
 proxy_http: "{{ envsetup_proxy_http | d() }}"
 proxy_https: "{{ envsetup_proxy_https | d() }}"
 proxy_exclude:
-  - "localhost"
-  - "127.0.0.1"
-  - "::1"
+  - localhost
+  - 127.0.0.1
+  - ::1
   - "{{ envsetup_proxy_exclude | d() }}"
   - "{{ envsetup_ms_server_name | d() }}"
   - "{{ envsetup_monitor_server_name | d() }}"
   - "{{ envsetup_cm_server_name | d() }}"
-
-...
diff --git a/roles/proxy/tasks/main.yml b/roles/proxy/tasks/main.yml
index 5b8f081210d5c517ed87c723b02cb944bfb7e08f..2c766899da6a0823c843d7c1f153e89bd5252bed 100644
--- a/roles/proxy/tasks/main.yml
+++ b/roles/proxy/tasks/main.yml
@@ -1,18 +1,16 @@
 ---
-
 - name: if proxy settings are set
   when:
     - proxy_http | d(false)
     - proxy_https | d(false)
   block:
-
     - name: environment
       ansible.builtin.blockinfile:
         path: /etc/environment
         create: true
         marker_begin: BEGIN PROXY
         marker_end: END PROXY
-        mode: '644'
+        mode: "644"
         block: |
           http_proxy={{ proxy_http }}
           HTTP_PROXY={{ proxy_http }}
@@ -24,7 +22,7 @@
     - name: apt
       ansible.builtin.copy:
         dest: /etc/apt/apt.conf.d/proxy
-        mode: '644'
+        mode: "644"
         content: |
           Acquire::http::Proxy "{{ proxy_http }}";
           Acquire::https::Proxy "{{ proxy_https }}";
@@ -32,7 +30,7 @@
     - name: wget
       ansible.builtin.copy:
         dest: /etc/wgetrc
-        mode: '644'
+        mode: "644"
         content: |
           use_proxy=yes
           http_proxy={{ proxy_http }}
@@ -59,5 +57,3 @@
         scope: global
         value: "{{ item.value }}"
         state: present
-
-...
diff --git a/roles/sysconfig/defaults/main.yml b/roles/sysconfig/defaults/main.yml
index c6c588f5b9fa576d6ce276dd09d3dcee64664d61..df8498dc45afee7c52dbf0630151ef826c5264a2 100644
--- a/roles/sysconfig/defaults/main.yml
+++ b/roles/sysconfig/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 repos_prefix: "{{ envsetup_apt_cache_url | d('http://', true) }}"
 repos_deb: deb.debian.org
 repos_deb_sec: security.debian.org
@@ -46,7 +45,6 @@ sysconfig_ferm_input_rules:
       - 9090
 sysconfig_ferm_output_rules: []
 sysconfig_ferm_global_settings:
-
 locale_packages:
   - locales
   - tzdata
@@ -59,5 +57,3 @@ sysconfig_logs_packages:
   - rsyslog
 
 ntp_servers: "{{ envsetup_ntp_server | d('0.pool.ntp.org,1.pool.ntp.org,2.pool.ntp.org,3.pool.ntp.org', true) }}"
-
-...
diff --git a/roles/sysconfig/handlers/main.yml b/roles/sysconfig/handlers/main.yml
index d58394d537aeef45574d6603654ab78b8d4bf69a..a565a46301a61d47c16e85cd84384bee5760c473 100644
--- a/roles/sysconfig/handlers/main.yml
+++ b/roles/sysconfig/handlers/main.yml
@@ -43,5 +43,3 @@
   register: apt_status
   retries: 60
   until: apt_status is success or ('Failed to lock apt for exclusive operation' not in apt_status.msg and '/var/lib/dpkg/lock' not in apt_status.msg)
-
-...
diff --git a/roles/sysconfig/tasks/locale.yml b/roles/sysconfig/tasks/locale.yml
index 4e93ac338490308ee186efd3ebbcbb7aef5904b1..250b143ba9f70980130452d981b08cdd72629cb1 100644
--- a/roles/sysconfig/tasks/locale.yml
+++ b/roles/sysconfig/tasks/locale.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install locale packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -18,7 +17,7 @@
   notify: update locale
   ansible.builtin.copy:
     dest: /etc/default/locale
-    mode: '644'
+    mode: "644"
     content: |
       LANG={{ init_locale }}
       LANGUAGE={{ init_locale }}
@@ -28,13 +27,11 @@
   notify: update locale
   ansible.builtin.lineinfile:
     path: /etc/locale.gen
-    regexp: '^(?:# )?({{ init_locale }}.*)$'
+    regexp: ^(?:# )?({{ init_locale }}.*)$
     backrefs: true
-    line: '\1'
+    line: \1
 
 - name: set timezone
   notify: restart cron
   timezone:
     name: "{{ init_timezone }}"
-
-...
diff --git a/roles/sysconfig/tasks/logs.yml b/roles/sysconfig/tasks/logs.yml
index 395d042f69f83cbc1c086b3c3443446d9607468d..1c9aa618d43bce5395f7b7ce39ff2c34f482742d 100644
--- a/roles/sysconfig/tasks/logs.yml
+++ b/roles/sysconfig/tasks/logs.yml
@@ -18,6 +18,4 @@
   ansible.builtin.file:
     path: /var/log/journal
     state: directory
-    mode: '755'
-
-...
+    mode: "755"
diff --git a/roles/sysconfig/tasks/main.yml b/roles/sysconfig/tasks/main.yml
index 8019c58d8667ea6f4974eecb6b0054e5d01420b1..59f685095165bf8e482ab9f435077d035ef8064b 100644
--- a/roles/sysconfig/tasks/main.yml
+++ b/roles/sysconfig/tasks/main.yml
@@ -77,19 +77,19 @@
     content: |
       APT::Periodic::Update-Package-Lists "1";
       APT::Periodic::Unattended-Upgrade "1";
-    mode: '644'
+    mode: "644"
 
 - name: remove old kernel with unattended-upgrades
   ansible.builtin.replace:
     dest: /etc/apt/apt.conf.d/50unattended-upgrades
-    regexp: '^//Unattended-Upgrade::Remove-Unused-Kernel-Packages.*$'
-    replace: 'Unattended-Upgrade::Remove-Unused-Kernel-Packages "true";'
+    regexp: ^//Unattended-Upgrade::Remove-Unused-Kernel-Packages.*$
+    replace: Unattended-Upgrade::Remove-Unused-Kernel-Packages "true";
   notify: restart unattended-upgrades
 
 - name: allow automatic updates for ubicast security repo
   ansible.builtin.lineinfile:
     path: /etc/apt/apt.conf.d/50unattended-upgrades
-    insertafter: '^Unattended-Upgrade::Origins-Pattern {$'
+    insertafter: ^Unattended-Upgrade::Origins-Pattern {$
     line: '        "origin=UbiCast,label=UbiCast-Security";'
     backup: true
   notify: restart unattended-upgrades
@@ -97,16 +97,16 @@
 - name: enable root login via ssh with key
   ansible.builtin.replace:
     dest: /etc/ssh/sshd_config
-    regexp: "^#PermitRootLogin (yes|without-password|prohibit-password)"
-    replace: "PermitRootLogin without-password"
+    regexp: ^#PermitRootLogin (yes|without-password|prohibit-password)
+    replace: PermitRootLogin without-password
   notify: restart sshd
 
 - name: remove disabled root login
   ansible.builtin.replace:
     dest: /root/.ssh/authorized_keys
-    regexp: "^no-port-forwarding,(.+) ssh-"
-    replace: "ssh-"
-    mode: '600'
+    regexp: ^no-port-forwarding,(.+) ssh-
+    replace: ssh-
+    mode: "600"
   failed_when: false
 
 # FIREWALL
@@ -126,5 +126,3 @@
 - include_tasks: locale.yml
 
 - include_tasks: ntp.yml
-
-...
diff --git a/roles/sysconfig/tasks/ntp.yml b/roles/sysconfig/tasks/ntp.yml
index 92f6a3d2654287202d065389b9512cf5558e3b0d..7337589475859de2622a4165c3a10aa65a78ae0f 100644
--- a/roles/sysconfig/tasks/ntp.yml
+++ b/roles/sysconfig/tasks/ntp.yml
@@ -1,8 +1,6 @@
 ---
-
 - name: gathering services
   ansible.builtin.service_facts:
-
 - name: ntp disable systemd-timesyncd service
   notify: restart ntp
   ansible.builtin.systemd:
@@ -10,8 +8,7 @@
     enabled: false
     daemon_reload: true
     state: stopped
-  when: ('systemd-timesyncd.service' in ansible_facts.services)
-        and (ansible_facts.services['systemd-timesyncd.service'].status != 'not-found')
+  when: ('systemd-timesyncd.service' in ansible_facts.services) and (ansible_facts.services['systemd-timesyncd.service'].status != 'not-found')
 
 - name: ntp install
   ansible.builtin.apt:
@@ -29,12 +26,10 @@
     backup: true
     src: ntp.conf.j2
     dest: /etc/ntp.conf
-    mode: '644'
+    mode: "644"
 
 - name: ensure ntp is running
   ansible.builtin.service:
     name: ntp
     enabled: true
     state: started
-
-...
diff --git a/roles/sysconfig/tasks/repos.yml b/roles/sysconfig/tasks/repos.yml
index 4d2b06501d01f4a656e75b16c733a95a656fe297..31b417ad55adbb5be1361e0afad19c744efd5d09 100644
--- a/roles/sysconfig/tasks/repos.yml
+++ b/roles/sysconfig/tasks/repos.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: debian 10 apt repo sources list
   when:
     - not offline_mode | d(false)
@@ -8,7 +7,7 @@
   notify: update cache
   ansible.builtin.copy:
     dest: /etc/apt/sources.list
-    mode: '644'
+    mode: "644"
     content: |
       deb {{ repos_prefix }}{{ repos_deb }}/debian {{ repos_release }} main contrib non-free
       deb {{ repos_prefix }}{{ repos_deb }}/debian {{ repos_release }}-updates main contrib non-free
@@ -22,7 +21,7 @@
   notify: update cache
   ansible.builtin.copy:
     dest: /etc/apt/sources.list
-    mode: '644'
+    mode: "644"
     content: |
       deb {{ repos_prefix }}{{ repos_deb }}/debian {{ repos_release }} main contrib non-free
       deb {{ repos_prefix }}{{ repos_deb }}/debian {{ repos_release }}-updates main contrib non-free
@@ -74,5 +73,3 @@
     repo: deb https://{{ repos_skyreach_host }} packaging/apt/ubicast-security-updates/bullseye/
     filename: ubicast-secu
     update_cache: true
-
-...
diff --git a/roles/tester/defaults/main.yml b/roles/tester/defaults/main.yml
index c05f4349c4ab2e24787c652951ae9ef799f70c54..e497e85692fd7bb87f3b7cb3a163a61906d31951 100644
--- a/roles/tester/defaults/main.yml
+++ b/roles/tester/defaults/main.yml
@@ -1,9 +1,6 @@
 ---
-
 tester_packages:
   - ubicast-env
   - ubicast-tester
   - ubicast-tester-nudgis
   - ubicast-tester-system
-
-...
diff --git a/roles/tester/tasks/main.yml b/roles/tester/tasks/main.yml
index 5b148f460ad332523b5d15c084891dde68a9c720..8d87613734ad4c9565602d7791c287333a731619 100644
--- a/roles/tester/tasks/main.yml
+++ b/roles/tester/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: install tester packages
   ansible.builtin.apt:
     force_apt_get: true
@@ -9,5 +8,3 @@
   register: apt_status
   retries: 60
   until: apt_status is success or ('Failed to lock apt for exclusive operation' not in apt_status.msg and '/var/lib/dpkg/lock' not in apt_status.msg)
-
-...
diff --git a/roles/users/defaults/main.yml b/roles/users/defaults/main.yml
index 2037801c737949925bc79a66afaddd45e1fbdcff..5af3f048635067201b41ed9db69f70830147a0b1 100644
--- a/roles/users/defaults/main.yml
+++ b/roles/users/defaults/main.yml
@@ -1,5 +1,4 @@
 ---
-
 users:
   - name: ubicast
     passwd: "{{ envsetup_shell_ubicast_pwd | password_hash('sha512', 'ubicastsalt') }}"
@@ -10,5 +9,3 @@ users_ssh_authorized_keys:
   - "{{ envsetup_ssh_allowed_keys | d('') }}"
 
 users_root_change: true
-
-...
diff --git a/roles/users/handlers/main.yml b/roles/users/handlers/main.yml
index cbc6f332ac43dd2b59fe05cc1eadd3472a872924..77e6bcc744a0cf82fbc11b2a7293ff4c5a6c65c3 100644
--- a/roles/users/handlers/main.yml
+++ b/roles/users/handlers/main.yml
@@ -1,8 +1,5 @@
 ---
-
 - name: restart sshd
   ansible.builtin.service:
     name: sshd
     state: restarted
-
-...
diff --git a/roles/users/tasks/main.yml b/roles/users/tasks/main.yml
index b4a4a12be07cf29c8266d4b631772694e91475df..271a3936c8d7ee37108f2113ff210a98e34eb5e0 100644
--- a/roles/users/tasks/main.yml
+++ b/roles/users/tasks/main.yml
@@ -1,5 +1,4 @@
 ---
-
 - name: create users groups
   loop: "{{ users }}"
   ansible.builtin.group:
@@ -32,28 +31,28 @@
   ansible.builtin.copy:
     src: .bashrc
     dest: ~{{ item.name }}/.bashrc
-    mode: '644'
+    mode: "644"
 
 - name: copy .vimrc
   loop: "{{ users }}"
   ansible.builtin.copy:
     src: .vimrc
     dest: ~{{ item.name }}/.vimrc
-    mode: '644'
+    mode: "644"
 
 - name: copy .bashrc for root
   when: users_root_change
   ansible.builtin.copy:
     src: .bashrc
     dest: ~root/.bashrc
-    mode: '644'
+    mode: "644"
 
 - name: copy .vimrc for root
   when: users_root_change
   ansible.builtin.copy:
     src: .vimrc
     dest: ~root/.vimrc
-    mode: '644'
+    mode: "644"
 
 - name: set users allowed ssh keys
   loop: "{{ users | product(users_ssh_authorized_keys) | list }}"
@@ -71,7 +70,7 @@
   ansible.builtin.copy:
     dest: /etc/sudoers.d/nopasswd
     validate: visudo -cf %s
-    mode: '440'
+    mode: "440"
     content: |
       %sudo ALL=(ALL) NOPASSWD: ALL
 
@@ -80,10 +79,8 @@
   ansible.builtin.apt:
     force_apt_get: true
     install_recommends: false
-    name: "ubicast-ssh-access"
+    name: ubicast-ssh-access
     state: latest
   register: apt_status
   retries: 60
   until: apt_status is success or ('Failed to lock apt for exclusive operation' not in apt_status.msg and '/var/lib/dpkg/lock' not in apt_status.msg)
-
-...