Commit 6d2184a9 authored by Stéphane Diemer

Merge branch 't32853-ovh_cloud_provisioning' into 'master'

See merge request mediaserver/envsetup!25
parents 2f5addd9 783a826d
Showing with 799 additions and 77 deletions
import socket


def get_status(host):
    """Return the status line sent by the service listening on port 8543."""
    ip = host.interface('eth0').addresses[0]
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((ip, 8543))
    data = s.recv(1024)
    return data.rstrip().decode('utf-8')
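A minimal usage sketch for the helper above (the test name and the expected "OK" reply are assumptions; the script itself only returns whatever status line the service on port 8543 sends back):

def test_status_socket(host):
    # Hypothetical testinfra check built on get_status(); assumes the
    # daemon answers with a short status line such as "OK".
    assert get_status(host) == "OK"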
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("mediaserver")


def test_haproxy_is_installed(host):
    p = host.package("haproxy")
    assert p.is_installed


def test_haproxy_service(host):
    s = host.service("haproxy")
    assert s.is_running


def test_haproxy_socket(host):
    s = host.socket("tcp://0.0.0.0:54321")
    assert s.is_listening
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("mediaserver")


def test_server_is_installed(host):
    p = host.package("ubicast-mediaserver")
    assert p.is_installed


def test_server_user(host):
    u = host.user("msuser")
    assert u.exists
    assert u.name == "msuser"


def test_server_nginx(host):
    f = host.file("/etc/nginx/sites-available/mediaserver-msuser.conf")
    assert f.exists


def test_server_service(host):
    s = host.service("mediaserver")
    assert s.is_running
    assert s.is_enabled


def test_server_socket(host):
    s = host.socket("tcp://0.0.0.0:443")
    assert s.is_listening


def test_fail2ban_conf(host):
    f = host.file("/etc/fail2ban/jail.d/mediaserver.conf")
    assert f.exists


def test_fail2ban_service(host):
    s = host.service("fail2ban")
    assert s.is_running
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("postgres")


def test_psycopg2_is_installed(host):
    p = host.package("python3-psycopg2")
    assert p.is_installed


def test_postgres_is_installed(host):
    p = host.package("postgresql")
    assert p.is_installed


def test_postgres_user(host):
    u = host.user("postgres")
    assert u.exists
    assert u.name == "postgres"


def test_postgres_service(host):
    s = host.service("postgresql@11-main")
    assert s.is_running


def test_postgres_socket(host):
    s = host.socket("tcp://127.0.0.1:5432")
    assert s.is_listening
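Beyond is_listening, a slightly stronger check could confirm that the cluster actually answers queries (a sketch; the psql invocation is an assumption, not part of the original suite):

def test_postgres_accepts_connections(host):
    # Run a trivial query as the postgres system user; this only passes
    # if the cluster is up and accepting local connections.
    cmd = host.run("sudo -u postgres psql -tAc 'SELECT 1'")
    assert cmd.succeeded
    assert cmd.stdout.strip() == "1"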
---
- name: OVH CLOUD DEPROVISIONING
  hosts: localhost
  connection: local
  tags: [ 'never', 'force-delete' ]
  vars:
    ovh_prefix: cloud
  tasks:
    - name: list all servers in the cloud project
      os_server_info:
        availability_zone: nova
        server: "{{ ovh_prefix }}-*"
        filters:
          vm_state: active
      register: result_json
    - name: register variables for future use
      set_fact:
        current_servers: "{{ result_json.openstack_servers }}"
        current_server_inventory: "{{ query('inventory_hostnames', 'all:!localhost') | flatten(1) }}"
    - name: delete servers that are absent from the inventory
      os_server:
        state: absent
        name: "{{ item.name }}"
      when:
        - item.name not in current_server_inventory
      loop: "{{ current_servers }}"
...
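For reference, the cleanup that os_server performs can be reproduced directly with openstacksdk, which the os_* modules wrap (a sketch; the "ovh" clouds.yaml profile and the inventory names are assumptions):

import openstack

conn = openstack.connect(cloud="ovh")  # assumes a clouds.yaml profile named "ovh"
inventory = {"cloud-ms1", "cloud-ms2"}  # hypothetical inventory server names
for server in conn.compute.servers():
    # Mirrors "state: absent" for servers that are no longer in the inventory.
    if server.name.startswith("cloud-") and server.name not in inventory:
        conn.compute.delete_server(server)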
---
- name: SET VARIABLES WHEN PROVISIONING
  hosts: localhost
  tags: always
  gather_facts: no
  run_once: true
  tasks:
    - name: disable provisioning by default
      set_fact:
        provisioning: false
      delegate_facts: yes
    - name: enable provisioning
      set_fact:
        provisioning: true
      delegate_facts: yes
      when: ('force-delete' not in ansible_run_tags)

- name: OVH CLOUD PROVISIONING
  hosts: localhost
  connection: local
  tags: always
  vars:
    ovh_prefix: cloud
  tasks:
    - name: list all servers in the cloud project
      os_server_info:
        availability_zone: nova
        server: "{{ ovh_prefix }}-*"
        filters:
          vm_state: active
      register: result_json
    - name: register variables for future use
      set_fact:
        current_servers: "{{ result_json.openstack_servers }}"
    - name: add existing hosts to inventory
      add_host:
        name: "{{ item.metadata.hostname }}"
        ansible_host: "{{ item.accessIPv4 }}"
        ansible_user: debian
        ansible_become: yes
      loop: "{{ current_servers }}"
    - name: launch compute instances for non-existent hosts
      os_server:
        state: present
        name: "{{ ovh_prefix }}-{{ item }}"
        availability_zone: nova
        image: "{{ hostvars[item].image | default('a75cc6c8-f697-48a6-a820-37e68621b07a') }}"
        flavor: "{{ hostvars[item].flavor | default('b2-7-flex') }}"
        security_groups: default
        key_name: gitlab
        meta:
          hostname: "{{ item }}"
        nics:
          - net-name: Ext-Net
          - net-name: backend
      when:
        - hostvars[item].ansible_host is defined
        - hostvars[item].ansible_host == "ovhcloud"
        - hostvars.localhost.provisioning == true
        - item not in current_servers | map(attribute='metadata.hostname') | list
      register: openstack_servers
      loop: "{{ query('inventory_hostnames', 'all:!localhost') }}"
    - name: add created hosts to inventory
      add_host:
        name: "{{ item.metadata.hostname }}"
        ansible_host: "{{ item.accessIPv4 }}"
        ansible_user: debian
        ansible_become: yes
      loop: "{{ openstack_servers | json_query('results[*].openstack') }}"
...
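The create step above maps closely to openstacksdk's cloud layer, which os_server uses internally (a sketch; the host name is hypothetical, the image/flavor values are the defaults from the play, and the "ovh" profile name is an assumption):

import openstack

conn = openstack.connect(cloud="ovh")
# Rough equivalent of the os_server task for one missing host.
server = conn.create_server(
    name="cloud-ms1",  # hypothetical "{{ ovh_prefix }}-{{ item }}" value
    image="a75cc6c8-f697-48a6-a820-37e68621b07a",
    flavor="b2-7-flex",
    key_name="gitlab",
    network=["Ext-Net", "backend"],
    meta={"hostname": "ms1"},
    wait=True,
)
print(server.access_ipv4)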
@@ -23,7 +23,7 @@
 - name: POSTGRES HA CLIENTS
   hosts: mediaserver
-  tags: postgres
+  tags: [ 'postgres', 'mediaserver' ]
   pre_tasks:
     - name: check that haproxy is configured
       assert:
@@ -32,7 +32,7 @@
   tags: import
 - import_playbook: netcapture.yml
   tags: netcapture
-- import_playbook: bench.yml
-  tags: bench
+# - import_playbook: bench.yml
+#   tags: bench
 ...
#!/usr/bin/env ansible-playbook
---
- name: DOCKER CONTAINERS PROVISIONING
  hosts: localhost
---
- import_playbook: ovh/provisioning.yml

- name: NFS mount provisioning
  hosts: mediaserver
  tags: always
  gather_facts: no
  vars:
    ovh_prefix: cloud
  tasks:
    - name: wait for system to become reachable
      wait_for_connection:
      when:
        - hostvars.localhost.provisioning == true
    - name: gather facts for the first time
      setup:
      when:
        - hostvars.localhost.provisioning == true
    - name: allow ip on ovh nas_ha
      ovh_nasha:
        name: "{{ item.name }}"
        partition: "{{ item.partition }}"
        ip: "{{ hostvars[inventory_hostname].ansible_host }}"
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
      loop: "{{ ovh_nas_ha | flatten(levels=1) }}"
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']
        - hostvars.localhost.provisioning == true
      delegate_to: localhost
    - name: install required packages for nfs
      apt:
        name: nfs-common
        state: latest
        update_cache: yes
      when:
        - hostvars.localhost.provisioning == true
    - name: mount the correct directory
      mount:
        fstype: nfs
        opts: "{{ item.mount_options }}"
        dump: "0"
        passno: "0"
        state: mounted
        src: "{{ item.mount_ip }}:{{ item.name }}/{{ item.partition }}"
        path: "{{ item.mount_endpoint }}"
      loop: "{{ ovh_nas_ha | flatten(levels=1) }}"
      retries: 20
      delay: 30
      when:
        - hostvars.localhost.provisioning == true

- import_playbook: site.yml
  when: hostvars.localhost.provisioning == true

- name: Verify mediaserver installation then add server to load balancer
  hosts: mediaserver
  gather_facts: no
  vars:
    ovh_prefix: cloud
  tasks:
    - name: launch script to test mediaserver configuration
      command: /root/envsetup/tests/scripts/test_nginx_status.py
      register: nginx_vhosts_status
      when:
        - hostvars.localhost.provisioning == true
    - name: allow ip on load balancer (http 443)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167775
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 443
        status: active
        ssl: yes
        farmtype: http
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: present
      delegate_to: localhost
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']
        - nginx_vhosts_status is succeeded
        - hostvars.localhost.provisioning == true

- name: Verify wowza installation then add server to load balancer
  hosts: wowza
  gather_facts: no
  vars:
    ovh_prefix: cloud
  tasks:
    - name: launch script to test wowza configuration
      command: /root/envsetup/tests/scripts/test_wowza.py
      register: wowza_status
      when:
        - hostvars.localhost.provisioning == true
    - name: allow ip on load balancer (tcp 1935)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167776
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 1935
        status: active
        ssl: no
        farmtype: tcp
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: present
      delegate_to: localhost
      when:
        - groups['wowza'] is defined
        - inventory_hostname in groups['wowza']
        - wowza_status is succeeded
        - inventory_hostname == groups['wowza'][0]
        - hostvars.localhost.provisioning == true

- import_playbook: ovh/deprovisioning.yml

- name: OVH CLOUD DEPROVISIONING WOWZA
  hosts: wowza
  gather_facts: no
  tags: [ 'never', 'force-delete' ]
  tasks:
    - name: remove ip on load balancer (tcp 1935)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167776
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 1935
        status: active
        ssl: no
        farmtype: tcp
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: absent
      delegate_to: localhost
      when:
        - groups['wowza'] is defined
        - inventory_hostname in groups['wowza']

- name: OVH CLOUD DEPROVISIONING MEDIASERVER
  hosts: mediaserver
  gather_facts: no
  tags: [ 'never', 'force-delete' ]
  tasks:
    - name: remove ip on load balancer (http 443)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167775
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 443
        status: active
        ssl: yes
        farmtype: http
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: absent
      delegate_to: localhost
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']
    - name: remove ip on ovh nas_ha
      ovh_nasha:
        name: "{{ item.name }}"
        partition: "{{ item.partition }}"
        ip: "{{ hostvars[inventory_hostname].ansible_host }}"
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: absent
      loop: "{{ ovh_nas_ha | flatten(levels=1) }}"
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']
      delegate_to: localhost

- name: REFRESH LOAD BALANCER
  hosts: localhost
  gather_facts: no
  tags: always
  tasks:
    - name: refresh loadbalancer state
      ovh_loadbalancer:
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        refresh: yes
      delegate_to: localhost
      run_once: yes

- name: COMPLETE TEST
  hosts: mediaserver
  tags: tester
  tasks:
    - name: launch tester.py on all mediaservers
      command: /root/envsetup/tests/tester.py
      register: tester_status
      when:
        - hostvars.localhost.provisioning == true
...
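The custom ovh_nasha and ovh_loadbalancer_* modules used above are assumed to wrap the public OVH API; the same NAS-HA ACL call can be made with the ovh package pinned in requirements.in (a sketch; the service and partition names are hypothetical placeholders):

import ovh

client = ovh.Client(
    endpoint="ovh-eu",          # matches the ovh_endpoint variable
    application_key="APP_KEY",  # placeholders for the vaulted credentials
    application_secret="APP_SECRET",
    consumer_key="CONSUMER_KEY",
)
# Rough equivalent of the "allow ip on ovh nas_ha" task for one partition.
client.post(
    "/dedicated/nasha/zpool-12345/partition/mediaserver/access",  # hypothetical names
    ip="203.0.113.10",
)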
#!/usr/bin/env ansible-playbook
---
- import_playbook: ovh/provisioning.yml

- name: LIST OVH SERVERS
  hosts: localhost
  tasks:
    - name: List all OVH servers
      debug:
        msg: "{{ item }} / {{ hostvars[item].ansible_host }}"
      loop: "{{ query('inventory_hostnames', 'all:!localhost') }}"
...
@@ -4,5 +4,7 @@ flake8
 git+git://github.com/atmaniak/molecule@e03437923b302fca1bd7b4f6030c6956ad00367a#egg=molecule[docker]
 #molecule[docker]
 pip-tools
-testinfra
+pytest-testinfra
 yamllint
+openstacksdk
+ovh
@@ -4,70 +4,240 @@
#
# pip-compile --output-file=requirements.dev.txt requirements.dev.in
#
ansible-lint==4.2.0 # via -r requirements.dev.in
ansible==2.9.18 # via -r requirements.in, ansible-lint, molecule
arrow==0.15.5 # via jinja2-time
attrs==19.3.0 # via pytest
bcrypt==3.1.7 # via paramiko
binaryornot==0.4.4 # via cookiecutter
cerberus==1.3.2 # via molecule
certifi==2020.4.5.1 # via requests
cffi==1.14.0 # via bcrypt, cryptography, pynacl
chardet==3.0.4 # via binaryornot, requests
click-completion==0.5.2 # via molecule
click-help-colors==0.8 # via molecule
click==7.1.1 # via click-completion, click-help-colors, cookiecutter, molecule, pip-tools, python-gilt
colorama==0.4.3 # via molecule, python-gilt
cookiecutter==1.7.0 # via molecule
cryptography==2.9 # via ansible, paramiko
distro==1.5.0 # via selinux
docker==4.2.0 # via molecule
entrypoints==0.3 # via flake8
fasteners==0.15 # via python-gilt
flake8==3.7.9 # via -r requirements.dev.in
future==0.18.2 # via cookiecutter
idna==2.9 # via requests
jinja2-time==0.2.0 # via cookiecutter
jinja2==2.11.2 # via ansible, click-completion, cookiecutter, jinja2-time, molecule
markupsafe==1.1.1 # via jinja2
mccabe==0.6.1 # via flake8
git+git://github.com/atmaniak/molecule@e03437923b302fca1bd7b4f6030c6956ad00367a#egg=molecule[docker] # via -r requirements.dev.in
monotonic==1.5 # via fasteners
more-itertools==8.2.0 # via pytest
netaddr==0.7.19 # via -r requirements.in
packaging==20.3 # via pytest
paramiko==2.7.1 # via molecule
pathspec==0.8.0 # via yamllint
pexpect==4.8.0 # via molecule
pip-tools==5.0.0 # via -r requirements.dev.in
pluggy==0.13.1 # via molecule, pytest
poyo==0.5.0 # via cookiecutter
ptyprocess==0.6.0 # via pexpect
py==1.8.1 # via pytest
pycodestyle==2.5.0 # via flake8
pycparser==2.20 # via cffi
pyflakes==2.1.1 # via flake8
pynacl==1.3.0 # via paramiko
pyparsing==2.4.7 # via packaging
pytest==5.4.1 # via testinfra
python-dateutil==2.8.1 # via arrow
python-gilt==1.2.3 # via molecule
pyyaml==5.3.1 # via -r requirements.in, ansible, ansible-lint, molecule, python-gilt, yamllint
requests==2.23.0 # via cookiecutter, docker
ruamel.yaml.clib==0.2.0 # via ruamel.yaml
ruamel.yaml==0.16.10 # via ansible-lint
selinux==0.2.1 # via molecule
sh==1.13.1 # via molecule, python-gilt
shellingham==1.3.2 # via click-completion
six==1.14.0 # via ansible-lint, bcrypt, click-completion, cryptography, docker, fasteners, packaging, pip-tools, pynacl, python-dateutil, websocket-client
tabulate==0.8.7 # via molecule
testinfra==5.0.0 # via -r requirements.dev.in
tree-format==0.1.2 # via molecule
urllib3==1.25.9 # via requests
wcwidth==0.1.9 # via pytest
websocket-client==0.57.0 # via docker
whichcraft==0.6.1 # via cookiecutter
yamllint==1.22.1 # via -r requirements.dev.in, molecule
ansible-lint==4.2.0
# via -r requirements.dev.in
ansible==2.9.18
# via
# -r requirements.in
# ansible-lint
# molecule
appdirs==1.4.4
# via openstacksdk
arrow==0.17.0
# via jinja2-time
attrs==20.3.0
# via pytest
bcrypt==3.2.0
# via paramiko
binaryornot==0.4.4
# via cookiecutter
cerberus==1.3.2
# via molecule
certifi==2020.12.5
# via requests
cffi==1.14.4
# via
# bcrypt
# cryptography
# pynacl
chardet==4.0.0
# via
# binaryornot
# requests
click-completion==0.5.2
# via molecule
click-help-colors==0.9
# via molecule
click==7.1.2
# via
# click-completion
# click-help-colors
# cookiecutter
# molecule
# pip-tools
# python-gilt
colorama==0.4.4
# via
# molecule
# python-gilt
# rich
commonmark==0.9.1
# via rich
cookiecutter==1.7.2
# via molecule
cryptography==3.3.1
# via
# ansible
# openstacksdk
# paramiko
decorator==4.4.2
# via
# dogpile.cache
# openstacksdk
distro==1.5.0
# via selinux
docker==4.4.1
# via molecule
dogpile.cache==1.1.1
# via openstacksdk
fasteners==0.16
# via python-gilt
flake8==3.8.4
# via -r requirements.dev.in
idna==2.10
# via requests
iniconfig==1.1.1
# via pytest
iso8601==0.1.13
# via
# keystoneauth1
# openstacksdk
jinja2-time==0.2.0
# via cookiecutter
jinja2==2.11.2
# via
# ansible
# click-completion
# cookiecutter
# jinja2-time
# molecule
jmespath==0.10.0
# via openstacksdk
jsonpatch==1.28
# via openstacksdk
jsonpointer==2.0
# via jsonpatch
keystoneauth1==4.3.0
# via openstacksdk
markupsafe==1.1.1
# via
# cookiecutter
# jinja2
mccabe==0.6.1
# via flake8
git+git://github.com/atmaniak/molecule@e03437923b302fca1bd7b4f6030c6956ad00367a#egg=molecule[docker]
# via -r requirements.dev.in
munch==2.5.0
# via openstacksdk
netaddr==0.8.0
# via -r requirements.in
netifaces==0.10.9
# via openstacksdk
openstacksdk==0.52.0
# via
# -r requirements.dev.in
# -r requirements.in
os-service-types==1.7.0
# via
# keystoneauth1
# openstacksdk
ovh==0.5.0
# via
# -r requirements.dev.in
# -r requirements.in
packaging==20.8
# via pytest
paramiko==2.7.2
# via molecule
pathspec==0.8.1
# via yamllint
pbr==5.5.1
# via
# keystoneauth1
# openstacksdk
# os-service-types
# stevedore
pexpect==4.8.0
# via molecule
pip-tools==5.5.0
# via -r requirements.dev.in
pluggy==0.13.1
# via
# molecule
# pytest
poyo==0.5.0
# via cookiecutter
ptyprocess==0.7.0
# via pexpect
py==1.10.0
# via pytest
pycodestyle==2.6.0
# via flake8
pycparser==2.20
# via cffi
pyflakes==2.2.0
# via flake8
pygments==2.7.3
# via rich
pynacl==1.4.0
# via paramiko
pyparsing==2.4.7
# via packaging
pytest-testinfra==6.1.0
# via -r requirements.dev.in
pytest==6.2.1
# via pytest-testinfra
python-dateutil==2.8.1
# via arrow
python-gilt==1.2.3
# via molecule
python-slugify==4.0.1
# via cookiecutter
pyyaml==5.3.1
# via
# -r requirements.in
# ansible
# ansible-lint
# molecule
# openstacksdk
# python-gilt
# yamllint
requests==2.25.1
# via
# cookiecutter
# docker
# keystoneauth1
requestsexceptions==1.4.0
# via openstacksdk
rich==9.6.1
# via ansible-lint
ruamel.yaml.clib==0.2.2
# via ruamel.yaml
ruamel.yaml==0.16.12
# via ansible-lint
selinux==0.2.1
# via molecule
sh==1.13.1
# via
# molecule
# python-gilt
shellingham==1.3.2
# via click-completion
six==1.15.0
# via
# bcrypt
# click-completion
# cookiecutter
# cryptography
# docker
# fasteners
# keystoneauth1
# munch
# pynacl
# python-dateutil
# websocket-client
stevedore==3.3.0
# via
# dogpile.cache
# keystoneauth1
tabulate==0.8.7
# via molecule
text-unidecode==1.3
# via python-slugify
toml==0.10.2
# via pytest
tree-format==0.1.2
# via molecule
typing-extensions==3.7.4.3
# via rich
urllib3==1.26.2
# via requests
websocket-client==0.57.0
# via docker
yamllint==1.25.0
# via
# -r requirements.dev.in
# molecule
# The following packages are considered to be unsafe in a requirements file:
# pip
-ansible ~= 2.9.0
+ansible<2.10
 netaddr
 pyyaml
+openstacksdk
+ovh
@@ -8,8 +8,22 @@ ansible==2.9.18 # via -r requirements.in
 cffi==1.14.0 # via cryptography
 cryptography==2.9 # via ansible
 jinja2==2.11.2 # via ansible
+jmespath==0.10.0 # via openstacksdk
+jsonpatch==1.28 # via openstacksdk
+jsonpointer==2.0 # via jsonpatch
+keystoneauth1==4.3.0 # via openstacksdk
 markupsafe==1.1.1 # via jinja2
-netaddr==0.7.19 # via -r requirements.in
+munch==2.5.0 # via openstacksdk
+netaddr==0.8.0 # via -r requirements.in
+netifaces==0.10.9 # via openstacksdk
+openstacksdk==0.52.0 # via -r requirements.in
+os-service-types==1.7.0 # via keystoneauth1, openstacksdk
+ovh==0.5.0 # via -r requirements.in
+pbr==5.5.1 # via keystoneauth1, openstacksdk, os-service-types, stevedore
 pycparser==2.20 # via cffi
-pyyaml==5.3.1 # via -r requirements.in, ansible
-six==1.14.0 # via cryptography
+pyyaml==5.3.1 # via -r requirements.in, ansible, openstacksdk
+requests==2.25.1 # via keystoneauth1
+requestsexceptions==1.4.0 # via openstacksdk
+six==1.15.0 # via cryptography, keystoneauth1, munch
+stevedore==3.3.0 # via dogpile.cache, keystoneauth1
+urllib3==1.26.2 # via requests
@@ -14,6 +14,7 @@
     mode: 0644
     owner: root
     group: root
+  changed_when: "'molecule-idempotence-notest' not in ansible_skip_tags"
 - name: ensure celerity server is running
   service:
@@ -4,7 +4,7 @@
 SIGNING_KEY = '{{ celerity_signing_key }}'
 SERVER_URL = 'https://{{ celerity_server }}:6200'
-WORKERS_COUNT = {{ celerity_workers_count }}
+QUEUES_PER_WORKER = {{ celerity_workers_count }}
 # MediaServer interactions
 MEDIASERVERS = {
---
- name: requirements install
  apt:
    force_apt_get: true
    install_recommends: false
    name:
      - apt-transport-https
      - ca-certificates
      - curl
      - gnupg-agent
      - lsb-release
      - software-properties-common
- name: add docker key
  when:
    - not offline_mode | d(false)
  apt_key:
    url: https://download.docker.com/linux/{{ ansible_distribution | lower }}/gpg
    state: present
- name: add docker debian repository
  when:
    - not offline_mode | d(false)
  apt_repository:
    repo: deb [arch=amd64] https://download.docker.com/linux/{{ ansible_distribution | lower }} {{ ansible_distribution_release | lower }} stable
    state: present
    update_cache: yes
- name: install docker
  when:
    - not offline_mode | d(false)
  apt:
    name: docker-ce
    state: latest
    update_cache: yes
- name: docker service
  when:
    - not offline_mode | d(false)
  systemd:
    name: docker
    enabled: true
    state: started
- name: install requirements for docker python binding
  when:
    - not offline_mode | d(false)
  apt:
    name: python3-docker
    state: latest
    update_cache: yes
...
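Because the last task installs python3-docker, the daemon can be sanity-checked from Python once this role has run (a sketch using the docker SDK's standard entry points):

import docker

# Connects through /var/run/docker.sock by default.
client = docker.from_env()
print(client.version()["Version"])
assert client.ping()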
@@ -6,4 +6,9 @@
     name: ferm
     state: restarted
+- name: restart fail2ban
+  when: ansible_facts.services['fail2ban.service'] is defined
+  systemd:
+    name: fail2ban
+    state: restarted
 ...
@@ -15,28 +15,36 @@
 - name: global
   when: ferm_global_settings | d(false)
-  notify: restart ferm
+  notify:
+    - restart ferm
+    - restart fail2ban
   copy:
     dest: /etc/ferm/ferm.d/{{ ferm_rules_filename }}.conf
     content: "{{ ferm_global_settings }}"
 - name: input
   when: ferm_input_rules | length > 0
-  notify: restart ferm
+  notify:
+    - restart ferm
+    - restart fail2ban
   template:
     src: ferm_rules_input.conf.j2
     dest: /etc/ferm/input.d/{{ ferm_rules_filename }}.conf
 - name: output
   when: ferm_output_rules | length > 0
-  notify: restart ferm
+  notify:
+    - restart ferm
+    - restart fail2ban
   template:
     src: ferm_rules_output.conf.j2
     dest: /etc/ferm/output.d/{{ ferm_rules_filename }}.conf
 - name: forward
   when: ferm_forward_rules | length > 0
-  notify: restart ferm
+  notify:
+    - restart ferm
+    - restart fail2ban
   template:
     src: ferm_rules_forward.conf.j2
     dest: /etc/ferm/forward.d/{{ ferm_rules_filename }}.conf