---
# Provision OVH cloud instances before any configuration runs.
- import_playbook: ovh/provisioning.yml
# Configure NFS mounts on freshly provisioned mediaservers and open the
# OVH NAS-HA ACL for each host. All tasks are gated on the `provisioning`
# fact set on localhost by ovh/provisioning.yml.
- name: NFS mount provisioning
  hosts: mediaserver
  tags: always
  gather_facts: false  # hosts may not be reachable yet; facts gathered below
  vars:
    ovh_prefix: cloud
  tasks:
    - name: wait for system to become reachable
      wait_for_connection:
      when:
        - hostvars.localhost.provisioning == true

    - name: gather facts for first time
      setup:
      when:
        - hostvars.localhost.provisioning == true

    # Authorize this host's IP on every NAS-HA partition (API call from
    # the control node, hence delegate_to: localhost).
    - name: allow ip on ovh nas_ha
      ovh_nasha:
        name: "{{ item.name }}"
        partition: "{{ item.partition }}"
        ip: "{{ hostvars[inventory_hostname].ansible_host }}"
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
      loop: "{{ ovh_nas_ha | flatten(levels=1) }}"
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']
        - hostvars.localhost.provisioning == true
      delegate_to: localhost

    - name: install required packages for nfs
      apt:
        name: nfs-common
        state: latest
        update_cache: true
      when:
        - hostvars.localhost.provisioning == true

    # The NAS ACL can take a while to propagate, so retry the mount.
    # BUGFIX: "passfalse" is not a valid mount option — the fsck pass
    # number field is "passno". Also added register/until: without
    # `until`, older ansible-core silently ignores `retries`.
    - name: mount correct directory
      mount:
        fstype: nfs
        opts: "{{ item.mount_options }}"
        dump: "0"
        passno: "0"
        state: mounted
        src: "{{ item.mount_ip }}:{{ item.name }}/{{ item.partition }}"
        path: "{{ item.mount_endpoint }}"
      loop: "{{ ovh_nas_ha | flatten(levels=1) }}"
      register: nfs_mount_result
      until: nfs_mount_result is succeeded
      retries: 20
      delay: 30
      when:
        - hostvars.localhost.provisioning == true
# Run the main site configuration only when provisioning actually happened.
- import_playbook: site.yml
  when: hostvars.localhost.provisioning == true
# Smoke-test each mediaserver's nginx vhosts; only hosts that pass are
# registered on the HTTPS farm of the OVH load balancer.
- name: Verify mediaserver installation then add server to load balancer
  hosts: mediaserver
  gather_facts: false
  vars:
    ovh_prefix: cloud
  tasks:
    - name: launch script to test mediaserver configuration
      command: /root/envsetup/tests/scripts/test_nginx_status.py
      register: nginx_vhosts_status
      when:
        - hostvars.localhost.provisioning == true

    # OVH API call runs from the control node; the server is only added
    # when the nginx status check above succeeded.
    - name: allow ip on load balancer (http 443)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167775
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 443
        status: active
        ssl: true
        farmtype: http
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: present
      delegate_to: localhost
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']
        - nginx_vhosts_status is succeeded
        - hostvars.localhost.provisioning == true
# Smoke-test the wowza streaming service; only the FIRST wowza host is
# registered on the RTMP (tcp/1935) farm of the OVH load balancer.
- name: Verify wowza installation then add server to load balancer
  hosts: wowza
  gather_facts: false
  vars:
    ovh_prefix: cloud
  tasks:
    - name: launch script to test wowza configuration
      command: /root/envsetup/tests/scripts/test_wowza.py
      register: wowza_status
      when:
        - hostvars.localhost.provisioning == true

    - name: allow ip on load balancer (tcp 1935)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167776
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 1935
        status: active
        ssl: false
        farmtype: tcp
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: present
      delegate_to: localhost
      when:
        - groups['wowza'] is defined
        - inventory_hostname in groups['wowza']
        - wowza_status is succeeded
        # only the primary wowza node fronts the tcp farm
        - inventory_hostname == groups['wowza'][0]
        - hostvars.localhost.provisioning == true
# Tear down OVH cloud instances (deprovisioning path).
- import_playbook: ovh/deprovisioning.yml
# Remove wowza hosts from the load balancer's tcp/1935 farm. Only runs
# when explicitly requested via the 'force-delete' tag.
- name: OVH CLOUD DEPROVISIONING WOWZA
  hosts: wowza
  gather_facts: false
  tags: ['never', 'force-delete']
  tasks:
    - name: remove ip on load balancer (tcp 1935)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167776
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 1935
        status: active
        # BUGFIX: was `ssl: true`, inconsistent with the provisioning task
        # for the same tcp farm 167776, which registers with `ssl: false`.
        ssl: false
        farmtype: tcp
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: absent
      delegate_to: localhost
      when:
        - groups['wowza'] is defined
        - inventory_hostname in groups['wowza']
# Remove mediaservers from the HTTPS farm and revoke their NAS-HA ACLs.
# Only runs when explicitly requested via the 'force-delete' tag.
# BUGFIX: stray line-number residue (172…198) had been pasted into the
# middle of the first task, making the file unparseable — removed.
- name: OVH CLOUD DEPROVISIONING MEDIASERVER
  hosts: mediaserver
  gather_facts: false
  tags: ['never', 'force-delete']
  tasks:
    - name: remove ip on load balancer (http 443)
      ovh_loadbalancer_farm_server:
        name: "{{ inventory_hostname }}"
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        farmid: 167775
        address: "{{ hostvars[inventory_hostname].ansible_host }}"
        port: 443
        status: active
        ssl: true
        farmtype: http
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: absent
      delegate_to: localhost
      when:
        - groups['mediaserver'] is defined
        - inventory_hostname in groups['mediaserver']

    - name: remove ip on ovh nas_ha
      ovh_nasha:
        name: "{{ item.name }}"
        partition: "{{ item.partition }}"
        ip: "{{ hostvars[inventory_hostname].ansible_host }}"
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        state: absent
      loop: "{{ ovh_nas_ha | flatten(levels=1) }}"
      when: groups['mediaserver'] is defined and inventory_hostname in groups['mediaserver']
      delegate_to: localhost
# Apply pending farm-membership changes by refreshing the load balancer
# configuration once, regardless of how many hosts were touched.
- name: REFRESH LOAD BALANCER
  hosts: localhost
  gather_facts: false
  tags: always
  tasks:
    - name: refresh loadbalancer state
      ovh_loadbalancer:
        loadbalancerid: loadbalancer-ddb43f95d453a8da8271f759eac25b32
        endpoint: "{{ ovh_endpoint }}"
        application_key: "{{ ovh_application_key }}"
        application_secret: "{{ ovh_application_secret }}"
        consumer_key: "{{ ovh_consumer_key }}"
        refresh: true
      delegate_to: localhost
      run_once: true
# Final end-to-end test suite, run on every mediaserver.
- name: COMPLETE TEST
  hosts: mediaserver
  tags: tester
  tasks:
    - name: launcher tester.py for all mediaservers
      command: /root/envsetup/tests/tester.py
      register: tester_status
      when:
        - hostvars.localhost.provisioning == true
...