Commit 857b2ef3 authored by Emmanuel Cohen

Merge branch 'master' into 't32350-improve_benchmark_role'

# Conflicts:
#   playbooks/bench.yml
parents 78bd5440 f59031f8
Showing 141 additions and 94 deletions
@@ -16,7 +16,7 @@ docker:build:
tags:
- docker
rules:
- if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_REF_NAME == "master"'
- if: '$CI_PIPELINE_SOURCE == "push"'
changes:
- .devcontainer/Dockerfile
- requirements.dev.txt
@@ -50,7 +50,7 @@ lint:
rules:
- if: '$CI_PIPELINE_SOURCE == "web"'
- if: '$CI_PIPELINE_SOURCE == "merge_requests"'
- if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_REF_NAME == "master"'
- if: '$CI_PIPELINE_SOURCE == "push"'
changes:
- "*.yml"
- "*.py"
@@ -65,7 +65,7 @@ test:
rules:
- if: '$CI_PIPELINE_SOURCE == "web"'
- if: '$CI_PIPELINE_SOURCE == "merge_requests"'
- if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_REF_NAME == "master"'
- if: '$CI_PIPELINE_SOURCE == "push"'
changes:
- inventories/**/*
- library/**/*
......
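The three rule changes above share one intent: dropping the `&& $CI_COMMIT_REF_NAME == "master"` clause makes the docker:build, lint and test jobs run on pushes to any branch, still gated by the `changes:` path filters. A minimal sketch of the resulting rule, with paths taken from the first hunk:

```yaml
rules:
  # runs for a push on any branch, but only when a listed path changes
  - if: '$CI_PIPELINE_SOURCE == "push"'
    changes:
      - .devcontainer/Dockerfile
      - requirements.dev.txt
```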
@@ -44,7 +44,7 @@ install: venv
install-dev: install
$(PIP_BIN) install -r requirements.dev.txt
[ -d .git/hooks ] || mkdir .git/hooks
ln -sfv .githooks/pre-commit .git/hooks/
ln -sfv .githooks/pre-commit .git/hooks/ || echo "Failed to create pre-commit link"
.PHONY: lint
## lint: Run linters on the project
@@ -76,21 +76,6 @@ endif
$(ANSIBLE_BIN) -i $(i) -l $(l) -m ping all
$(ANSIBLE_PLAYBOOK_BIN) -i $(i) site.yml -e conf_update=true -l $(l) -t $(t)
.PHONY: deploy-ha
## deploy-ha: Run deployment playbooks : i=<inventory-path>, l=<host-or-group>, t=<tag>
deploy-ha:
ifndef i
$(error i is undefined)
endif
ifndef l
$(eval l=all)
endif
ifndef t
$(eval t=all)
endif
$(ANSIBLE_BIN) -i $(i) -l $(l) -m ping all
$(ANSIBLE_PLAYBOOK_BIN) -i $(i) site-ha.yml -e conf_update=true -l $(l) -t $(t)
.PHONY: image-validate
## image-validate: Check that Packer image is valid : build=<path-to-packer-file>
image-validate:
......
@@ -60,7 +60,7 @@ If a computer's response is `UNREACHABLE`, check that it is powered on and accessible
You can deploy the environment with the following command:
```sh
make deploy-ha i=inventories/<client-ha>
make deploy i=inventories/<client-ha>
```
# Known error (patched in the next skyreach release)
@@ -79,6 +79,6 @@ It may be linked to a current skyreach bug (a patch is waiting to be deployed at
You can edit the skyreach configuration manually with the following command:
```sh
ansible -i inventories/<client-ha> -m shell -a "sed -i \"s/'PORT': .*/'PORT': '54321'/g\" /home/skyreach/htdocs/skyreach_site/settings_override.py" ms1
ansible -i inventories/<client-ha> -m shell -a "sed -i \"s/'PORT': .*/'PORT': '54321'/g\" /home/skyreach/skyreach_data/private/settings_override.py" ms1
```
and then deploy again as described in the previous section
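For a one-off fix, the ad-hoc `sed` above is enough; a hedged playbook equivalent using `lineinfile` (the task name and the indentation of the `'PORT'` entry inside settings_override.py are assumptions):

```yaml
- name: force database port in skyreach settings
  lineinfile:
    path: /home/skyreach/skyreach_data/private/settings_override.py
    regexp: "'PORT':"
    line: "        'PORT': '54321',"
```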
@@ -100,9 +100,6 @@ DB_PG_ROOT_PWD=''
CELERITY_SIGNING_KEY='test'
# ⚠ CELERITY_SERVER is used in ubicast-mediaserver package when adding an instance
CELERITY_SERVER='127.0.0.1'
# ⚠ CELERITY_WORKER_IP is used in ubicast-mediaserver package when adding an instance
# worker IP adresses, use commas to separate values
CELERITY_WORKER_IP='127.0.0.1'
# -- Network configuration --
# applied with client configuration step
......
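This removal pairs with the Python change at the end of this commit: worker addresses are no longer declared statically in the configuration but discovered at runtime through the celerity API (see `get_remote_workers_ips` below).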
@@ -6,7 +6,7 @@ export APT="DEBIAN_FRONTEND=noninteractive apt-get -y -q -o Dpkg::Options::=--fo
sudo $APT purge unattended-upgrades
sudo $APT update
sudo $APT install apt-utils
sudo $APT install apt-utils curl
sudo $APT dist-upgrade
exit 0
@@ -11,6 +11,9 @@
template:
src: celerity-config.py.j2
dest: /etc/celerity/config.py
mode: 0644
owner: celerity
group: celerity
- name: ensure celerity server is running
service:
......
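A side note on the `mode`/`owner`/`group` additions here and in the worker role below: ansible-lint flags unquoted octal modes (YAML can silently reinterpret them), so quoting is the safer spelling. A sketch under that assumption, with the task name invented:

```yaml
- name: deploy celerity configuration
  template:
    src: celerity-config.py.j2
    dest: /etc/celerity/config.py
    mode: '0644'   # quoted to avoid YAML octal pitfalls
    owner: celerity
    group: celerity
```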
@@ -23,6 +23,7 @@
changed_when: ceph_check_image.stdout != ceph_image_name
command:
cmd: rbd -n client.{{ ceph_login }} list {{ ceph_pool_name }}
ignore_errors: yes
- name: create rbd image
when:
......
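The new `ignore_errors` completes the usual probe-then-create pattern: listing images may fail on a fresh pool, and the play should fall through to `create rbd image` rather than abort. A sketch of the pattern with names taken from the hunk (the create task's condition and `--size` flag are assumptions; the hunk truncates there):

```yaml
- name: check rbd image
  register: ceph_check_image
  changed_when: ceph_check_image.stdout != ceph_image_name
  ignore_errors: yes
  command:
    cmd: rbd -n client.{{ ceph_login }} list {{ ceph_pool_name }}

- name: create rbd image
  when: ceph_image_name not in ceph_check_image.stdout_lines | d([])
  command:
    cmd: rbd -n client.{{ ceph_login }} create --size {{ ceph_image_size }} {{ ceph_pool_name }}/{{ ceph_image_name }}
```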
@@ -6,6 +6,21 @@
install_recommends: false
name: "{{ server_packages }}"
- name: fetch ssh public key
register: root_ssh_pubkey
slurp:
path: /root/.ssh/id_ed25519.pub
- name: register ssh public key as an ansible fact
set_fact:
pubkey: "{{ root_ssh_pubkey['content'] | b64decode }}"
- name: share ssh public key between cluster members
loop: "{{ groups['mediaserver'] }}"
authorized_key:
user: root
key: "{{ hostvars[item]['pubkey'] }}"
- name: resolve domain name to localhost
when: not in_docker
notify: restart nginx
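The slurp/set_fact/authorized_key chain above assumes `/root/.ssh/id_ed25519.pub` already exists on every mediaserver. If no other task generates it, a hypothetical prerequisite (not part of this diff) could be:

```yaml
- name: ensure root has an ed25519 keypair
  openssh_keypair:
    path: /root/.ssh/id_ed25519
    type: ed25519
```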
@@ -15,7 +30,38 @@
line: '127.0.1.1 {{ item.ms_server_name }}'
backup: true
- name: synchronize configuration
when: groups['mediaserver'] | length > 1
loop:
- /etc/passwd
- /etc/shadow
- /etc/group
synchronize:
src: "{{ item }}"
dest: "{{ item }}"
mode: push
copy_links: yes
set_remote_user: no
delegate_to: "{{ groups['mediaserver'][0] }}"
- name: create instances
when: inventory_hostname == groups['mediaserver'][0]
loop: "{{ server_instances }}"
environment:
MS_ID: "{{ item.ms_id }}"
MS_SERVER_NAME: "{{ item.ms_server_name }}"
MS_API_KEY: "{{ item.ms_api_key }}"
CM_SERVER_NAME: "{{ item.cm_server_name }}"
MS_SUPERUSER_PWD: "{{ item.ms_superuser_pwd }}"
MS_ADMIN_PWD: "{{ item.ms_admin_pwd }}"
command:
cmd: msinstaller.py {{ item.name }} --no-input
creates: /etc/nginx/sites-available/mediaserver-{{ item.name }}.conf
- name: create instances for secondary servers
when:
- groups['mediaserver'] | length > 1
- inventory_hostname != groups['mediaserver'][0]
loop: "{{ server_instances }}"
environment:
MS_ID: "{{ item.ms_id }}"
@@ -73,6 +119,7 @@
regexp: '^RTMP_PWD =.*$'
line: "RTMP_PWD = '{{ server_wowza_live_pwd }}'"
validate: python3 -m py_compile %s
mode: 0644
- name: ensure mediaserver is running
service:
@@ -80,29 +127,6 @@
enabled: true
state: started
# SYNCHRONIZE
- name: sync all mediaservers
when: groups['mediaserver'] | length > 1
block:
- name: save config of first mediaserver
when: inventory_hostname == groups['mediaserver'][0]
register: server_primary_config
loop:
- /etc/passwd
- /etc/shadow
- /etc/group
slurp:
path: "{{ item }}"
- name: deploy saved config
when: inventory_hostname != groups['mediaserver'][0]
loop: "{{ hostvars[groups['mediaserver'][0]].c.results }}"
copy:
dest: "{{ item.source }}"
content: "{{ item.content | b64decode }}"
# FAIL2BAN
- name: fail2ban
......
@@ -11,6 +11,9 @@
template:
src: celerity-config.py.j2
dest: /etc/celerity/config.py
mode: 0644
owner: celerity
group: celerity
- name: ensure celerity worker is running
service:
......
@@ -9,7 +9,7 @@
- name: configure email sender address
notify: restart nginx
lineinfile:
path: /home/skyreach/htdocs/skyreach_site/settings_override.py
path: /home/skyreach/skyreach_data/private/settings_override.py
regexp: '^#? ?DEFAULT_FROM_EMAIL.*'
line: "DEFAULT_FROM_EMAIL = '{{ manager_email_sender }}'"
backup: true
@@ -26,7 +26,7 @@
become: true
become_user: skyreach
environment:
PYTHONPATH: "/home/skyreach/htdocs/skyreach_site:/home/skyreach/htdocs:${PYTHONPATH}"
PYTHONPATH: "/home/skyreach/skyreach_site:/home/skyreach:${PYTHONPATH}"
DJANGO_SETTINGS_MODULE: settings
script:
cmd: files/set_site_url.py {{ manager_hostname }}
......
@@ -26,7 +26,7 @@
when: inventory_hostname == play_hosts[0]
filesystem:
fstype: ocfs2
opts: -T mail
opts: -T mail -Jblock64
dev: /dev/rbd0
- name: mount mapped device
......
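For context on the new flag: `-J block64` is a `mkfs.ocfs2` journal option that switches the journal to 64-bit block numbers, required for volumes larger than 16 TB, while the existing `-T mail` tunes the filesystem for many small, metadata-heavy files. The resulting task (its name is cut off above, so it is assumed here):

```yaml
- name: create shared filesystem
  when: inventory_hostname == play_hosts[0]
  filesystem:
    fstype: ocfs2
    opts: -T mail -Jblock64   # mail: many small files; block64: 64-bit journal (>16 TB)
    dev: /dev/rbd0
```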
@@ -28,4 +28,20 @@ repmgr_conninfo: host={{ ansible_default_ipv4.address }} dbname={{ repmgr_db }}
repmgr_repha_port: 8543
pg_firewall_enabled: true
pg_ferm_rules_filename: postgres_ha
pg_ferm_input_rules:
- proto:
- tcp
dport:
- 5432
- 8543
pg_ferm_output_rules:
- proto:
- tcp
dport:
- 54321
- 54322
pg_ferm_global_settings:
...
@@ -325,4 +325,14 @@
state: started
enabled: true
- name: firewall
when: pg_firewall_enabled
vars:
ferm_rules_filename: "{{ pg_ferm_rules_filename }}"
ferm_input_rules: "{{ pg_ferm_input_rules }}"
ferm_output_rules: "{{ pg_ferm_output_rules }}"
ferm_global_settings: "{{ pg_ferm_global_settings }}"
include_role:
name: ferm-configure
...
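This keeps firewall handling in one shared role: each service maps its own variables onto the names `ferm-configure` expects and includes the role. Any other role could hook in the same way; a hypothetical sketch (the service name and port are placeholders):

```yaml
- name: firewall
  vars:
    ferm_rules_filename: myservice
    ferm_input_rules:
      - proto:
          - tcp
        dport:
          - 8080
    ferm_output_rules: []
    ferm_global_settings: ''
  include_role:
    name: ferm-configure
```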
@@ -3,7 +3,7 @@
apt:
force_apt_get: true
update_cache: true
- name: update locale
command: locale-gen
@@ -12,6 +12,11 @@
name: cron
state: restarted
- name: restart sshd
service:
name: sshd
state: restarted
- name: update cache
apt:
force_apt_get: true
......
@@ -20,6 +20,20 @@
APT::Periodic::Update-Package-Lists "1";
APT::Periodic::Unattended-Upgrade "1";
- name: enable root login via ssh with key
replace:
dest: /etc/ssh/sshd_config
regexp: '^#PermitRootLogin (yes|without-password|prohibit-password)'
replace: "PermitRootLogin without-password"
notify: restart sshd
- name: remove disabled root login
replace:
dest: /root/.ssh/authorized_keys
regexp: "^no-port-forwarding,(.+) ssh-"
replace: "ssh-"
ignore_errors: yes
# FIREWALL
- name: firewall
......
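Two notes on the tasks above: `without-password` is the pre-OpenSSH-7.0 spelling of `prohibit-password` (root may log in with a key, never a password), and the second task strips the `no-port-forwarding,...` restriction prefix that some installers prepend to root's authorized key; its `ignore_errors` covers hosts where the file does not exist yet. A hypothetical follow-up check, not part of the diff:

```yaml
- name: verify effective root login policy
  command: sshd -T
  register: sshd_effective
  changed_when: false
  failed_when: "'permitrootlogin without-password' not in sshd_effective.stdout and 'permitrootlogin prohibit-password' not in sshd_effective.stdout"
```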
@@ -223,7 +223,7 @@ class SetAppDomain():
log('Assuming that the new url is using HTTPS: "%s"' % new_url)
cmds = [
# set site url in site settings
'echo \'from skyreach_site.base.models import SiteSettings; ss = SiteSettings.get_singleton(); ss.url = "%s"; ss.save(); print("Site settings saved.")\' | su skyreach -c "python3 /home/skyreach/htdocs/skyreach_site/manage.py shell -i python"' % new_url,
'echo \'from skyreach_site.base.models import SiteSettings; ss = SiteSettings.get_singleton(); ss.url = "%s"; ss.save(); print("Site settings saved.")\' | su skyreach -c "python3 /home/skyreach/skyreach_site/manage.py shell -i python"' % new_url,
]
utils.run_commands(cmds)
except Exception as e:
......
#!/usr/bin/env ansible-playbook
---
- name: PYTHON
hosts: all
gather_facts: false
tasks:
- name: ensure python3 is installed
register: python_install
changed_when: "'es_pyinstall' in python_install.stdout_lines"
raw: command -v python3 || echo es_pyinstall && apt update && apt install -y python3-minimal python3-apt
- import_playbook: playbooks/postgres-ha.yml
tags: postgres
- import_playbook: playbooks/msmonitor.yml
tags: monitor
- import_playbook: playbooks/mirismanager.yml
tags: manager
- import_playbook: playbooks/wowza.yml
tags: wowza
- import_playbook: playbooks/celerity.yml
tags: celerity
- import_playbook: playbooks/mediaworker.yml
tags: worker
- import_playbook: playbooks/mediaserver.yml
tags: server
- import_playbook: playbooks/mediavault.yml
tags: vault
- import_playbook: playbooks/mediaimport.yml
tags: import
- import_playbook: playbooks/netcapture.yml
tags: netcapture
...
@@ -9,8 +9,8 @@
register: python_install
changed_when: "'es_pyinstall' in python_install.stdout_lines"
raw: command -v python3 || echo es_pyinstall && apt update && apt install -y python3-minimal python3-apt
- import_playbook: playbooks/postgres.yml
tags: always
- import_playbook: "playbooks/{{ 'postgres-ha' if groups['postgres']|d('') | length > 1 else 'postgres' }}.yml"
tags: postgres
- import_playbook: playbooks/msmonitor.yml
tags: monitor
......
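The rewritten import resolves at parse time: more than one host in the `postgres` group selects the HA playbook, otherwise the standalone one, and `| d('')` keeps the expression defined when the inventory has no `postgres` group at all. Note the tag also narrows from `always` to `postgres`, so the database play no longer runs on every invocation. A hypothetical inventory that would trigger the HA branch:

```yaml
postgres:
  hosts:
    pg1.example.com:
    pg2.example.com:
```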
@@ -96,6 +96,30 @@ def run_tests(ip):
return False
def get_remote_workers_ips():
'''
Returns the set of IP addresses of all non-local celerity workers.
'''
ips = set()
# get worker IPs
try:
from celerity_utils import api
success, response = api.list_workers('celerity_config_updater')
if not success:
raise Exception(str(response))
for worker in response['workers']:
if worker['remote_ip']:
ips.add(worker['remote_ip'])
except Exception as e:
u.error('Failed to get workers list using celerity API: %s' % e)
# remove local IPs from the set
p = subprocess.run(['ip', 'addr'], stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf-8')
local_ips = re.findall(r'inet ([\d\.]+)/', p.stdout)
u.log('Local IP addresses are: %s.' % local_ips)
ips.difference_update(set(local_ips))  # in place; difference() alone would discard the result
return ips
def main():
try:
import mediaserver
@@ -107,11 +131,10 @@ def main():
all_ok = True
tested = False
mediaserver_ip = u.get_conf('NETWORK_IP')
worker_ips = u.get_conf('CELERITY_WORKER_IP')
for worker_ip in worker_ips.split(','):
worker_ips = get_remote_workers_ips()
for worker_ip in worker_ips:
worker_ip = worker_ip.strip()
if worker_ip and not worker_ip.startswith('127.0.') and worker_ip != mediaserver_ip:
if worker_ip and not worker_ip.startswith('127.'):
tested = True
if not check_ssh(worker_ip):
all_ok = False
@@ -123,7 +146,7 @@
# if not run_tests(worker_ip):
# all_ok = False
if not tested:
u.log('Celerity IP not set or running locally, skipping test.')
u.log('No remote worker found, skipping test.')
return 2
if not all_ok:
......