@@ -0,0 +1,299 @@
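+# Bootstrap playbook for the Flowdat docker-compose stack: it installs each
+# module's parameters.yml, prepares the MySQL databases, configures the base
+# module and its OAuth clients, generates self-signed Nginx certificates and
+# finally brings every container up.
+# Environment variables used: MODULES_INSTALL (comma-separated module list,
+# e.g. "ftth,mapas,stats"), CLIENT, DOMAIN and MYSQL_PASSWORD.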
+---
+- name: Install parameters files for each module
+  hosts: 127.0.0.1
+  connection: local
+  tasks:
+
+    - name: "Copy parameters.yml.dist to parameters.yml"
+      command: "docker-compose exec {{ item }} bash -c \"cp /opt/{{ item }}/app/config/parameters.yml.dist /opt/{{ item }}/app/config/parameters.yml\""
+      with_items: "{{ lookup('env', 'MODULES_INSTALL').split(',') }}"
+
+# - name: Parameters for base
+#   command: "cp {{ playbook_dir }}/kea/conf/kea-dhcp4.conf.dist {{ playbook_dir }}/kea/conf/kea-dhcp4.conf"
+
+- name: Launch docker for mysql database. Tag=start_mysql
+  hosts: 127.0.0.1
+  connection: local
+  tags: start_mysql
+  tasks:
+    - name: Docker mysql up
+      command: "docker-compose up -d --force-recreate mysql"
+
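+# The "mysql" host below is the docker-compose service; tasks run inside the
+# container via Ansible's docker connection plugin, authenticating with the
+# MYSQL_PASSWORD environment variable.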
+- name: Configure mysql database. Tag=configure_mysql
+  hosts: mysql
+  connection: docker
+  gather_facts: no
+  tags: configure_mysql
+  tasks:
+    - name: Wait for mysql to be ready
+      wait_for:
+        host: 127.0.0.1
+        port: 3306
+        state: started
+        delay: 5
+        connect_timeout: 15
+        timeout: 360
+
+    - name: Create application databases
+      mysql_db:
+        name: "{{ item }}"
+        login_password: "{{ lookup('env', 'MYSQL_PASSWORD') }}"
+        state: present
+      with_items:
+        - fd_session
+        - freeradius
+        - grafana
+        - statsd_db
+        - fd3_base
+        - fd3_ftth
+        - fd3_mapas
+        - fd3_stats
+        - fd3_cablemodem
+        - fd3_radius
+        - fd3_dhcp
+
+    - name: Copy freeradius schema into the container
+      copy:
+        src: "{{ playbook_dir }}/freeradius_schema.sql"
+        dest: /tmp/freeradius_schema.sql
+
+    - name: Import freeradius MySQL schema
+      mysql_db:
+        login_password: "{{ lookup('env', 'MYSQL_PASSWORD') }}"
+        state: import
+        name: freeradius
+        target: "/tmp/freeradius_schema.sql"
+
+    - name: Create user iksop
+      mysql_user:
+        name: 'iksop'
+        password: "{{ lookup('env', 'MYSQL_PASSWORD') }}"
+        login_password: "{{ lookup('env', 'MYSQL_PASSWORD') }}"
+        priv: '*.*:ALL'
+        state: present
+
+    - name: Copy mysql_scripts.sql into the container
+      copy:
+        src: "{{ playbook_dir }}/mysql_scripts.sql"
+        dest: /tmp/mysql_scripts.sql
+
+    - name: Replace password placeholder in mysql_scripts.sql
+      shell: "sed -i -- 's/#password#/{{ lookup('env', 'MYSQL_PASSWORD') }}/g' /tmp/mysql_scripts.sql"
+
+    - name: Run SQL commands against the database to create tables
+      mysql_db:
+        login_password: "{{ lookup('env', 'MYSQL_PASSWORD') }}"
+        state: import
+        name: fd_session
+        target: "/tmp/mysql_scripts.sql"
+
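+# The amqp service has no readiness check here, so after starting it the play
+# simply pauses for one minute before the base module is brought up.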
+- name: Launch docker for amqp. Tag=start_amqp
+  hosts: 127.0.0.1
+  connection: local
+  tags: start_amqp
+  tasks:
+    - name: Docker amqp up
+      command: "docker-compose up -d amqp"
+
+    - name: "Wait for amqp. Timeout: 1 minute"
+      pause:
+        minutes: 1
+
+- name: Launch docker for module base. Tag=start_base
+  hosts: 127.0.0.1
+  connection: local
+  tags: start_base
+  tasks:
+    - name: Docker base up
+      command: "docker-compose up -d --force-recreate base"
+
+# - name: "Wait for autoload_classmap.php. Timeout: 10 minutes"
+#   wait_for:
+#     path: "{{ playbook_dir }}/base/vendor/composer/autoload_classmap.php"
+#     state: present
+#     sleep: 30
+#     timeout: 600
+
+- name: Configure module base. Tag=configure_base
+  hosts: base
+  connection: docker
+  tags: configure_base
+  vars:
+    uris: ""
+  tasks:
+    - name: Run composer install
+      command: "docker-compose exec base bin/composer-install.sh"
+
+# - name: Chmod cache
+#   file:
+#     path: /opt/base/var/cache/
+#     recurse: yes
+#     mode: 777
+
+# - name: Clean cache
+#   file:
+#     state: absent
+#     path: "/opt/base/var/cache/"
+
+# - name: Composer install
+#   shell: "eval $(ssh-agent) ; ssh-add /opt/keys/bitbucket.id_rsa ; composer install --no-interaction"
+
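+    # Build the OAuth redirect_uri arguments: one login_check URL and one
+    # app_dev.php variant per module listed in MODULES_INSTALL.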
+    - name: Load redirections
+      set_fact:
+        uris: "{{ uris }} --redirect_uri=https://{{ item }}.{{ lookup('env', 'CLIENT') }}.flowdat.com/login_check"
+      with_items: "{{ lookup('env', 'MODULES_INSTALL').split(',') }}"
+
+    - name: Load redirections app_dev
+      set_fact:
+        uris: "{{ uris }} --redirect_uri=https://{{ item }}.{{ lookup('env', 'CLIENT') }}.flowdat.com/app_dev.php/login_check"
+      with_items: "{{ lookup('env', 'MODULES_INSTALL').split(',') }}"
+
+    - name: Create oauth client
+      shell: "docker-compose exec base bin/console oauth:client:createRedirects {{ uris }} > oauth.data.log"
+      args:
+        executable: /bin/bash
+
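+    # user_system.json must provide a "users" array with user, email, password,
+    # tenancy and extra fields; illustrative example (values are placeholders):
+    # { "users": [ { "user": "admin", "email": "admin@example.com",
+    #                "password": "secret", "tenancy": "base", "extra": "" } ] }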
+    - name: Update variable usersetting
+      set_fact:
+        usersetting: "{{ lookup('file', playbook_dir + '/user_system.json') | from_json }}"
+
+    - name: Create users from user_system.json
+      shell: "bin/console user:create {{ item.user }} {{ item.email }} {{ item.password }} {{ item.tenancy }} {{ item.extra }}"
+      args:
+        executable: /bin/bash
+      with_items: "{{ usersetting.users }}"
+
+    - name: Promote user admin to ROLE_ADMIN
+      shell: "bin/console fos:user:promote admin ROLE_ADMIN"
+      args:
+        executable: /bin/bash
+
+    - name: Promote user iksop to ROLE_SUPER_ADMIN
+      shell: "bin/console fos:user:promote iksop ROLE_SUPER_ADMIN"
+      args:
+        executable: /bin/bash
+
+    - name: Copy file oauth.data.log to host
+      fetch:
+        src: /opt/base/oauth.data.log
+        dest: /tmp/
+        flat: yes
+
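+# The OAuth credentials captured in oauth.data.log are duplicated into one
+# <module>.oauth.env file per installed module.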
+- name: Configure oauth data for other containers. Tag=configure_oauth
+  hosts: 127.0.0.1
+  connection: local
+  tags: configure_oauth
+  tasks:
+    - name: "Add oauth client id"
+      shell: "cp /tmp/oauth.data.log {{ item }}.oauth.env"
+      args:
+        executable: /bin/bash
+      with_items: "{{ lookup('env', 'MODULES_INSTALL').split(',') }}"
+
+- name: "Launch module containers. Tag=launch_dockers"
+  hosts: 127.0.0.1
+  connection: local
+  tags: launch_dockers
+  tasks:
+    - name: "Dockers up"
+      command: "docker-compose up -d --force-recreate {{ item }}"
+      with_items: "{{ lookup('env', 'MODULES_INSTALL').split(',') }}"
+
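+# autoload_classmap.php is written once composer finishes inside each module
+# container; the wait below blocks (up to 10 minutes) until it appears.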
+- name: "Run modules. Timeout: 10 minutes. Tag=run_modules"
+  hosts: "{{ lookup('env', 'MODULES_INSTALL') }}"
+  connection: docker
+  tags: run_modules
+  tasks:
+    - name: "Wait for composer autoload_classmap.php"
+      wait_for:
+        path: "./vendor/composer/autoload_classmap.php"
+        #path: "/opt/{{ play_hosts }}/vendor/composer/autoload_classmap.php"
+        state: present
+        sleep: 30
+        timeout: 600
+
+- name: "Run doctrine:schema:update for modules. Tag=dsu_modules"
+  hosts: "{{ lookup('env', 'MODULES_INSTALL') }}"
+  connection: docker
+  tags: dsu_modules
+  tasks:
+    - name: "Doctrine schema update"
+      shell: "bin/console doctrine:schema:update --force"
+
+# Generate self-signed cert files for Nginx (intended for development, please replace with proper ones)
+- name: "Generate self-signed cert files for Nginx"
+  hosts: 127.0.0.1
+  connection: local
+  tags: self_cert
+  tasks:
+    - name: "Make sure certs dir exists"
+      shell: "mkdir -p /opt/flowdat/certs"
+
+    - name: "Generate self-signed certs for Nginx"
+      shell: "openssl req -x509 -nodes -batch -days 365 -newkey rsa:2048 -keyout /opt/flowdat/certs/privkey.pem -out /opt/flowdat/certs/fullchain.pem -subj '/C=AR/ST=Santa Fe/L=Rosario/O=Interlink/OU=R&D/CN=*.{{ lookup('env','CLIENT') }}.{{ lookup('env','DOMAIN') }}/emailAddress=idev@interlink.com.ar'"
+
+- name: "Copy certs dir to extra/nginx/certs"
+  hosts: 127.0.0.1
+  connection: local
+  tags: copy_cert
+  tasks:
+    - name: "Copy certs directory"
+      shell: "cp -nr /opt/flowdat/certs/* /opt/flowdat/extra/nginx/certs"
+
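+# Bring everything up, then restart nginx last so it starts after the other
+# services (presumably to pick up the generated certificates and upstreams).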
+- name: "Bring up all containers. Tag=up_all"
+  hosts: 127.0.0.1
+  connection: local
+  tags: up_all
+  tasks:
+    - name: Docker up all modules
+      command: "docker-compose up -d"
+
+    - name: Docker stop nginx
+      command: "docker-compose stop nginx"
+
+    - name: Docker up nginx
+      command: "docker-compose up -d nginx"