Hope y'all are enjoying the holidays. I am attempting an automated installation of WordPress on my Linux VM using Ansible. To that end, I have written the following Ansible playbook, which tries to mimic the official Ubuntu guide.
Here is the code:
- name: "Installing wordpress dependencies"
hosts: all
become: True
gather_facts: True
vars:
get_installer: 'curl -sS https://getcomposer.org/installer -o /tmp/composer-setup.php || /bin/true'
get_signature: 'curl -sS https://composer.github.io/installer.sig'
tasks:
- name: "Update repository"
apt:
update_cache: "yes"
- name: "Installing requirements"
apt:
name:
- "curl"
- "php"
- "php-cli"
- "gnupg"
- "unzip"
- "mysql-server"
- "php-fpm"
- "php-mysql"
- "apache2"
- "ghostscript"
- "libapache2-mod-php"
- "php-bcmath"
- "php-curl"
- "php-imagick"
- "php-intl"
- "php-json"
- "php-mbstring"
- "php-xml"
- "php-zip"
state: present
- name: Populate service facts
ansible.builtin.service_facts:
- name: Print service facts
ansible.builtin.debug:
var: ansible_facts.services
- name: "stopping nginx if running"
service:
name: nginx
state: stopped
when: "'nginx' in ansible_facts.services"
- name: "remove nginx if installed"
apt:
name:
- "nginx"
state: absent
- name: stop Mysql
service:
name: mysql
state: stopped
when: "'mysql' in ansible_facts.services"
- name: stop apache2
service:
name: apache2
state: stopped
when: "'apache2' in ansible_facts.services"
- name: Installing wordpress through source
hosts: all
become: True
gather_facts: False
vars:
wprootdir: "/srv/www/wordpress"
tasks:
- name: checking if wp src dir exists
stat:
path: "{{ wprootdir }}"
register: dir_details
- name: delete existing wordpress source files
become_user: www-data
no_log: True
file:
#path: "{{ item.path }}"
#recurse: True
path: "{{ wprootdir }}"
state: absent
#with_items: "{{ path_list.files }}"
- name: creating /var/www for wordpress source
file:
#path: "'{{ wp-root-dir }}' + 'wordpress'"
path: "/srv/www/wordpress"
recurse: yes
state: directory
owner: www-data
mode: '0755'
- name: downloading and extracting wordpress source
shell:
cmd: "curl https://wordpress.org/latest.tar.gz | sudo -u www-data tar zx -C /srv/www"
register: status
- fail:
msg: "Unable to download or extract wordpress source"
when: (status.rc != 0)
- name: Configuring apache for wordpress
hosts: all
become: True
gather_facts: False
vars:
wprootdir: "/srv/www/wordpress"
wpconffile: "/etc/apache2/sites-available/wordpress.conf"
tasks:
- name: deleting the file if it exists
file:
path: "{{ wpconffile }}"
state: absent
- name: creating wordpress conf file
file:
path: "{{ wpconffile }}"
state: touch
owner: www-data
- name: populating wordpress conf file
template:
src: apache2.j2
dest: "{{ wpconffile }}"
- name: enabling the site
shell:
cmd: "a2ensite wordpress"
- name: enable URL rewriting
shell:
cmd: "a2enmod rewrite"
- name: disable default "it works" site
shell:
cmd: "a2dissite 000-default"
- name: restart apache2
service:
name: apache2
state: reloaded
- name: Configuring database
hosts: all
become: True
gather_facts: True
#gather_facts: yes
vars:
mysql_port: 3306
mysql_socket: /var/run/mysqld/mysqld.sock
mysql_superuser: root
mysql_superuser_home: "{% if mysql_superuser == 'root' %}/root{% else %}/home/{{ mysql_superuser }}{% endif %}"
mysql_superuser_password: SuperUserPwd
mysql_wordpress_password: WordpressPwd
http_port: 80
tasks:
- name: Installing PyMySql through pip
pip:
name: PyMySql
state: present
- name: ensure mysql is running and starts on boot
service:
name: mysql
state: started
enabled: True
- name: Removes anonymous user account for localhost
community.mysql.mysql_user:
name: ''
state: absent
login_user: root
login_password: ""
login_unix_socket: "{{ mysql_socket }}"
when: ansible_local.mysqlinfo is undefined
- name: adding a password for root user
mysql_user:
# Update the superuser to have all grants and a password
name: "{{ mysql_superuser }}"
host: localhost
password: "{{ mysql_superuser_password }}"
priv: "*.*:ALL,GRANT"
# Login *as root* to perform this change, even though you might
# be altering the root user itself
login_user: root
login_password: ""
login_port: "{{ mysql_port }}"
login_host: localhost
login_unix_socket: "{{ mysql_socket }}"
# As a good measure,have ansible check whether an implicit login
# is possible first
check_implicit_admin: yes
when: ansible_local.mysqlinfo is undefined
- name: "Create custom fact directory"
file:
path: "/etc/ansible/facts.d"
state: "directory"
recurse: yes
when: ansible_local.mysqlinfo is undefined
- name: "record mysql info in custom fact"
template:
src: mysqlinfo.j2
dest: /etc/ansible/facts.d/mysqlinfo.fact
mode: 0644
when: ansible_local.mysqlinfo is undefined
- name: "re-run setup to use custom facts"
setup:
filter: ansible_local
when: ansible_local.mysqlinfo is undefined
- debug:
msg:
- "mysqlinfo is {{ ansible_local.mysqlinfo }}"
when: ansible_local.mysqlinfo is defined
#- name: Create system-wide mysql configuration file
#template:
#src: mysql_sys.cnf.j2
#dest: /etc/my.cnf
#- name: Create mysql configuration file for `{{ mysql_superuser }}`
#template:
#src: mysql_superuser.cnf.j2
#dest: "{{ mysql_superuser_home }}/.my.cnf"
- name: create database wordpress
mysql_db:
db: wordpress
state: present
login_user: "{{ ansible_local.mysqlinfo.mysql_superuser }}"
login_password: "{{ ansible_local.mysqlinfo.mysql_superuser_password }}"
login_unix_socket: "{{ mysql_socket }}"
when: ansible_local.mysqlinfo is defined
- name: Create database user 'wordpress' with all database privileges
community.mysql.mysql_user:
name: wordpress
password: "{{ mysql_wordpress_password }}"
login_user: "{{ ansible_local.mysqlinfo.mysql_superuser }}"
login_password: "{{ ansible_local.mysqlinfo.mysql_superuser_password }}"
priv: '*.*:ALL'
state: present
when: ansible_local.mysqlinfo is defined
- name: Flush privileges
mysql_query:
login_db: wordpress
login_user: "{{ ansible_local.mysqlinfo.mysql_superuser }}"
login_password: "{{ ansible_local.mysqlinfo.mysql_superuser_password }}"
login_unix_socket: "{{ mysql_socket }}"
query: FLUSH PRIVILEGES
# UFW Configuration
- name: "UFW - Allow HTTP on port {{ http_port }}"
ufw:
rule: allow
port: "{{ http_port }}"
proto: tcp
notify:
- Restart Mysql
tags: [ system ]
handlers:
- name: Restart Mysql
service:
name: mysql
state: restarted
- name: Restart Apache2
service:
name: apache2
state: restarted
- name: Configuring wordpress to connect to the database
hosts: all
gather_facts: False
become: true
vars:
wpconfigfile: "/srv/www/wordpress/wp-config.php"
tasks:
- name: copy sample config to wp-config.php
#become_user: www-data
copy:
remote_src: yes
src: /srv/www/wordpress/wp-config-sample.php
dest: "{{ wpconfigfile }}"
owner: www-data
- name: "re-run setup to use custom facts"
setup:
filter: ansible_local
- name: set database credentials in the config file
become: false
#become_user: www-data
#become_method: "su"
# multiple commands are run like this whereas with
# single command one can use a cmd parameter
# since this is technically *not* a list passed to /bin/sh
# we do not need a list here. Instead it is a series of
# commands being passed to /bin/sh
#shell: |
# apparently, passing this list directly doesn't seem to work
# what works is this loop
command: "{{ item }}"
with_items:
- "sudo -u www-data sed -i s/database_name_here/wordpress/ {{ wpconfigfile }}"
- "sudo -u www-data sed -i s/username_here/wordpress/ {{ wpconfigfile }}"
- "sudo -u www-data sed -i s/password_here/{{ ansible_local.mysqlinfo.mysql_wordpress_password }}/ {{ wpconfigfile }}"
- name: get random secret keys
uri:
url: https://api.wordpress.org/secret-key/1.1/salt/
return_content: yes
body_format: json
register: wordpress_keys
- debug:
var: wordpress_keys.content
- name: delete existing bak file
file:
path: "{{ wpconfigfile }}.bak"
state: absent
- name: run script to remove key placeholders
become_user: www-data
script:
chdir: /srv/www/wordpress/
cmd: replacelines.py
executable: /usr/bin/python3
environment: /srv/www/wordpress/
- name: update config file
become_user: www-data
copy:
remote_src: yes
src: "{{ wpconfigfile }}.bak"
dest: "{{ wpconfigfile }}"
- blockinfile:
path: "{{ wpconfigfile }}"
marker: // {mark} ANSIBLE MANAGED BLOCK
# having this separator here was giving me issues
#block: |
block:
"{{ wordpress_keys.content }}"
handlers:
- name: Restart Mysql
service:
name: mysql
state: restarted
- name: Restart Apache2
service:
name: apache2
state: restarted
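Side note: the remove-placeholders-then-copy-back dance (the replacelines.py script plus the "update config file" task) could presumably be collapsed into one in-place edit with Ansible's replace module, since omitting its replace parameter removes matching lines entirely. An untested sketch, reusing the wpconfigfile variable:
- name: remove key placeholders in place
  become_user: www-data
  replace:
    path: "{{ wpconfigfile }}"
    regexp: "^define\\(\\s*'(AUTH_KEY|SECURE_AUTH_KEY|LOGGED_IN_KEY|NONCE_KEY|AUTH_SALT|SECURE_AUTH_SALT|LOGGED_IN_SALT|NONCE_SALT)'.*\\n"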
Associated jinja2 template files are here:
Apache2 template:
<VirtualHost *:80>
ServerName {{ ansible_hostname }}
DocumentRoot "{{ wprootdir }}"
<Directory "{{ wprootdir }}">
Options FollowSymLinks
AllowOverride Limit Options FileInfo
DirectoryIndex index.php
Require all granted
</Directory>
<Directory "{{ wprootdir }}/wp-content">
Options FollowSymLinks
Require all granted
</Directory>
ErrorLog ${APACHE_LOG_DIR}/error.log
CustomLog ${APACHE_LOG_DIR}/access.log combined
</VirtualHost>
mysqlinfo template
{
"mysql_port": "{{ mysql_port }}",
"mysql_socket": "{{ mysql_socket }}",
"mysql_superuser": "{{ mysql_superuser }}",
"mysql_superuser_password": "{{ mysql_superuser_password }}",
"mysql_wordpress_password": "{{ mysql_wordpress_password }}"
}
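As a quick sanity check, once mysqlinfo.fact is in place the recorded values can be inspected ad hoc with ansible localhost -i inventory -m setup -a 'filter=ansible_local' (the same setup filter the play itself uses).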
replacelines.py script:
import re

# Copy wp-config.php to wp-config.php.bak, dropping the placeholder key/salt lines.
KEYS = re.compile(r'AUTH_KEY|SECURE_AUTH_KEY|LOGGED_IN_KEY|NONCE_KEY|AUTH_SALT|SECURE_AUTH_SALT|LOGGED_IN_SALT|NONCE_SALT')

with open("wp-config.php", "r") as wpconfig, open("wp-config.php.bak", "w") as wpconfigbak:
    for line in wpconfig:
        if not KEYS.search(line):
            wpconfigbak.write(line)
inventory file:
[local]
localhost ansible_connection=local
With this playbook I am able to see the WordPress landing page when I open 'localhost:80/' on my Linux machine. However, I am unable to get to the WordPress dashboard. I run the playbook like so: ansible-playbook -i inventory SetupWordpress.yaml
To save time, you may use my GitHub repo:
git clone -b WIP git@github.com:redbilledpanda/DevOpsScripts.git
cd DevOpsScripts && ansible-playbook -i inventory SetupWordpress.yaml
After the playbook completes, I go to http://localhost:80 and I am presented with the installer.
I fill in the details and, apparently, it succeeds.
When I try logging in, though, I don't see the dashboard. I never get past the login screen (it doesn't say incorrect credentials or anything, though).
I am at a loss as to what I am doing wrong. Keen to hear from you folks.
UPDATE1: If I skip the part where I generate the WordPress salts/keys, it works and I can see the dashboard. With these salts, however, it just won't get to the WordPress admin dashboard.
Using a minimal sample config file wpconfig.file
<?php
/**
* The base configuration for WordPress
* ...
* Authentication unique keys and salts.
*
* Change these to different unique phrases! You can generate these using
* the {@link https://api.wordpress.org/secret-key/1.1/salt/ WordPress.org secret-key service}.
*
* You can change these at any point in time to invalidate all existing cookies.
* This will force all users to have to log in again.
* ...
*/
and a minimal example playbook
---
- hosts: localhost
become: false
gather_facts: false
tasks:
- name: Get random secret keys
uri:
url: https://api.wordpress.org/secret-key/1.1/salt/
return_content: yes
body_format: json
register: wordpress_keys
- name: Show keys
debug:
var: wordpress_keys.content
- name: Write keys to config
blockinfile:
path: wpconfig.file
marker: // {mark} ANSIBLE MANAGED BLOCK
block:
"{{ wordpress_keys.content }}"
it results in the expected and probably correct output.
TASK [Show keys] ************************************************************************************************
ok: [localhost] =>
wordpress_keys.content: |-
define('AUTH_KEY', '...');
define('SECURE_AUTH_KEY', '...');
define('LOGGED_IN_KEY', '...');
define('NONCE_KEY', '...');
define('AUTH_SALT', '...');
define('SECURE_AUTH_SALT', '...');
define('LOGGED_IN_SALT', '...');
define('NONCE_SALT', '...');
<?php
/**
* The base configuration for WordPress
* ...
* Authentication unique keys and salts.
*
* Change these to different unique phrases! You can generate these using
* the {@link https://api.wordpress.org/secret-key/1.1/salt/ WordPress.org secret-key service}.
*
* You can change these at any point in time to invalidate all existing cookies.
* This will force all users to have to log in again.
* ...
*/
// BEGIN ANSIBLE MANAGED BLOCK
define('AUTH_KEY', '...');
define('SECURE_AUTH_KEY', '...');
define('LOGGED_IN_KEY', '...');
define('NONCE_KEY', '...');
define('AUTH_SALT', '...');
define('SECURE_AUTH_SALT', '...');
define('LOGGED_IN_SALT', '...');
define('NONCE_SALT', '...');
// END ANSIBLE MANAGED BLOCK
Summary
Your question and description seem to focus less on the essential part than on everything loosely related around it:
In the Ansible tasks themselves I am not able to (re-)produce an issue.
The failing part deals with configuration for a 3rd-party web service and PHP only.
According to this it seems not to be related to Ansible at all: the problem domain seems to be WordPress and PHP setup and configuration only, namely the config file.
For further troubleshooting you may try the template module – Template a file out to a target host – to template the whole config file, generating the keys via define('AUTH_KEY', '{{ lookup('password', '/dev/null chars=ascii_letters length=64') }}'); (a sketch follows below this list).
Check with the browser in incognito mode, since newly generated keys invalidate existing cookies.
Since it is therefore not about programming at all, another Stack site like serverfault.com, superuser.com, devops.stackexchange.com, or wordpress.stackexchange.com might fit your question better.
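A minimal sketch of that template-based approach, assuming a hypothetical wp-config.php.j2 (a copy of wp-config-sample.php with the eight placeholder lines replaced by lookup-based defines):
# each key line in the hypothetical wp-config.php.j2 would read:
# define('AUTH_KEY', '{{ lookup('password', '/dev/null chars=ascii_letters length=64') }}');
- name: Template wp-config.php with locally generated salts
  template:
    src: wp-config.php.j2
    dest: /srv/www/wordpress/wp-config.php
    owner: www-data
Because the lookup writes to /dev/null, new keys are generated on every run, invalidating all existing cookies each time; point the lookup at a persistent file to keep the keys stable across runs.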
- Regenerate the security keys.
- Make sure the keys are entered correctly in the wp-config.php file of your WordPress installation.
GitHub Actions is throwing this error:
Run composer install -q --no-ansi --no-interaction --no-scripts --no-progress --prefer-dist
  composer install -q --no-ansi --no-interaction --no-scripts --no-progress --prefer-dist
  shell: /usr/bin/bash -e {0}
Error: The operation was canceled.
Please see configuration and image below:
Laravel.yml file
name: Laravel
on:
push:
branches:
- master
- develop
- features/**
pull_request:
branches:
- master
- develop
jobs:
laravel-tests:
runs-on: ubuntu-latest
# Service container Postgresql postgresql
services:
# Label used to access the service container
postgres:
# Docker Hub image (also with version)
image: postgres:latest
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: db_test_laravel
## map the "external" 55432 port with the "internal" 5432
ports:
- 55432:5432
# Set health checks to wait until postgresql database has started (it takes some seconds to start)
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
strategy:
matrix:
operating-system: [ubuntu-latest]
php-versions: [ '8.0','7.4' ]
dependency-stability: [ prefer-stable ]
name: P${{ matrix.php-versions }} - L${{ matrix.laravel }} - ${{ matrix.dependency-stability }} - ${{ matrix.operating-system}}
steps:
- uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: '15.x'
- name: Cache node_modules directory
uses: actions/cache@v2
id: node_modules-cache
with:
path: node_modules
key: ${{ runner.OS }}-build-${{ hashFiles('**/package.json') }}-${{ hashFiles('**/package-lock.json') }}
- name: Install NPM packages
if: steps.node_modules-cache.outputs.cache-hit != 'true'
run: npm ci
- name: Build frontend
run: npm run development
- name: Install PHP versions
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-versions }}
- name: Get Composer Cache Directory 2
id: composer-cache
run: |
echo "::set-output name=dir::$(composer config cache-files-dir)"
- uses: actions/cache@v2
id: actions-cache
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: |
${{ runner.os }}-composer-
- name: Cache PHP dependencies
uses: actions/cache@v2
id: vendor-cache
with:
path: vendor
key: ${{ runner.OS }}-build-${{ hashFiles('**/composer.lock') }}
- name: Copy .env
run: php -r "file_exists('.env') || copy('.env.example', '.env');"
- name: Install Dependencies
if: steps.vendor-cache.outputs.cache-hit != 'true'
run: composer install -q --no-ansi --no-interaction --no-scripts --no-progress --prefer-dist
- name: Generate key
run: php artisan key:generate
- name: Directory Permissions
run: chmod -R 777 storage bootstrap/cache
- name: Run Migrations
# Set environment
env:
DB_CONNECTION: pgsql
DB_DATABASE: db_test_laravel
DB_PORT: 55432
DB_USERNAME: postgres
DB_PASSWORD: postgres
run: php artisan migrate
- name: Show dir
run: pwd
- name: PHP Version
run: php --version
# Code quality
- name: Execute tests (Unit and Feature tests) via PHPUnit
# Set environment
env:
DB_CONNECTION: pgsql
DB_DATABASE: db_test_laravel
DB_PORT: 55432
DB_USERNAME: postgres
DB_PASSWORD: postgres
run: vendor/bin/phpunit --testdox
Action summary image
Composer.json
"require": {
"php": "^7.3|^8.0",
Fixed this by doing the following:
Changed the version matrix to php-versions: [ '8.0' ]
GitHub Actions now runs successfully.
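For reference, the working strategy block then looks like this (everything else unchanged):
strategy:
  matrix:
    operating-system: [ ubuntu-latest ]
    php-versions: [ '8.0' ]
    dependency-stability: [ prefer-stable ]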
I am trying to install Airflow (1.10.0) using Ansible, following the link below.
https://airflow.apache.org/installation.html
config.yml
---
- name: Airflow | Config | Ensure airflow directories structure
file:
path: "{{ item }}"
state: directory
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
with_items:
- "{{ airflow_logs_folder }}"
- "{{ airflow_child_process_log_folder }}"
- "{{ airflow_dags_folder }}"
- "{{ airflow_plugins_folder }}"
- name: Airflow | Config | Copy gunicorn logrotate config
template:
src: gunicorn-logrotate.j2
dest: /etc/logrotate.d/airflow
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0644
become: yes
become_method: sudo
become_user: root
- name: Airflow | Config | Copy sample dag hello_world
copy:
src: "{{ airflow_home }}/cng-ansible/roles/airflow/files/cng-hello_world.py"
dest: "{{ airflow_dags_folder }}/cng-hello_world.py"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0644
remote_src: True
- name: Airflow | Config | Synchronization of DAGs
synchronize:
src: "{{ airflow_home }}/cng-ansible/roles/airflow/files/"
dest: "{{ airflow_dags_folder }}"
- name: Airflow | Config | Install airflow environment file
template:
src: airflow-environment-file.j2
dest: "{{ airflow_environment_file_folder }}/airflow"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
- name: Airflow | Config | Initialize Airflow Database
shell: "{{ airflow_executable }} initdb"
args:
chdir: "{{ airflow_home }}"
executable: /bin/bash
become: yes
become_method: sudo
become_user: "{{ airflow_user }}"
- name: Airflow | Config | Copy basic airflow config file
template:
src: airflow.cfg.j2
dest: "{{ airflow_home }}/airflow/airflow.cfg"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Install webserver systemd unit file
template:
src: airflow-webserver.service.j2
dest: /usr/lib/systemd/system/airflow-webserver.service
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Install scheduler systemd unit file
template:
src: airflow-scheduler.service.j2
dest: /usr/lib/systemd/system/airflow-scheduler.service
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Install worker systemd unit file
template:
src: airflow-worker.service.j2
dest: /usr/lib/systemd/system/airflow-worker.service
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Copy extra airflow config files (provided by playbooks)
copy:
src: "{{ item }}"
dest: "{{ airflow_home }}/{{ item | basename }}"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
with_fileglob:
- "{{ airflow_extra_conf_path }}/*"
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Copy extra airflow config templates (provided by playbooks)
template:
src: "{{ item }}"
dest: "{{ airflow_home }}/{{ item | basename }}"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
with_fileglob:
- "{{ airflow_extra_conf_template_path }}/*"
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Add variables from configuration file
command: "{{ airflow_executable }} variables -s {{ item.key }} {{ item.value }}"
environment:
AIRFLOW_HOME: "{{ airflow_home }}"
become: true
become_user: "{{ airflow_user }}"
with_items: "{{ airflow_admin_variables }}"
tags:
skip_ansible_lint
- name: Airflow | Config | Add connections from configuration file
command: "{{ airflow_executable }} connections -a {% for key, value in item.iteritems() %}--{{ key }} '{{ value }}' {% endfor %}"
environment:
AIRFLOW_HOME: "{{ airflow_home }}"
become: true
become_user: "{{ airflow_user }}"
with_items: "{{ airflow_admin_connections }}"
tags:
skip_ansible_lint
service.yml
---
- name: Airflow | Services | Configuring service
systemd:
name: "{{ item.key }}"
state: "{{ item.value.state }}"
enabled: "{{ item.value.enabled }}"
daemon_reload: yes
become: yes
become_method: sudo
become_user: root
with_dict: "{{ airflow_services }}"
when: "{{ item.value.enabled }}"
changed_when: false
database.yml
---
- name: Airflow | DB | Uninstall markupsafe
pip:
name: markupsafe
state: absent
- name: Airflow | DB | Install markupsafe
pip:
name: markupsafe
version: latest
- name: Airflow | DB | Set PostgreSQL environment variables
template:
src: postgres.sh.j2
dest: /etc/profile.d/postgres.sh
mode: 0644
notify: restart postgresql
- name: Airflow | DB | Ensure PostgreSQL data directory exists
file:
path: "{{ postgresql_data_dir }}"
owner: "{{ postgresql_user }}"
group: "{{ postgresql_group }}"
state: directory
mode: 0700
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Check if PostgreSQL database is initialized
stat:
path: "{{ postgresql_data_dir }}/PG_VERSION"
register: file_exists
- name: Airflow | DB | Initialize PostgreSQL Database
command: "{{ airflow_executable_pgsql }} initdb"
when: not file_exists.stat.exists
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Copy Postgresql hba file
copy:
src: ../templates/pg_hba.conf.j2
dest: "{{ postgresql_data_dir }}/pg_hba.conf"
owner: "{{ postgresql_user }}"
group: "{{ postgresql_group }}"
mode: 0600
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Copy Postgresql config file
copy:
src: ../templates/postgresql.conf.j2
dest: "{{ postgresql_data_dir }}/postgresql.conf.j2"
owner: "{{ postgresql_user }}"
group: "{{ postgresql_group }}"
mode: 0600
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Restart PostgreSQL
shell: "systemctl restart postgresql"
become: yes
become_method: sudo
become_user: root
- name: Airflow | DB | Postgresql Create DB
postgresql_db:
name: airflow
- name: Airflow | DB | Postgresql User
postgresql_user:
db: airflow
name: airflow
password: airflow
priv: "ALL"
expires: infinity
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Postgresql Privileges
postgresql_privs:
db: airflow
objs: ALL_DEFAULT
privs: ALL
type: default_privs
role: airflow
grant_option: yes
- name: Airflow | DB | Restart RabbitMQ-Server
shell: "systemctl restart rabbitmq-server"
become: yes
become_method: sudo
become_user: root
- name: Airflow | DB | RabbitMQ Add v_host
rabbitmq_vhost:
name: af-host
state: present
- name: Airflow | DB | RabbitMQ User
rabbitmq_user:
user: airflow
password: airflow
tags: airflow-user
vhost: af-host
configure_priv: .*
read_priv: .*
write_priv: .*
state: present
force: yes
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart rabbitmq-server
- name: Airflow | DB | Create MySQL DB
mysql_db:
name: airflow
state: present
- name: MySQL user
mysql_user:
name: airflow
password: airflow
priv: '*.*:ALL'
state: present
#- name: CREATE USER
# shell: "sudo -i -u postgres psql -c "CREATE USER airflow WITH PASSWORD 'airflow';""
#- name: CREATE DATABASE
# shell: "sudo -i -u postgres psql -c "CREATE DATABASE airflow;""
#- name: GRANT PRIVILEGES ON DATABASE
# shell: "sudo -i -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE airflow TO airflow;""
#- name: GRANT PRIVILEGES ON TABLES
# shell: "sudo -i -u postgres psql -c "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO airflow;""
install-up.yml
- name: Airflow | Install Pip | Check to see if pip is already installed
command: "pip --version"
ignore_errors: true
register: pip_is_installed
changed_when: false
- block:
- name: Download get-pip.py
get_url: url=https://bootstrap.pypa.io/get-pip.py dest=/tmp
- name: Install pip
command: "python /tmp/get-pip.py"
- name: Delete get-pip.py
file: state=absent path=/tmp/get-pip.py
when: pip_is_installed.rc != 0
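The block above therefore runs only on hosts where the pip --version probe failed (pip_is_installed.rc != 0); machines that already have pip skip the bootstrap entirely.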
install.yml
---
- name: Airflow | Install | Basic Packages
yum:
name: "{{ packages }}"
vars:
packages:
- gcc
- gcc-c++
- zlib-devel
- bzip2-devel
- openssl-devel
- ncurses-devel
- sqlite-devel
- cyrus-sasl-devel
- postgresql
- postgresql-server
- mariadb-server
- mariadb
- python-pip
- python-devel
- mysql-devel
- python-setuptools
- java-1.8.0-openjdk.x86_64
- MySQL-python
register: airflow_dbsetup
notify:
- restart postgresql
- restart rabbitmq-server
- restart mariadb
- name: Airflow | Install | Upgrade pip
shell: "pip install --upgrade pip"
- name: Airflow | Install | Upgrade setuptools
shell: "pip install --upgrade setuptools"
- name: Airflow | Install | Start mariadb
systemd: state=started name=mariadb daemon_reload=yes
sudo: yes
- name: Airflow | Install | Group dev
yum:
name: "#Development"
state: latest
- name: Airflow | Install | Numpy
pip:
name: numpy
version: latest
sudo: yes
- name: Airflow | Install | cython
pip:
name: cython
version: latest
sudo: yes
- name: Airflow | Install | With pip
pip:
name: apache-airflow
version: 1.10.0
- name: Airflow | Install | crypto
pip:
name: apache-airflow[crypto]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | hive
pip:
name: apache-airflow[hive]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | jdbc
pip:
name: apache-airflow[jdbc]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | password
pip:
name: apache-airflow[password]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | s3
pip:
name: apache-airflow[s3]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | slack
pip:
name: apache-airflow[slack]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | ssh
pip:
name: apache-airflow[ssh]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | Downgrade pip
shell: "pip install --upgrade --force-reinstall pip==9.0.0"
- name: Airflow | Install | devel
pip:
name: apache-airflow[devel]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | MSSql
pip:
name: apache-airflow[mssql]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | MySQL-python
pip:
name: MySQL-python
- name: Airflow | Install | Celery
pip:
name: celery
- name: Airflow | Install | psycopg2
pip:
name: psycopg2
- name: Airflow | Install | psycopg2-binary
pip:
name: psycopg2-binary
- name: Airflow | Install | erlang
yum:
name: https://github.com/rabbitmq/erlang-rpm/releases/download/v20.1.7/erlang-20.1.7-1.el6.x86_64.rpm
state: present
- name: Airflow | Install | socat
yum:
name: socat
state: present
- name: Airflow | Install | Rabbitmq
yum:
name: https://dl.bintray.com/rabbitmq/all/rabbitmq-server/3.7.8/rabbitmq-server-3.7.8-1.el7.noarch.rpm
state: present
Airflow installed successfully, but when I run systemctl status airflow-webserver.service it gives me the error below.
[root@localhost ~]# systemctl status airflow-webserver.service
● airflow-webserver.service - Airflow webserver daemon
Loaded: loaded (/usr/lib/systemd/system/airflow-webserver.service; enabled; vendor preset: disabled)
Active: activating (auto-restart) (Result: exit-code) since Wed 2018-12-26 05:01:22 GMT; 9s ago
Process: 18838 ExecStart=/usr/bin/airflow webserver --pid /home/ec2-user/airflow/webserver.pid (code=exited, status=1/FAILURE)
Main PID: 18838 (code=exited, status=1/FAILURE)
CGroup: /system.slice/airflow-webserver.service
Dec 26 05:01:22 localhost.localdomain systemd[1]: airflow-webserver.service: main process exited, code=exited, status=1/FAILURE
Dec 26 05:01:22 localhost.localdomain systemd[1]: Unit airflow-webserver.service entered failed state.
Dec 26 05:01:22 localhost.localdomain systemd[1]: airflow-webserver.service failed.
airflow.cfg
[root@localhost airflow]# cat airflow.cfg
[core]
airflow_home = /root/airflow
dags_folder = /root/airflow/dags
base_log_folder = /root/airflow/logs
remote_logging = False
remote_log_conn_id =
remote_base_log_folder =
encrypt_s3_logs = False
logging_level = INFO
fab_logging_level = WARN
logging_config_class =
log_format = [%%(asctime)s] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s
simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s
log_filename_template = {{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log
log_processor_filename_template = {{ filename }}.log
hostname_callable = socket:getfqdn
default_timezone = utc
executor = SequentialExecutor
sql_alchemy_conn = sqlite:////root/airflow/airflow.db
sql_alchemy_pool_enabled = True
sql_alchemy_pool_size = 5
sql_alchemy_pool_recycle = 1800
sql_alchemy_reconnect_timeout = 300
parallelism = 32
dag_concurrency = 16
dags_are_paused_at_creation = True
non_pooled_task_slot_count = 128
max_active_runs_per_dag = 16
load_examples = True
plugins_folder = /root/airflow/plugins
fernet_key = _eooBh6cIC0cBxvNyvwc3n8kjHNniGAPJXWkBK8n6rI=
donot_pickle = False
dagbag_import_timeout = 30
task_runner = BashTaskRunner
default_impersonation =
security =
secure_mode = False
unit_test_mode = False
task_log_reader = task
enable_xcom_pickling = True
killed_task_cleanup_time = 60
dag_run_conf_overrides_params = False
[cli]
api_client = airflow.api.client.local_client
endpoint_url = http://localhost:8080
[api]
auth_backend = airflow.api.auth.backend.default
[lineage]
backend =
[atlas]
sasl_enabled = False
host =
port = 21000
username =
password =
[operators]
# The default owner assigned to each new operator, unless
# provided explicitly or passed via `default_args`
default_owner = Airflow
default_cpus = 1
default_ram = 512
default_disk = 512
default_gpus = 0
[hive]
# Default mapreduce queue for HiveOperator tasks
default_hive_mapred_queue =
[webserver]
# The base url of your website as airflow cannot guess what domain or
# cname you are using. This is used in automated emails that
# airflow sends to point links to the right web server
base_url = http://localhost:8080
# The ip specified when starting the web server
web_server_host = 0.0.0.0
# The port on which to run the web server
web_server_port = 8080
web_server_ssl_cert =
web_server_ssl_key =
web_server_master_timeout = 120
web_server_worker_timeout = 120
worker_refresh_batch_size = 1
worker_refresh_interval = 30
secret_key = temporary_key
workers = 4
worker_class = sync
access_logfile = -
error_logfile = -
expose_config = False
authenticate = False
filter_by_owner = False
owner_mode = user
dag_default_view = tree
dag_orientation = LR
demo_mode = False
log_fetch_timeout_sec = 5
hide_paused_dags_by_default = False
page_size = 100
rbac = False
navbar_color = #007A87
default_dag_run_display_number = 25
[email]
email_backend = airflow.utils.email.send_email_smtp
[smtp]
smtp_host = localhost
smtp_starttls = True
smtp_ssl = False
smtp_port = 25
smtp_mail_from = airflow@example.com
[celery]
celery_app_name = airflow.executors.celery_executor
worker_concurrency = 16
worker_log_server_port = 8793
broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start
# it `airflow flower`. This defines the IP that Celery Flower runs on
flower_host = 0.0.0.0
# The root URL for Flower
# Ex: flower_url_prefix = /flower
flower_url_prefix =
# This defines the port that Celery Flower runs on
flower_port = 5555
# Default queue that tasks get assigned to and that worker listen on.
default_queue = default
# Import path for celery configuration options
celery_config_options = airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG
# In case of using SSL
ssl_active = False
ssl_key =
ssl_cert =
ssl_cacert =
[celery_broker_transport_options]
[dask]
cluster_address = 127.0.0.1:8786
tls_ca =
tls_cert =
tls_key =
[scheduler]
job_heartbeat_sec = 5
scheduler_heartbeat_sec = 5
run_duration = -1
min_file_process_interval = 0
min_file_parsing_loop_time = 1
dag_dir_list_interval = 300
print_stats_interval = 30
child_process_log_directory = /root/airflow/logs/scheduler
scheduler_zombie_task_threshold = 300
catchup_by_default = True
max_tis_per_query = 512
statsd_on = False
statsd_host = localhost
statsd_port = 8125
statsd_prefix = airflow
max_threads = 2
authenticate = False
[ldap]
uri =
user_filter = objectClass=*
user_name_attr = uid
group_member_attr = memberOf
superuser_filter =
data_profiler_filter =
bind_user = cn=Manager,dc=example,dc=com
bind_password = insecure
basedn = dc=example,dc=com
cacert = /etc/ca/ldap_ca.crt
search_scope = LEVEL
[mesos]
master = localhost:5050
framework_name = Airflow
task_cpu = 1
task_memory = 256
checkpoint = False
authenticate = False
[kerberos]
ccache = /tmp/airflow_krb5_ccache
# gets augmented with fqdn
principal = airflow
reinit_frequency = 3600
kinit_path = kinit
keytab = airflow.keytab
[github_enterprise]
api_rev = v3
[admin]
hide_sensitive_variable_fields = True
[elasticsearch]
elasticsearch_host =
elasticsearch_log_id_template = {dag_id}-{task_id}-{execution_date}-{try_number}
elasticsearch_end_of_log_mark = end_of_log
[kubernetes]
worker_container_repository =
worker_container_tag =
delete_worker_pods = True
namespace = default
airflow_configmap =
dags_volume_subpath =
dags_volume_claim =
logs_volume_subpath =
logs_volume_claim =
git_repo =
git_branch =
git_user =
git_password =
git_subpath =
git_sync_container_repository = gcr.io/google-containers/git-sync-amd64
git_sync_container_tag = v2.0.5
git_sync_init_container_name = git-sync-clone
worker_service_account_name =
image_pull_secrets =
gcp_service_account_keys =
in_cluster = True
[kubernetes_secrets]
Make sure that the dependencies of cryptography exist on your system:
Debian or Ubuntu derived distributions
Run this command
apt-get install build-essential libssl-dev libffi-dev python-dev
followed by
pip install cryptography
Red Hat derived distributions
Run this command
yum install gcc openssl-devel libffi-devel python-devel
followed by
pip install cryptography
I would suggest you try it manually first and then automate it using Ansible.
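A sketch of the same steps as Ansible tasks (the package lists are copied from the commands above; branching on ansible_os_family is the usual way to handle both distribution families):
- name: Install cryptography build dependencies (Debian/Ubuntu)
  apt:
    name: [build-essential, libssl-dev, libffi-dev, python-dev]
    state: present
  when: ansible_os_family == 'Debian'
- name: Install cryptography build dependencies (Red Hat)
  yum:
    name: [gcc, openssl-devel, libffi-devel, python-devel]
    state: present
  when: ansible_os_family == 'RedHat'
- name: Install cryptography
  pip:
    name: cryptography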
Below is my Ansible code which is trying to install Airflow 1.10.0.
sudo journalctl -u airflow-webserver -e output is
Dec 31 12:13:48 ip-10-136-94-232.eu-central-1.compute.internal airflow[22224]: ProgrammingError: (_mysql_exceptions.ProgrammingError) (1146, "Table 'airflow.log' doesn't exist") [SQL: u'INSERT INTO log (dttm, dag_id,
sudo journalctl -u airflow-scheduler -e output is
Dec 31 12:14:19 ip-10-136-94-232.eu-central-1.compute.internal airflow[22307]: ProgrammingError: (_mysql_exceptions.ProgrammingError) (1146, "Table 'airflow.log' doesn't exist") [SQL: u'INSERT INTO log (dttm, dag_id,
install.yml
---
- name: Airflow | Install | Basic Packages
yum:
name: "{{ packages }}"
vars:
packages:
- gcc
- gcc-c++
- zlib-devel
- bzip2-devel
- openssl-devel
- ncurses-devel
- sqlite-devel
- cyrus-sasl-devel
- postgresql
- postgresql-server
- mariadb-server
- mariadb
- python2-pip
- python2-devel
- mysql-devel
- python-setuptools
- java-1.8.0-openjdk.x86_64
- MySQL-python
- mysql-connector-python
register: airflow_dbsetup
notify:
- restart postgresql
- restart rabbitmq-server
- restart mariadb
- name: Airflow | Install | Upgrade pip
shell: "pip install --upgrade pip"
- name: Airflow | Install | Upgrade setuptools
shell: "pip install --upgrade setuptools"
- name: Airflow | Install | Start mariadb
systemd: state=started name=mariadb daemon_reload=yes
sudo: yes
- name: Airflow | Install | Group dev
yum:
name: "#Development"
state: latest
- name: Airflow | Install | Numpy
pip:
name: numpy
version: latest
sudo: yes
- name: Airflow | Install | cython
pip:
name: cython
version: latest
sudo: yes
- name: Airflow | Install | With pip
pip:
name: apache-airflow
version: 1.10.0
- name: Airflow | Install | crypto
pip:
name: apache-airflow[crypto]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | hive
pip:
name: apache-airflow[hive]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | MySQL
pip:
name: apache-airflow[mysql]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | jdbc
pip:
name: apache-airflow[jdbc]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | password
pip:
name: apache-airflow[password]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | s3
pip:
name: apache-airflow[s3]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | slack
pip:
name: apache-airflow[slack]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | ssh
pip:
name: apache-airflow[ssh]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | Reinstall pip
shell: "pip install --upgrade --force-reinstall pip==9.0.0"
- name: Airflow | Install | devel
pip:
name: apache-airflow[devel]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | MSSql
pip:
name: apache-airflow[mssql]
version: 1.10.0
register: airflow_install
- name: Airflow | Install | Celery
pip:
name: celery
- name: Airflow | Install | psycopg2
pip:
name: psycopg2
- name: Airflow | Install | psycopg2-binary
pip:
name: psycopg2-binary
- name: Airflow | Install | erlang
yum:
name: https://github.com/rabbitmq/erlang-rpm/releases/download/v20.1.7/erlang-20.1.7-1.el6.x86_64.rpm
state: present
- name: Airflow | Install | socat
yum:
name: socat
state: present
- name: Airflow | Install | Rabbitmq
yum:
name: https://dl.bintray.com/rabbitmq/all/rabbitmq-server/3.7.8/rabbitmq-server-3.7.8-1.el7.noarch.rpm
state: present
database.yml
---
- name: Airflow | DB | Uninstall markupsafe
pip:
name: markupsafe
state: absent
- name: Airflow | DB | Install markupsafe
pip:
name: markupsafe
version: latest
- name: Airflow | DB | Set PostgreSQL environment variables
template:
src: postgres.sh.j2
dest: /etc/profile.d/postgres.sh
mode: 0644
notify: restart postgresql
- name: Airflow | DB | Ensure PostgreSQL data directory exists
file:
path: "{{ postgresql_data_dir }}"
owner: "{{ postgresql_user }}"
group: "{{ postgresql_group }}"
state: directory
mode: 0700
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Check if PostgreSQL database is initialized
stat:
path: "{{ postgresql_data_dir }}/PG_VERSION"
register: file_exists
- name: Airflow | DB | Initialize PostgreSQL Database
command: "{{ airflow_executable_pgsql }} initdb"
when: not file_exists.stat.exists
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Copy Postgresql hba file
copy:
src: ../templates/pg_hba.conf.j2
dest: "{{ postgresql_data_dir }}/pg_hba.conf"
owner: "{{ postgresql_user }}"
group: "{{ postgresql_group }}"
mode: 0600
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Copy Postgresql config file
copy:
src: ../templates/postgresql.conf.j2
dest: "{{ postgresql_data_dir }}/postgresql.conf.j2"
owner: "{{ postgresql_user }}"
group: "{{ postgresql_group }}"
mode: 0600
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Restart PostgreSQL
shell: "systemctl restart postgresql"
become: yes
become_method: sudo
become_user: root
- name: Airflow | DB | Postgresql Create DB
postgresql_db:
name: airflow
- name: Airflow | DB | Postgresql User
postgresql_user:
db: airflow
name: airflow
password: airflow
priv: "ALL"
expires: infinity
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart postgresql
- name: Airflow | DB | Postgresql Privileges
postgresql_privs:
db: airflow
objs: ALL_DEFAULT
privs: ALL
type: default_privs
role: airflow
grant_option: yes
- name: Airflow | DB | Restart RabbitMQ-Server
shell: "systemctl restart rabbitmq-server"
become: yes
become_method: sudo
become_user: root
- name: Airflow | DB | RabbitMQ Add v_host
rabbitmq_vhost:
name: af-host
state: present
- name: Airflow | DB | RabbitMQ User
rabbitmq_user:
user: airflow
password: airflow
tags: airflow-user
vhost: af-host
configure_priv: .*
read_priv: .*
write_priv: .*
state: present
force: yes
become: yes
become_method: sudo
become_user: root
register: airflow_dbsetup
notify:
- restart rabbitmq-server
- name: Airflow | DB | Create MySQL DB
mysql_db:
name: airflow
state: present
- name: Airflow | DB | MySQL user
mysql_user:
name: airflow
password: airflow
priv: '*.*:ALL,GRANT'
state: present
#- name: CREATE USER
# shell: "sudo -i -u postgres psql -c "CREATE USER airflow WITH PASSWORD 'airflow';""
#- name: CREATE DATABASE
# shell: "sudo -i -u postgres psql -c "CREATE DATABASE airflow;""
#- name: GRANT PRIVILEGES ON DATABASE
# shell: "sudo -i -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE airflow TO airflow;""
#- name: GRANT PRIVILEGES ON TABLES
# shell: "sudo -i -u postgres psql -c "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO airflow;""
config.yml
- name: Airflow | Config | Ensure airflow directories structure
file:
path: "{{ item }}"
state: directory
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
with_items:
- "{{ airflow_logs_folder }}"
- "{{ airflow_child_process_log_folder }}"
- "{{ airflow_dags_folder }}"
- "{{ airflow_plugins_folder }}"
- name: Airflow | Config | Copy gunicorn logrotate config
template:
src: gunicorn-logrotate.j2
dest: /etc/logrotate.d/airflow
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0644
become: yes
become_method: sudo
become_user: root
- name: Airflow | Config | Copy sample dag hello_world
copy:
src: "{{ airflow_home }}/cng-ansible/roles/airflow/files/cng-hello_world.py"
dest: "{{ airflow_dags_folder }}/cng-hello_world.py"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0644
remote_src: True
- name: Airflow | Config | Synchronization of DAGs
synchronize:
src: "{{ airflow_home }}/cng-ansible/roles/airflow/files/"
dest: "{{ airflow_dags_folder }}"
- name: Airflow | Config | Install airflow environment file
template:
src: airflow-environment-file.j2
dest: "{{ airflow_environment_file_folder }}/airflow"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
- name: Airflow | Config | Copy basic airflow config file
template:
src: airflow.cfg.j2
dest: "{{ airflow_home }}/airflow/airflow.cfg"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Initialize Airflow Database
shell: "{{ airflow_executable }} initdb"
args:
chdir: "{{ airflow_home }}"
executable: /bin/bash
become: yes
become_method: sudo
become_user: "{{ airflow_user }}"
- name: Airflow | Config | Install webserver systemd unit file
template:
src: airflow-webserver.service.j2
dest: /usr/lib/systemd/system/airflow-webserver.service
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Install scheduler systemd unit file
template:
src: airflow-scheduler.service.j2
dest: /usr/lib/systemd/system/airflow-scheduler.service
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Install worker systemd unit file
template:
src: airflow-worker.service.j2
dest: /usr/lib/systemd/system/airflow-worker.service
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
register: airflow_config
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Copy extra airflow config files (provided by playbooks)
copy:
src: "{{ item }}"
dest: "{{ airflow_home }}/{{ item | basename }}"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
with_fileglob:
- "{{ airflow_extra_conf_path }}/*"
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Copy extra airflow config templates (provided by playbooks)
template:
src: "{{ item }}"
dest: "{{ airflow_home }}/{{ item | basename }}"
owner: "{{ airflow_user }}"
group: "{{ airflow_group }}"
mode: 0640
with_fileglob:
- "{{ airflow_extra_conf_template_path }}/*"
notify:
- restart airflow-webserver
- restart airflow-scheduler
- restart airflow-worker
- name: Airflow | Config | Add variables from configuration file
command: "{{ airflow_executable }} variables -s {{ item.key }} {{ item.value }}"
environment:
AIRFLOW_HOME: "{{ airflow_home }}"
become: true
become_user: "{{ airflow_user }}"
with_items: "{{ airflow_admin_variables }}"
tags:
skip_ansible_lint
- name: Airflow | Config | Add connections from configuration file
command: "{{ airflow_executable }} connections -a {% for key, value in item.iteritems() %}--{{ key }} '{{ value }}' {% endfor %}"
environment:
AIRFLOW_HOME: "{{ airflow_home }}"
become: true
become_user: "{{ airflow_user }}"
with_items: "{{ airflow_admin_connections }}"
tags:
skip_ansible_lint
service.yml
---
- name: Airflow | Services | Configuring service
systemd:
name: "{{ item.key }}"
state: "{{ item.value.state }}"
enabled: "{{ item.value.enabled }}"
daemon_reload: yes
become: yes
become_method: sudo
become_user: root
with_dict: "{{ airflow_services }}"
when: "{{ item.value.enabled }}"
changed_when: false
health.yml
---
- name: Airflow | Health | DB Bug fix
shell: "mysql -u root -e 'alter table airflow.task_instance add column executor_config varchar(15) after task_id;'"
- name: Airflow | Health | Status
wait_for:
host: localhost
port: "{{ item }}"
state: started # Port should be open
delay: 15 # Wait before the first check (sec)
timeout: 3 # Stop checking after timeout (sec)
ignore_errors: yes
with_items:
- 8080
Error log while installing this on an AWS RHEL server:
TASK [../../roles/airflow : Airflow | Health | DB Bug fix] ********************************************************************************************************************
fatal: [127.0.0.1]: FAILED! => {"changed": true, "cmd": "mysql -u root -e 'alter table airflow.task_instance add column executor_config varchar(15) after task_id;'", "delta": "0:00:00.192266", "end": "2018-12-31 10:35:22.455342", "msg": "non-zero return code", "rc": 1, "start": "2018-12-31 10:35:22.263076", "stderr": "ERROR 1146 (42S02) at line 1: Table 'airflow.task_instance' doesn't exist", "stderr_lines": ["ERROR 1146 (42S02) at line 1: Table 'airflow.task_instance' doesn't exist"], "stdout": "", "stdout_lines": []}
I was following the link below to proceed with the installation, upgrading from 1.8 to 1.10.0:
https://airflow.apache.org/installation.html
Error after suggestions:
TASK [../../roles/airflow : Airflow | Config | Initialize Airflow Database] ***********************************************************************************************************************
fatal: [127.0.0.1]: FAILED! => {"changed": true, "cmd": "/usr/bin/airflow initdb", "delta": "0:00:00.202622", "end": "2018-12-31 16:15:59.082736", "msg": "non-zero return code", "rc": 1, "start": "2018-12-31 16:15:58.880114", "stderr": "Traceback (most recent call last):\n File \"/usr/bin/airflow\", line 21, in <module>\n from airflow import configuration\n File \"/usr/lib/python2.7/site-packages/airflow/__init__.py\", line 35, in <module>\n from airflow import configuration as conf\n File \"/usr/lib/python2.7/site-packages/airflow/configuration.py\", line 506, in <module>\n conf.read(AIRFLOW_CONFIG)\n File \"/usr/lib/python2.7/site-packages/airflow/configuration.py\", line 280, in read\n super(AirflowConfigParser, self).read(filenames)\n File \"/usr/lib/python2.7/site-packages/backports/configparser/__init__.py\", line 705, in read\n self._read(fp, filename)\n File \"/usr/lib/python2.7/site-packages/backports/configparser/__init__.py\", line 1087, in _read\n lineno)\nbackports.configparser.DuplicateSectionError: While reading from '/home/ec2-user/airflow/airflow.cfg' [line 60]: section u'core' already exists", "stderr_lines": ["Traceback (most recent call last):", " File \"/usr/bin/airflow\", line 21, in <module>", " from airflow import configuration", " File \"/usr/lib/python2.7/site-packages/airflow/__init__.py\", line 35, in <module>", " from airflow import configuration as conf", " File \"/usr/lib/python2.7/site-packages/airflow/configuration.py\", line 506, in <module>", " conf.read(AIRFLOW_CONFIG)", " File \"/usr/lib/python2.7/site-packages/airflow/configuration.py\", line 280, in read", " super(AirflowConfigParser, self).read(filenames)", " File \"/usr/lib/python2.7/site-packages/backports/configparser/__init__.py\", line 705, in read", " self._read(fp, filename)", " File \"/usr/lib/python2.7/site-packages/backports/configparser/__init__.py\", line 1087, in _read", " lineno)", "backports.configparser.DuplicateSectionError: While reading from '/home/ec2-user/airflow/airflow.cfg' [line 60]: section u'core' already exists"], "stdout": "", "stdout_lines": []}
New error log after implementing @kaxil's suggestion:
sqlalchemy.exc.InvalidRequestError: This Session's transaction has been rolled back due to a previous exception during flush. To begin a new transaction with this Session, first issue Session.rollback(). Original exception was: (_mysql_exceptions.ProgrammingError) (1146, "Table 'airflow.log' doesn't exist") [SQL: u'INSERT INTO log (dttm, dag_id, task_id, event, execution_date, owner, extra) VALUES (%s, %s, %s, %s, %s, %s, %s)'] [parameters: (datetime.datetime(2019, 1, 2, 10, 49, 11, 49590, tzinfo=<Timezone [UTC]>), None, None, 'cli_webserver', None, 'ec2-user', '{"full_command": "[\'/usr/bin/airflow\', \'webserver\']", "host_name": "ip-10-136-94-144.eu-central-1.compute.internal"}')]
In your config.yml file, can you reorder the two tasks (Airflow | Config | Initialize Airflow Database & Airflow | Config | Copy basic airflow config file) so that they run in this order:
Airflow | Config | Copy basic airflow config file
Airflow | Config | Initialize Airflow Database
Basically, your airflow.cfg.j2 file should contain the metadata database connection string, sql_alchemy_conn = my_conn_string, in the [core] section, as mentioned in https://airflow.apache.org/howto/set-config.html#setting-configuration-options (double-check it).
Once your config file is copied and initdb is run, it creates all the necessary tables needed by Airflow, i.e.:
- name: Airflow | Config | Copy basic airflow config file
  template:
    src: airflow.cfg.j2
    dest: "{{ airflow_home }}/airflow/airflow.cfg"
    owner: "{{ airflow_user }}"
    group: "{{ airflow_group }}"
    mode: 0640
  register: airflow_config
  notify:
    - restart airflow-webserver
    - restart airflow-scheduler
    - restart airflow-worker
- name: Airflow | Config | Initialize Airflow Database
  shell: "{{ airflow_executable }} initdb"
  args:
    chdir: "{{ airflow_home }}"
    executable: /bin/bash
  become: yes
  become_method: sudo
  become_user: "{{ airflow_user }}"
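For example, the [core] section of airflow.cfg.j2 would carry a line like this (illustrative only; the credentials and host below mirror the airflow MySQL user created in database.yml, so substitute your own):
sql_alchemy_conn = mysql://airflow:airflow@localhost:3306/airflow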
For two weeks I have been working on MySQL deployment with Ansible. I have to install MySQL on an LV (logical volume).
Before deploying MySQL, the Ansible script creates /var/lib/mysql and an LV, and mounts the LV on /var/lib/mysql. Then it creates the mysql user and group in order to set 0700 permissions on the MySQL directory. Once that is done, Ansible deploys MySQL 5.7.
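For context, the LV preparation itself is not shown in the snippet below. Roughly, it would use the lvol, filesystem, and mount modules; vg_data, lv_mysql, and the size here are placeholder values:
- name: Create LV for MySQL
  lvol:
    vg: vg_data
    lv: lv_mysql
    size: 10g
- name: Create a filesystem on the LV
  filesystem:
    fstype: ext4
    dev: /dev/vg_data/lv_mysql
- name: Mount the LV on /var/lib/mysql
  mount:
    path: /var/lib/mysql
    src: /dev/vg_data/lv_mysql
    fstype: ext4
    state: mounted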
Part of my Ansible code:
- name: "Group : mysql"
group:
name: "mysql"
state: "present"
tags:
- User mysql
- name: "user : mysql"
user:
name: "mysql"
shell: "mysql"
group: "mysql"
createhome: "no"
append: "True"
state: "present"
tags:
- User
- name: "Set rights on mysql dir "
file:
path: "/var/lib/mysql"
owner: "mysql"
group: "mysql"
mode: 0700
tags:
- mysql dir rights
- name: "mysql root password"
debconf:
name: "mysql-server"
question: "mysql-server/root_password"
value: "{{ password_root_mysql }}"
vtype: "password"
when: password_root_mysql is defined
tags:
- Install
- name: "mysql root password confirmation"
debconf:
name: "mysql-server"
question: "mysql-server/root_password_again"
value: "{{ password_root_mysql }}"
vtype: "password"
when: password_root_mysql is defined
tags:
- Install mysql
- name: "Install : MySQL Server"
apt:
update_cache: "True"
name: "mysql-server"
install_recommends: "True"
tags:
- Install mysql
notify:
- stop mysql
- name: "Copie du template root.cnf.j2 vers root/.my.cnf "
template:
src: "{{ mysql_template_rootcnf }}"
dest: "~/.my.cnf"
owner: "root"
mode: "0600"
tags:
- Install mysql
So when I try to install mysql-server without any LV or directory preparation, it works. But when I prepare the MySQL directory with the right permissions, the installation fails, whether deployed manually or automatically.
Any ideas?
Ubuntu 16.04 with MYSQL 5.7.
Ansible v2.7
OK, I've found the problem: the lost+found directory in /var/lib/mysql (the LV is mounted on it) is treated as a database, and MySQL doesn't like that. In my code, I've just added:
- name: "Remove lost+found from {{ mysql_dir }}"
file:
path: "{{ mysql_dir }}/lost+found"
state: absent
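Note that this removal has to happen after the LV is mounted on /var/lib/mysql but before the mysql-server package is installed; otherwise the package's initial startup still sees the stray directory and treats it as a database.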