wordpress docker exits immediately - mysql

I’m trying to get wordpress up and running with docker on OSX 10.12.1...
First I start up mysql:
docker run --name dockermysql -e MYSQL_ROOT_PASSWORD=secret -e MYSQL_DATABASE=wordpress -e MYSQL_USER=wordpress -e MYSQL_PASSWORD=secret -d mysql:5
That seems to work fine:
$ docker ps -a
f7d1562def4a mysql:5 "docker-entrypoint.sh" 33 minutes ago Up 33 minutes 3306/tcp dockermysql
but then I try wordpress…
docker run --name dockerpress --link dockermysql:mysql -d wordpress --net="host" -e WORDPRESS_DB_USER=wordpress -e WORDPRESS_DB_PASSWORD=secret
and it seems like it exits immediately
$ docker ps -a
84cc4d2abb57 wordpress "docker-entrypoint.sh" 7 minutes ago Exited (2) 7 minutes ago dockerpress
f7d1562def4a mysql:5 "docker-entrypoint.sh" 33 minutes ago Up 33 minutes 3306/tcp dockermysql
Here are the results of docker inspect dockerpress:
[
{
"Id": "84cc4d2abb57d00fbd67d10c02cc6333a401a7d8bfe4bc15f588da11a834333f",
"Created": "2016-11-28T14:41:04.757044577Z",
"Path": "docker-entrypoint.sh",
"Args": [
"--net=host",
"-e",
"WORDPRESS_DB_USER=wordpress",
"-e",
"WORDPRESS_DB_PASSWORD=secret"
],
"State": {
"Status": "exited",
"Running": false,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 0,
"ExitCode": 2,
"Error": "",
"StartedAt": "2016-11-28T14:41:05.523186975Z",
"FinishedAt": "2016-11-28T14:41:05.619181506Z"
},
"Image": "sha256:ee397259d4e59c65e2c1c5979a3634eb3ab106bba389acea8b21862053359134",
"ResolvConfPath": "/var/lib/docker/containers/84cc4d2abb57d00fbd67d10c02cc6333a401a7d8bfe4bc15f588da11a834333f/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/84cc4d2abb57d00fbd67d10c02cc6333a401a7d8bfe4bc15f588da11a834333f/hostname",
"HostsPath": "/var/lib/docker/containers/84cc4d2abb57d00fbd67d10c02cc6333a401a7d8bfe4bc15f588da11a834333f/hosts",
"LogPath": "/var/lib/docker/containers/84cc4d2abb57d00fbd67d10c02cc6333a401a7d8bfe4bc15f588da11a834333f/84cc4d2abb57d00fbd67d10c02cc6333a401a7d8bfe4bc15f588da11a834333f-json.log",
"Name": "/dockerpress",
"RestartCount": 0,
"Driver": "aufs",
"MountLabel": "",
"ProcessLabel": "",
"AppArmorProfile": "",
"ExecIDs": null,
"HostConfig": {
"Binds": null,
"ContainerIDFile": "",
"LogConfig": {
"Type": "json-file",
"Config": {}
},
"NetworkMode": "default",
"PortBindings": {},
"RestartPolicy": {
"Name": "no",
"MaximumRetryCount": 0
},
"AutoRemove": false,
"VolumeDriver": "",
"VolumesFrom": null,
"CapAdd": null,
"CapDrop": null,
"Dns": [],
"DnsOptions": [],
"DnsSearch": [],
"ExtraHosts": null,
"GroupAdd": null,
"IpcMode": "",
"Cgroup": "",
"Links": [
"/dockermysql:/dockerpress/mysql"
],
"OomScoreAdj": 0,
"PidMode": "",
"Privileged": false,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"SecurityOpt": null,
"UTSMode": "",
"UsernsMode": "",
"ShmSize": 67108864,
"Runtime": "runc",
"ConsoleSize": [
0,
0
],
"Isolation": "",
"CpuShares": 0,
"Memory": 0,
"CgroupParent": "",
"BlkioWeight": 0,
"BlkioWeightDevice": null,
"BlkioDeviceReadBps": null,
"BlkioDeviceWriteBps": null,
"BlkioDeviceReadIOps": null,
"BlkioDeviceWriteIOps": null,
"CpuPeriod": 0,
"CpuQuota": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": [],
"DiskQuota": 0,
"KernelMemory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"MemorySwappiness": -1,
"OomKillDisable": false,
"PidsLimit": 0,
"Ulimits": null,
"CpuCount": 0,
"CpuPercent": 0,
"IOMaximumIOps": 0,
"IOMaximumBandwidth": 0
},
"GraphDriver": {
"Name": "aufs",
"Data": null
},
"Mounts": [
{
"Name": "928da9b62b2adb3dd93905dfd702b766d80f0e7bb6f17b7e7f57f7c6433bdc5d",
"Source": "/var/lib/docker/volumes/928da9b62b2adb3dd93905dfd702b766d80f0e7bb6f17b7e7f57f7c6433bdc5d/_data",
"Destination": "/var/www/html",
"Driver": "local",
"Mode": "",
"RW": true,
"Propagation": ""
}
],
"Config": {
"Hostname": "84cc4d2abb57",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"ExposedPorts": {
"80/tcp": {}
},
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"no_proxy=*.local, 169.254/16",
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"PHPIZE_DEPS=autoconf \t\tfile \t\tg++ \t\tgcc \t\tlibc-dev \t\tmake \t\tpkg-config \t\tre2c",
"PHP_INI_DIR=/usr/local/etc/php",
"APACHE_CONFDIR=/etc/apache2",
"APACHE_ENVVARS=/etc/apache2/envvars",
"PHP_EXTRA_BUILD_DEPS=apache2-dev",
"PHP_EXTRA_CONFIGURE_ARGS=--with-apxs2",
"GPG_KEYS=0BD78B5F97500D450838F95DFE857D9A90D90EC1 6E4F6AB321FDC07F2C332E3AC2BF0BC433CFC8B3",
"PHP_VERSION=5.6.28",
"PHP_URL=https://secure.php.net/get/php-5.6.28.tar.xz/from/this/mirror",
"PHP_ASC_URL=https://secure.php.net/get/php-5.6.28.tar.xz.asc/from/this/mirror",
"PHP_SHA256=07187ba2870f89cef334cd2ad6cb801aeec5eaf283da0293a9a6be75d6786d11",
"PHP_MD5=1e01c66b2e67ab3b56a6180ee560fe4c",
"WORDPRESS_VERSION=4.6.1",
"WORDPRESS_SHA1=027e065d30a64720624a7404a1820e6c6fff1202"
],
"Cmd": [
"--net=host",
"-e",
"WORDPRESS_DB_USER=wordpress",
"-e",
"WORDPRESS_DB_PASSWORD=secret"
],
"Image": "wordpress",
"Volumes": {
"/var/www/html": {}
},
"WorkingDir": "/var/www/html",
"Entrypoint": [
"docker-entrypoint.sh"
],
"OnBuild": null,
"Labels": {}
},
"NetworkSettings": {
"Bridge": "",
"SandboxID": "d946a78e9a50fc8fb10873a0c043c9f49a377db44b19edb5e83362b37c025948",
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"Ports": null,
"SandboxKey": "/var/run/docker/netns/d946a78e9a50",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null,
"EndpointID": "",
"Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"MacAddress": "",
"Networks": {
"bridge": {
"IPAMConfig": null,
"Links": null,
"Aliases": null,
"NetworkID": "37e08b5c5d5f8a159552b8ab22a0fb37bfbcfd720fd83ce20a983ab85e60157f",
"EndpointID": "",
"Gateway": "",
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": ""
}
}
}
}
]

For anyone still learning, like me, I found the better solution here was to use docker-compose, as described in the docs: https://docs.docker.com/compose/wordpress/
One nice thing about this, aside from being much easier to deal with, is that you can see all of your env variables in one place, which helps you avoid mismatched variables between your db and wp server; a mismatch like that is what was causing my initial problem.
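For reference, a minimal docker-compose.yml along the lines of that guide; the service names, published port, and passwords below are placeholders to adapt:

version: '3'
services:
  db:
    image: mysql:5.7
    volumes:
      - db_data:/var/lib/mysql
    environment:
      MYSQL_ROOT_PASSWORD: secret
      MYSQL_DATABASE: wordpress
      MYSQL_USER: wordpress
      MYSQL_PASSWORD: secret
  wordpress:
    image: wordpress:latest
    depends_on:
      - db
    ports:
      - "8000:80"
    environment:
      WORDPRESS_DB_HOST: db:3306
      WORDPRESS_DB_USER: wordpress
      WORDPRESS_DB_PASSWORD: secret
volumes:
  db_data:

Note that WORDPRESS_DB_USER/WORDPRESS_DB_PASSWORD have to match MYSQL_USER/MYSQL_PASSWORD, which is exactly the kind of mismatch that is easy to spot when everything is in one file.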

Pulling mysql:5.7 and the latest wordpress image worked for me. (Note that all docker run options have to come before the image name: in the question's command, everything after wordpress, including --net="host" and the -e flags, was passed to the container as its command instead, which is why they show up under Args rather than Env in the inspect output and the container exits immediately.) Here are the steps I used:
1. docker pull wordpress
2. docker pull mysql:5.7
3. docker run --name nwplm-database -e MYSQL_ROOT_PASSWORD=Pa$$w0rd -d mysql:5.7
4. docker run --name nwplm-wordpress --link nwplm-database -p 8080:80 -e WORDPRESS_DB_HOST=nwplm-database:3306 -e WORDPRESS_DB_USER=root -e WORDPRESS_DB_PASSWORD=Pa$$w0rd -e WORDPRESS_DB_NAME=nwplm-wordpress -e WORDPRESS_TABLE_PREFIX=wp_ -d wordpress
5. Open localhost:8080 in your browser.
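As a general debugging tip, when a container exits immediately, the logs of the stopped container usually say why:

docker logs dockerpress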

converting string to integer in ansible

In Ansible I am running a role to create retention rules for some projects. I fetch the projects with a GET request and store the result with register; from each item I read retention_id: "59" and project_id: "80". I need these as integers (59 and 80), but they are sent to the API as strings. I tried to convert them using "{{ item.project_id | int }}" and {{ item.metadata.retention_id | int }}, but they were not changed to integers. I also tried converting the YAML body to JSON for luck; I am still getting the error.
I have been stuck with this task for a week, so any solution would be appreciated. Thank you.
Actual playbook:
---
# List of projects to check retention and project ID's
- name: get projects
  uri:
    url: "https://{{ ansible_host }}/api/v2.0/projects"
    method: GET
    headers:
      Authorization: "{{ admin_passwd }}"
      Accept: application/json
    validate_certs: no
  register: projects_result

- debug:
    var: projects_result.json | flatten(levels=1)

# Create retention rules for the projects
- name: creating retention rules
  uri:
    url: "https://{{ ansible_host }}/api/v2.0/retentions/{{ item.metadata.retention_id | int }}"
    method: PUT
    body_format: json
    headers:
      Content-Type: application/json
      Authorization: "{{ admin_passwd }}"
      Accept: 'application/json, text/plain, */*'
    body:
      algorithm: "or"
      id: "{{ item.metadata.retention_id | int }}"
      rules:
        - action: "retain"
          params:
            latestPushedK: 3
          scope_selectors:
            repository:
              - decoration: "repoMatches"
                kind: "doublestar"
                pattern: "**"
          tag_selectors:
            - decoration: "matches"
              extras: "{\"untagged\":true}"
              kind: "doublestar"
              pattern: "**"
          template: latestPushedK
      scope:
        level: "project"
        ref: "{{ item.project_id | int }}"
      trigger:
        kind: "Schedule"
        settings:
          cron: "0 0 * * * *"
    validate_certs: no
    status_code: 200
  when: "item.registry_id is defined"
  loop: "{{ projects_result.json | flatten(levels=1) }}"
  ignore_errors: yes
With this I was getting this error:
$ ansible-playbook -i ansible/inventories/shiplab ansible/playbooks/harbor-configurations.yaml --ask-vault-pass
Vault password:
PLAY [harbor] ***********************************************************************************************************************************************
TASK [Gathering Facts] **************************************************************************************************************************************
ok: [harbor_shiplab_test1e]
TASK [../roles/harbor-retention-rules : get projects] *******************************************************************************************************
ok: [harbor_shiplab_test1e]
TASK [../roles/harbor-retention-rules : debug] **************************************************************************************************************
ok: [harbor_shiplab_test1e] => {
"projects_result.json | flatten(levels=1)": [
{
"chart_count": 0,
"creation_time": "2021-10-21T07:35:28.228Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 56,
"items": [],
"project_id": 75,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "54"
},
"name": "harbor-esl-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 75,
"registry_id": 101,
"repo_count": 0,
"update_time": "2021-10-21T07:35:28.228Z"
},
{
"chart_count": 0,
"creation_time": "2021-10-21T07:35:43.098Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 58,
"items": [],
"project_id": 77,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "56"
},
"name": "harbor-library-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 77,
"registry_id": 103,
"repo_count": 0,
"update_time": "2021-10-21T07:35:43.098Z"
},
{
"chart_count": 0,
"creation_time": "2021-10-21T07:35:13.930Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 54,
"items": [],
"project_id": 73,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "52"
},
"name": "harbor-megatron-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 73,
"registry_id": 104,
"repo_count": 0,
"update_time": "2021-10-21T07:35:13.930Z"
},
{
"chart_count": 0,
"creation_time": "2021-10-21T07:35:50.271Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 59,
"items": [],
"project_id": 78,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "57"
},
"name": "harbor-migration-poc-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 78,
"registry_id": 105,
"repo_count": 0,
"update_time": "2021-10-21T07:35:50.271Z"
},
{
"chart_count": 0,
"creation_time": "2021-10-21T07:35:57.671Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 60,
"items": [],
"project_id": 79,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "58"
},
"name": "harbor-payment-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 79,
"registry_id": 106,
"repo_count": 0,
"update_time": "2021-10-21T07:35:57.671Z"
},
{
"chart_count": 0,
"creation_time": "2021-10-21T07:36:05.861Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 61,
"items": [],
"project_id": 80,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "59"
},
"name": "harbor-platform-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 80,
"registry_id": 107,
"repo_count": 0,
"update_time": "2021-10-21T07:36:05.861Z"
},
{
"chart_count": 1,
"creation_time": "2021-08-19T15:50:23.772Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 1,
"items": [],
"project_id": 1,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true"
},
"name": "library",
"owner_id": 1,
"owner_name": "admin",
"project_id": 1,
"repo_count": 3,
"update_time": "2021-08-19T15:50:23.772Z"
},
{
"chart_count": 2,
"creation_time": "2021-08-24T16:45:19.230Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 2,
"items": [],
"project_id": 2,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true"
},
"name": "platform",
"owner_id": 1,
"owner_name": "admin",
"project_id": 2,
"repo_count": 11,
"update_time": "2021-08-24T16:45:19.230Z"
}
]
}
TASK [../roles/harbor-retention-rules : creating retention rules] *******************************************************************************************
failed: [harbor_shiplab_test1e] (item={'chart_count': 0, 'creation_time': '2021-10-21T07:35:28.228Z',
'current_user_role_id': 1, 'current_user_role_ids': [1], 'cve_allowlist': {'creation_time': '0001-01-01T00:00:00.000Z',
'id': 56, 'items': [], 'project_id': 75, 'update_time': '0001-01-01T00:00:00.000Z'},
'metadata': {'public': 'true', 'retention_id': '54'}, 'name': 'harbor-esl-proxy', 'owner_id': 1, 'owner_name': 'admin', 'project_id': 75,
'registry_id': 101, 'repo_count': 0, 'update_time': '2021-10-21T07:35:28.228Z'}) => {"ansible_loop_var": "item", "changed": false,
"connection": "close", "content": "{\"errors\":[{\"code\":\"UNPROCESSABLE_ENTITY\",\"message\":\"validation failure
list:\\nparsing policy body from \\\"\\\" failed, because json: cannot unmarshal string into Go struct field RetentionPolicy.id of type int64\"}]}\n",
"content_length": "213", "content_type": "application/json; charset=utf-8", "date": "Fri, 22 Oct 2021 12:26:56 GMT", "elapsed": 1,
"item": {"chart_count": 0, "creation_time": "2021-10-21T07:35:28.228Z", "current_user_role_id": 1, "current_user_role_ids": [1],
"cve_allowlist": {"creation_time": "0001-01-01T00:00:00.000Z", "id": 56, "items": [], "project_id": 75, "update_time": "0001-01-01T00:00:00.000Z"},
"metadata": {"public": "true", "retention_id": "54"}, "name": "harbor-esl-proxy", "owner_id": 1, "owner_name": "admin", "project_id": 75,
"registry_id": 101, "repo_count": 0, "update_time": "2021-10-21T07:35:28.228Z"}, "json": {"errors": [{"code": "UNPROCESSABLE_ENTITY",
"message": "validation failure list:\nparsing policy body from \"\" failed, because json: cannot unmarshal string into Go struct field RetentionPolicy.id of type int64"}]},
"msg": "Status code was 422 and not [200]: HTTP Error 422: Unprocessable Entity", "redirected": false, "set_cookie": "sid=2e5f911867c675528b5e23c74f803240; Path=/; HttpOnly",
"status": 422, "url": "https://harbor.tst1e.k8s.shiplab.ss.acl.com/api/v2.0/retentions/54", "vary": "Accept-Encoding", "x_request_id": "334cb06f-c161-4085-8df2-a448b7776819"}
After this I changed the body to a JSON string:
- name: creating retention rules
  uri:
    url: "https://{{ ansible_host }}/api/v2.0/retentions/{{ item.metadata.retention_id | int }}"
    method: PUT
    body_format: json
    headers:
      Content-Type: application/json
      Authorization: "{{ admin_passwd }}"
      Accept: 'application/json, text/plain, */*'
    body:
      '{ "algorithm": "or",
         "id": {{ item.metadata.retention_id | int }},
         "rules": [{
           "action": "retain",
           "params": {
             "latestPushedK": 3 },
           "scope_selectors": {
             "repository": [{
               "decoration": "repoMatches",
               "kind": "doublestar",
               "pattern": "**" }] },
           "tag_selectors": [{
             "decoration": "matches",
             "extras": "{\"untagged\":true}",
             "kind": "doublestar",
             "pattern": "**" }],
           "template": latestPushedK }],
         "scope": {
           "level": "project",
           "ref": {{ item.project_id | int }} },
         "trigger": {
           "kind": "Schedule",
           "settings": {
             "cron": "0 0 * * * *" }}}'
    validate_certs: no
    status_code: 200
  when: "item.registry_id is defined"
  loop: "{{ projects_result.json | flatten(levels=1) }}"
  ignore_errors: yes
With this I am also getting the same error. Please help me out on this.
Verbose output:
ansible-playbook -i ansible/inventories/shiplab ansible/playbooks/harbor-configurations.yaml --ask-vault-pass -vvv
ansible-playbook 2.8.4
config file = /home/user1/repos/harbor/konvoy/ansible.cfg
configured module search path = ['/home/user1/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.7/site-packages/ansible
executable location = /usr/bin/ansible-playbook
python version = 3.7.10 (default, May 5 2021, 11:43:58) [GCC 10.2.0]
Using /home/user1/repos/harbor/konvoy/ansible.cfg as config file
Vault password:
host_list declined parsing /home/user1/repos/harbor/konvoy/ansible/inventories/shiplab/test1e as it did not pass it's verify_file() method
auto declined parsing /home/user1/repos/harbor/konvoy/ansible/inventories/shiplab/test1e as it did not pass it's verify_file() method
Parsed /home/user1/repos/harbor/konvoy/ansible/inventories/shiplab/test1e inventory source with ini plugin
PLAYBOOK: harbor-configurations.yaml ************************************************************************************************************************
1 plays in ansible/playbooks/harbor-configurations.yaml
PLAY [harbor] ***********************************************************************************************************************************************
TASK [Gathering Facts] **************************************************************************************************************************************
task path: /home/user1/repos/harbor/konvoy/ansible/playbooks/harbor-configurations.yaml:2
<harbor.tst1e.k8s.shiplab.ss.acl.com> ESTABLISH LOCAL CONNECTION FOR USER: MEA+user1
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'echo '"'"'~MEA+user1'"'"' && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo ~MEA+user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129 `" && echo ansible-tmp-1634916091.044559-272844769197129="` echo ~MEA+user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129 `" ) && sleep 0'
<harbor_shiplab_test1e> Attempting python interpreter discovery
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'echo PLATFORM; uname; echo FOUND; command -v '"'"'/usr/bin/python'"'"'; command -v '"'"'python3.7'"'"'; command -v '"'"'python3.6'"'"'; command -v '"'"'python3.5'"'"'; command -v '"'"'python2.7'"'"'; command -v '"'"'python2.6'"'"'; command -v '"'"'/usr/libexec/platform-python'"'"'; command -v '"'"'/usr/bin/python3'"'"'; command -v '"'"'python'"'"'; echo ENDFOUND && sleep 0'
<harbor_shiplab_test1e> Python interpreter discovery fallback (unsupported platform for extended discovery: cygwin_nt-10.0)
Using module file /usr/lib/python3.7/site-packages/ansible/modules/system/setup.py
<harbor.tst1e.k8s.shiplab.ss.acl.com> PUT /home/user1/.ansible/tmp/ansible-local-1736l28mza02/tmp5ucaxth_ TO /home/user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129/AnsiballZ_setup.py
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'chmod u+x /home/user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129/ /home/user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129/AnsiballZ_setup.py && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c '/usr/bin/python /home/user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129/AnsiballZ_setup.py && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'rm -f -r /home/user1/.ansible/tmp/ansible-tmp-1634916091.044559-272844769197129/ > /dev/null 2>&1 && sleep 0'
ok: [harbor_shiplab_test1e]
META: ran handlers
TASK [../roles/harbor-retention-rules : get projects] *******************************************************************************************************
task path: /home/user1/repos/harbor/konvoy/ansible/roles/harbor-retention-rules/tasks/main.yml:3
<harbor.tst1e.k8s.shiplab.ss.acl.com> ESTABLISH LOCAL CONNECTION FOR USER: MEA+user1
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'echo '"'"'~MEA+user1'"'"' && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo ~MEA+user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630 `" && echo ansible-tmp-1634916112.1313658-277975019551630="` echo ~MEA+user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630 `" ) && sleep 0'
Using module file /usr/lib/python3.7/site-packages/ansible/modules/net_tools/basics/uri.py
<harbor.tst1e.k8s.shiplab.ss.acl.com> PUT /home/user1/.ansible/tmp/ansible-local-1736l28mza02/tmpsm6sxu8_ TO /home/user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630/AnsiballZ_uri.py
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'chmod u+x /home/user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630/ /home/user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630/AnsiballZ_uri.py && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c '/usr/bin/python /home/user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630/AnsiballZ_uri.py && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'rm -f -r /home/user1/.ansible/tmp/ansible-tmp-1634916112.1313658-277975019551630/ > /dev/null 2>&1 && sleep 0'
ok: [harbor_shiplab_test1e] => {
"changed": false,
"connection": "close",
"content_type": "application/json",
"cookies": {
"sid": "033af4ae913d69a8314a4edb84512aa6"
},
"cookies_string": "sid=033af4ae913d69a8314a4edb84512aa6",
"date": "Fri, 22 Oct 2021 15:22:01 GMT",
"elapsed": 1,
"invocation": {
"module_args": {
"attributes": null,
"backup": null,
"body": null,
"body_format": "raw",
"client_cert": null,
"client_key": null,
"content": null,
"creates": null,
"delimiter": null,
"dest": null,
"directory_mode": null,
"follow": false,
"follow_redirects": "safe",
"force": false,
"force_basic_auth": false,
"group": null,
"headers": {
"Accept": "application/json",
"Authorization": "Basic YWRtaW46SGFyYm9yMTIzNDU="
},
"http_agent": "ansible-httpget",
"method": "GET",
"mode": null,
"owner": null,
"regexp": null,
"remote_src": null,
"removes": null,
"return_content": false,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"src": null,
"status_code": [
200
],
"timeout": 30,
"unix_socket": null,
"unsafe_writes": null,
"url": "https://harbor.tst1e.k8s.shiplab.ss.acl.com/api/v2.0/projects",
"url_password": null,
"url_username": null,
"use_proxy": true,
"validate_certs": false
}
},
"json": [
{
"chart_count": 0,
"creation_time": "2021-10-21T07:35:28.228Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 56,
"items": [],
"project_id": 75,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "54"
},
"name": "harbor-esl-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 75,
"registry_id": 101,
"repo_count": 0,
"update_time": "2021-10-21T07:35:28.228Z"
}
],
"msg": "OK (unknown bytes)",
"redirected": false,
"set_cookie": "sid=033af4ae913d69a8314a4edb84512aa6; Path=/; HttpOnly",
"status": 200,
"transfer_encoding": "chunked",
"url": "https://harbor.tst1e.k8s.shiplab.ss.acl.com/api/v2.0/projects",
"vary": "Accept-Encoding",
"x_request_id": "c1d907c3-e324-49a5-a582-82f9672d1f8d",
"x_total_count": "8"
}
TASK [../roles/harbor-retention-rules : creating retention rules] *******************************************************************************************
task path: /home/user1/repos/harbor/konvoy/ansible/roles/harbor-retention-rules/tasks/main.yml:36
<harbor.tst1e.k8s.shiplab.ss.acl.com> ESTABLISH LOCAL CONNECTION FOR USER: MEA+user1
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'echo '"'"'~MEA+user1'"'"' && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo ~MEA+user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702 `" && echo ansible-tmp-1634916125.1009495-142461137289702="` echo ~MEA+user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702 `" ) && sleep 0'
Using module file /usr/lib/python3.7/site-packages/ansible/modules/net_tools/basics/uri.py
<harbor.tst1e.k8s.shiplab.ss.acl.com> PUT /home/user1/.ansible/tmp/ansible-local-1736l28mza02/tmpy38u0lww TO /home/user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702/AnsiballZ_uri.py
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'chmod u+x /home/user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702/ /home/user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702/AnsiballZ_uri.py && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c '/usr/bin/python /home/user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702/AnsiballZ_uri.py && sleep 0'
<harbor.tst1e.k8s.shiplab.ss.acl.com> EXEC /bin/sh -c 'rm -f -r /home/user1/.ansible/tmp/ansible-tmp-1634916125.1009495-142461137289702/ > /dev/null 2>&1 && sleep 0'
failed: [harbor_shiplab_test1e] (item={'chart_count': 0, 'creation_time': '2021-10-21T07:35:28.228Z', 'current_user_role_id': 1, 'current_user_role_ids': [1], 'cve_allowlist': {'creation_time': '0001-01-01T00:00:00.000Z', 'id': 56, 'items': [], 'project_id': 75, 'update_time': '0001-01-01T00:00:00.000Z'}, 'metadata': {'public': 'true', 'retention_id': '54'}, 'name': 'harbor-esl-proxy', 'owner_id': 1, 'owner_name': 'admin', 'project_id': 75, 'registry_id': 101, 'repo_count': 0, 'update_time': '2021-10-21T07:35:28.228Z'}) => {
"ansible_loop_var": "item",
"changed": false,
"connection": "close",
"content": "{\"errors\":[{\"code\":\"UNPROCESSABLE_ENTITY\",\"message\":\"validation failure list:\\nparsing policy body from \\\"\\\" failed, because json: cannot unmarshal string into Go struct field RetentionPolicy.id of type int64\"}]}\n",
"content_length": "213",
"content_type": "application/json; charset=utf-8",
"date": "Fri, 22 Oct 2021 15:22:17 GMT",
"elapsed": 3,
"invocation": {
"module_args": {
"attributes": null,
"backup": null,
"body": {
"algorithm": "or",
"id": "54",
"rules": [
{
"action": "retain",
"params": {
"latestPushedK": 3
},
"scope_selectors": {
"repository": [
{
"decoration": "repoMatches",
"kind": "doublestar",
"pattern": "**"
}
]
},
"tag_selectors": [
{
"decoration": "matches",
"extras": "{\"untagged\":true}",
"kind": "doublestar",
"pattern": "**"
}
],
"template": "latestPushedK"
}
],
"scope": {
"level": "project",
"ref": "75"
},
"trigger": {
"kind": "Schedule",
"settings": {
"cron": "0 0 * * * *"
}
}
},
"body_format": "json",
"client_cert": null,
"client_key": null,
"content": null,
"creates": null,
"delimiter": null,
"dest": null,
"directory_mode": null,
"follow": false,
"follow_redirects": "safe",
"force": false,
"force_basic_auth": false,
"group": null,
"headers": {
"Accept": "application/json, text/plain, */*",
"Authorization": "Basic YWRtaW46SGFyYm9yMTIzNDU=",
"Content-Type": "application/json"
},
"http_agent": "ansible-httpget",
"method": "PUT",
"mode": null,
"owner": null,
"regexp": null,
"remote_src": null,
"removes": null,
"return_content": false,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"src": null,
"status_code": [
"200"
],
"timeout": 30,
"unix_socket": null,
"unsafe_writes": null,
"url": "https://harbor.tst1e.k8s.shiplab.ss.acl.com/api/v2.0/retentions/54",
"url_password": null,
"url_username": null,
"use_proxy": true,
"validate_certs": false
}
},
"item": {
"chart_count": 0,
"creation_time": "2021-10-21T07:35:28.228Z",
"current_user_role_id": 1,
"current_user_role_ids": [
1
],
"cve_allowlist": {
"creation_time": "0001-01-01T00:00:00.000Z",
"id": 56,
"items": [],
"project_id": 75,
"update_time": "0001-01-01T00:00:00.000Z"
},
"metadata": {
"public": "true",
"retention_id": "54"
},
"name": "harbor-esl-proxy",
"owner_id": 1,
"owner_name": "admin",
"project_id": 75,
"registry_id": 101,
"repo_count": 0,
"update_time": "2021-10-21T07:35:28.228Z"
},
"json": {
"errors": [
{
"code": "UNPROCESSABLE_ENTITY",
"message": "validation failure list:\nparsing policy body from \"\" failed, because json: cannot unmarshal string into Go struct field RetentionPolicy.id of type int64"
}
]
},
"msg": "Status code was 422 and not [200]: HTTP Error 422: Unprocessable Entity",
"redirected": false,
"set_cookie": "sid=5ffe84980b5cb188e7821ae838e4fc33; Path=/; HttpOnly",
"status": 422,
"url": "https://harbor.tst1e.k8s.shiplab.ss.acl.com/api/v2.0/retentions/54",
"vary": "Accept-Encoding",
"x_request_id": "d7a855cf-3de6-47ab-8b47-d275cda5274b"
}
"skip_reason": "Conditional result was False"
}
...ignoring
META: ran handlers
META: ran handlers
PLAY RECAP **************************************************************************************************************************************************
harbor_shiplab_test1e : ok=4 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=1
Wow, what a fascinating rabbit hole :-(
So, there is an existing issue reporting that behavior, but it was closed "works-as-designed," along with some nonsense about configuring jinja2 in some incompatible way
As best I can tell from running a bunch of experiments, manually crafting that JSON payload is the only safe way:
body_format: json
body: >-
  {{
    (
      {
        "algorithm": "or",
        "id": item.metadata.retention_id | int,
        "rules": [
          {
            "action": "retain",
            "params": {
              "latestPushedK": 3
            },
            "scope_selectors": {
              "repository": [
                {
                  "decoration": "repoMatches",
                  "kind": "doublestar",
                  "pattern": "**"
                }
              ]
            },
            "tag_selectors": [
              {
                "decoration": "matches",
                "kind": "doublestar",
                "pattern": "**"
              }
            ],
            "template": "latestPushedK"
          }
        ],
        "scope": {
          "level": "project",
          "ref": item.project_id | int
        },
        "trigger": {
          "kind": "Schedule",
          "settings": {
            "cron": "0 0 * * * *"
          }
        }
      }
    ) | to_json }}
when: "item.registry_id is defined"
I wish I had a better explanation as to what is going on, but given the response to that issue, I'm not sure they even consider your experience to be a bug.
But for clarity, what is happening here is that those leading brace characters inside the () are python dict literals, which is why the "id": item.metadata.retention_id | int doesn't need any special consideration: both sides of that expression are python (err, jinja2, but same-same).
The () are likely not strictly necessary, but they do make it super obvious what is being fed into the | to_json filter.
Then, the outer {{ are the jinja2 delimiters that you're used to, there so that all the inner jinja2 code runs, and the body: >- is to get us out of yaml quoting hell. The net effect is that the whole payload is serialized inside one jinja2 expression, so the | int result never gets flattened back into a yaml string on its way to the module.
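The same pattern in miniature, with a stripped-down hypothetical body just to show the shape:

body_format: json
body: >-
  {{ {"id": item.metadata.retention_id | int} | to_json }}

Anything built as a jinja2 dict and serialized through | to_json keeps its types; anything templated into a plain yaml value comes out as a string.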

Metabase not connecting to MySQL (docker containers)

I'm running Metabase and MySQL in separate Docker containers, both connected to the same bridge network.
I can ping the MySQL container from the Metabase container. However, when I try to connect to MySQL from the Metabase interface, I get the following error:
"unexpected end of stream, read 0 bytes from 4 (socket was closed by server)".
Here is my config:
Windows 10
Metabase version v0.36.4 (196c1f6 release-0.36.x)
8.0.21 MySQL Community Server - GPL
And my docker network configuration:
[
{
"Name": "mysql-metabase-net",
"Id": "bbe21c1873049a3ce0aee6f2e8b2cd3ba5c443cc655d685368f342b42e9d6e98",
"Created": "2020-09-07T05:18:19.355990708Z",
"Scope": "local",
"Driver": "bridge",
"EnableIPv6": false,
"IPAM": {
"Driver": "default",
"Options": {},
"Config": [
{
"Subnet": "172.19.0.0/16",
"Gateway": "172.19.0.1"
}
]
},
"Internal": false,
"Attachable": false,
"Ingress": false,
"ConfigFrom": {
"Network": ""
},
"ConfigOnly": false,
"Containers": {
"7c1dfaee4a8783aae6afccbbc1970d3fb971645a972c2484e67125b7aba027bc": {
"Name": "my-container",
"EndpointID": "7ee6b1f4b850f2a9389fa6cda311eb19e28016890eafd7659255d2fab9b7a38b",
"MacAddress": "02:42:ac:13:00:02",
"IPv4Address": "172.19.0.2/16",
"IPv6Address": ""
},
"cc261fe878298ec8199a700351195901a65f3575d395971a6f5268e9a0b9d93f": {
"Name": "metabase",
"EndpointID": "0c3541adc8d42c57a14c7d3f465111c2734718734432df5a61ee676628ef79b2",
"MacAddress": "02:42:ac:13:00:03",
"IPv4Address": "172.19.0.3/16",
"IPv6Address": ""
}
},
"Options": {},
"Labels": {}
}
]
Am I doing something wrong? Any idea what the issue could be?
Thanks for your help.
Jeremy
So it turned out I had my ports messed up. Further troubleshooting, involving the public key and SSL, was solved here.
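For anyone hitting the same thing, a sketch of a setup that avoids the port confusion (the password is a placeholder; my-container matches the network config above):

docker run -d --name my-container --network mysql-metabase-net -e MYSQL_ROOT_PASSWORD=secret mysql:8.0.21
docker run -d --name metabase --network mysql-metabase-net -p 3000:3000 metabase/metabase

In the Metabase admin UI, the database host is then the container name (my-container) and the port is 3306, i.e. the container port; a -p mapping is only needed for connections coming from the Windows host, not between containers on the same network.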

Network IP addresses with jq bash command from docker inspect

I want to process docker inspect <container1> <container2> output with a jq command to retrieve the exposed ports and the IP addresses assigned on each defined network. So far I've got jq '{(.[].Name): getpath(path(.[].NetworkSettings.Networks)) | .[].IPAddress}', which is clearly far from enough.
P.S. I know it's easy to write a script that would produce such output, but I'm curious how to do it with jq alone.
Expected output
{
# .[].Name
"/prometheus_prometheus_1": [
[
# .[].Config.ExposedPorts
"9090/tcp"
],
{
# key = .[].NetworkSettings.Networks.<index>
# value = .[].NetworkSettings.Networks[].IPAddress
"prometheus_internal": "172.21.0.4",
"system-information": "172.28.0.2"
}
]
}
Input:
[
{
"Id": "c2f86bce68441f50f973ae89a835d052e74c083e35b5438f3cec2497bcac79fa",
"Created": "2018-06-12T05:33:19.811448302Z",
"Path": "/bin/prometheus",
"Args": [
"--config.file=/etc/prometheus/prometheus.yml",
"--storage.tsdb.path=/prometheus",
"--web.console.libraries=/usr/share/prometheus/console_libraries",
"--web.console.templates=/usr/share/prometheus/consoles"
],
"State": {
"Status": "running",
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 13217,
"ExitCode": 0,
"Error": "",
"StartedAt": "2018-06-12T05:33:20.264845701Z",
"FinishedAt": "0001-01-01T00:00:00Z"
},
"Image": "sha256:44a4c1c270dcb5e8caf51e3205fa23343cc903f3823eab53ae204f304d16f62a",
"ResolvConfPath": "/var/lib/docker/containers/c2f86bce68441f50f973ae89a835d052e74c083e35b5438f3cec2497bcac79fa/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/c2f86bce68441f50f973ae89a835d052e74c083e35b5438f3cec2497bcac79fa/hostname",
"HostsPath": "/var/lib/docker/containers/c2f86bce68441f50f973ae89a835d052e74c083e35b5438f3cec2497bcac79fa/hosts",
"LogPath": "/var/lib/docker/containers/c2f86bce68441f50f973ae89a835d052e74c083e35b5438f3cec2497bcac79fa/c2f86bce68441f50f973ae89a835d052e74c083e35b5438f3cec2497bcac79fa-json.log",
"Name": "/prometheus_prometheus_1",
"RestartCount": 0,
"Driver": "overlay2",
"Platform": "linux",
"MountLabel": "",
"ProcessLabel": "",
"AppArmorProfile": "",
"ExecIDs": null,
"HostConfig": {
"Binds": [
"prometheus_prometheus-data:/prometheus:rw",
"/home/sebastian/projects/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:rw"
],
"ContainerIDFile": "",
"LogConfig": {
"Type": "json-file",
"Config": {}
},
"NetworkMode": "prometheus_internal",
"PortBindings": {},
"RestartPolicy": {
"Name": "always",
"MaximumRetryCount": 0
},
"AutoRemove": false,
"VolumeDriver": "",
"VolumesFrom": [],
"CapAdd": null,
"CapDrop": null,
"Dns": null,
"DnsOptions": null,
"DnsSearch": null,
"ExtraHosts": null,
"GroupAdd": null,
"IpcMode": "shareable",
"Cgroup": "",
"Links": null,
"OomScoreAdj": 0,
"PidMode": "",
"Privileged": false,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"SecurityOpt": null,
"UTSMode": "",
"UsernsMode": "",
"ShmSize": 67108864,
"Runtime": "runc",
"ConsoleSize": [
0,
0
],
"Isolation": "",
"CpuShares": 0,
"Memory": 0,
"NanoCpus": 0,
"CgroupParent": "",
"BlkioWeight": 0,
"BlkioWeightDevice": null,
"BlkioDeviceReadBps": null,
"BlkioDeviceWriteBps": null,
"BlkioDeviceReadIOps": null,
"BlkioDeviceWriteIOps": null,
"CpuPeriod": 0,
"CpuQuota": 0,
"CpuRealtimePeriod": 0,
"CpuRealtimeRuntime": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": null,
"DeviceCgroupRules": null,
"DiskQuota": 0,
"KernelMemory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"MemorySwappiness": null,
"OomKillDisable": false,
"PidsLimit": 0,
"Ulimits": null,
"CpuCount": 0,
"CpuPercent": 0,
"IOMaximumIOps": 0,
"IOMaximumBandwidth": 0
},
"GraphDriver": {
"Data": {
"LowerDir": "/var/lib/docker/overlay2/73c16b0735eb15494741e5b821a9df3be8cebb716219ac206d9ee0042e4166ac-init/diff:/var/lib/docker/overlay2/03c66042305488fea2fe693246e90374743fbb7836ab31e4ee5cdbfbb1c97785/diff:/var/lib/docker/overlay2/80821a00b79334493729e671dfb66cc878aec19fe2d0dfbaf4cde187522b06e8/diff:/var/lib/docker/overlay2/f5071cce5513fb3c465ad883297157271ef48c6f59cd88768068224d850c6903/diff:/var/lib/docker/overlay2/c9a0cc617abb45fbd4572abb41c7281c96bc9d59c109270c7f1ec7a99f5b055b/diff:/var/lib/docker/overlay2/5b425f8a440139a8fdcdd7f7fee8189565e9e00304e96f4cfee86ff6b861e068/diff:/var/lib/docker/overlay2/e85740d997591d2f60bfa61ea0c82a58636bccaaba970e55f12cf0ff24565c70/diff:/var/lib/docker/overlay2/e9c362d971c460b02111122b241888392d77c5b8545ed520c3d74660b16db9b2/diff:/var/lib/docker/overlay2/5001184fc8ddd25204626f419c1530b970cb37f936a196fb1173f2774310085e/diff:/var/lib/docker/overlay2/ed2e676f09a4583f2194e752083a7c95be80131f055891dc39f9fed5560f5a73/diff",
"MergedDir": "/var/lib/docker/overlay2/73c16b0735eb15494741e5b821a9df3be8cebb716219ac206d9ee0042e4166ac/merged",
"UpperDir": "/var/lib/docker/overlay2/73c16b0735eb15494741e5b821a9df3be8cebb716219ac206d9ee0042e4166ac/diff",
"WorkDir": "/var/lib/docker/overlay2/73c16b0735eb15494741e5b821a9df3be8cebb716219ac206d9ee0042e4166ac/work"
},
"Name": "overlay2"
},
"Mounts": [
{
"Type": "volume",
"Name": "prometheus_prometheus-data",
"Source": "/var/lib/docker/volumes/prometheus_prometheus-data/_data",
"Destination": "/prometheus",
"Driver": "local",
"Mode": "rw",
"RW": true,
"Propagation": ""
},
{
"Type": "bind",
"Source": "/home/sebastian/projects/prometheus/prometheus.yml",
"Destination": "/etc/prometheus/prometheus.yml",
"Mode": "rw",
"RW": true,
"Propagation": "rprivate"
}
],
"Config": {
"Hostname": "c2f86bce6844",
"Domainname": "",
"User": "nobody",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"ExposedPorts": {
"9090/tcp": {}
},
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"Cmd": [
"--config.file=/etc/prometheus/prometheus.yml",
"--storage.tsdb.path=/prometheus",
"--web.console.libraries=/usr/share/prometheus/console_libraries",
"--web.console.templates=/usr/share/prometheus/consoles"
],
"Image": "prom/prometheus:v2.3.0",
"Volumes": {
"/etc/prometheus/prometheus.yml": {},
"/prometheus": {}
},
"WorkingDir": "/prometheus",
"Entrypoint": [
"/bin/prometheus"
],
"OnBuild": null,
"Labels": {
"com.docker.compose.config-hash": "b8ff39fd239739ae66051752c1b49b0119862b8f21678fb42400c907d25b1c57",
"com.docker.compose.container-number": "1",
"com.docker.compose.oneoff": "False",
"com.docker.compose.project": "prometheus",
"com.docker.compose.service": "prometheus",
"com.docker.compose.version": "1.16.1",
"maintainer": "The Prometheus Authors <prometheus-developers#googlegroups.com>"
}
},
"NetworkSettings": {
"Bridge": "",
"SandboxID": "7be77954d0e6990f565a8526b157236140238cabf0954629598ce3bb9aaae885",
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"Ports": {
"9090/tcp": null
},
"SandboxKey": "/var/run/docker/netns/7be77954d0e6",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null,
"EndpointID": "",
"Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"MacAddress": "",
"Networks": {
"prometheus_internal": {
"IPAMConfig": null,
"Links": null,
"Aliases": [
"prometheus",
"c2f86bce6844"
],
"NetworkID": "145c59986c16003f9a3a30e6b671c34b56225d90243fba88fc295c49b4dcfff6",
"EndpointID": "8e3ac8bd4719ea85358c20a8141c37377dd327265916d41ee5d1db6b7496898c",
"Gateway": "172.21.0.1",
"IPAddress": "172.21.0.4",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": "02:42:ac:15:00:04",
"DriverOpts": null
},
"system-information": {
"IPAMConfig": null,
"Links": null,
"Aliases": [
"prometheus",
"c2f86bce6844"
],
"NetworkID": "f6e67721cb73da7e8d4eacdb709de240014af9af35e66216e6186300c72de330",
"EndpointID": "2c31369f8175f2b38a5bc966a8306c8f0f11ac4c264e6ce735d00c87b8a6f44f",
"Gateway": "172.28.0.1",
"IPAddress": "172.28.0.2",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": "02:42:ac:1c:00:02",
"DriverOpts": null
}
}
}
}
]
jq solution:
jq '.[] | {(.Name) : [(.Config.ExposedPorts | keys),
(.NetworkSettings.Networks | to_entries
| map({(.key): .value.IPAddress}) | add)
]
}' input
The output:
{
"/prometheus_prometheus_1": [
[
"9090/tcp"
],
{
"prometheus_internal": "172.21.0.4",
"system-information": "172.28.0.2"
}
]
}
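Since jq reads stdin when no input file is given, the same filter can be piped straight from docker inspect, e.g. with the container from the question:

docker inspect prometheus_prometheus_1 | jq '.[] | {(.Name): [(.Config.ExposedPorts | keys), (.NetworkSettings.Networks | to_entries | map({(.key): .value.IPAddress}) | add)]}'

As for how it works: to_entries turns the Networks object into an array of {key, value} pairs, map rebuilds each pair as a single-key object mapping the network name to its IPAddress, and add merges those singleton objects back into one.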

Ansible: EC2 provisioning and Iterations

I am trying to start a bunch of EC2 instances and then install something on them based on the IPs assigned by AWS. With a single EC2 instance I can add the host and proceed without any issue, but when I create several of them using with_dict, I can't manage it anymore.
The following runs as I want, but I can't figure out how to deal with the registered variable ec2_infos I get from the provisioning:
- name: Create Test EC2 instances
  ec2:
    group: default
    image: ami-40d28157
    instance_type: '{{item.value.type}}'
    instance_tags:
      Name: "{{ tag+'-'+item.value.name }}"
    key_name: privatekey
    region: us-west-1
    vpc_subnet_id: subnet-REDACTD
    wait: yes
  with_dict: '{{ec2_stack}}'
  register: ec2_infos
With a dictionary like
ec2_stack:
  serv1:
    type: t2.micro
    name: server1
  serv2:
    type: t2.small
    name: server2
ec2_infos is structured like:
"ec2_infos": {
"changed": true,
"msg": "All items completed",
"results": [
{
"_ansible_item_result": true,
"_ansible_no_log": false,
"_ansible_parsed": true,
"changed": true,
"instance_ids": [
"i-0fewq09812ddq6"
],
"instances": [
{
"ami_launch_index": "0",
"architecture": "x86_64",
"block_device_mapping": {
"/dev/sda1": {
"delete_on_termination": true,
"status": "attached",
"volume_id": "vol-0987654"
}
},
"dns_name": "",
"ebs_optimized": false,
"groups": {
"sg-qdwdww": "default"
},
"hypervisor": "xen",
"id": "i-083665656521dwq6",
"image_id": "ami-40d28157",
"launch_time": "2016-11-24T20:38:53.000Z",
"placement": "us-west-1d",
"private_ip": "x.x.x.x",
"public_dns_name": "",
"public_ip": null,
"ramdisk": null,
"region": "us-east-1",
"root_device_name": "/dev/sda1",
"root_device_type": "ebs",
"state": "running",
"state_code": 16,
"tags": {
"Name": "server1",
"Team": "blah"
},
"tenancy": "default","tenancy": "default",
"virtualization_type": "hvm"
}
],
"invocation": {
"module_args": {
"assign_public_ip": false,
"exact_count": null,
"group": [
"default"
],
"group_id": null,
"id": null,
"image": "ami-40d28157",
"instance_ids": null,
"instance_initiated_shutdown_behavior": null,
"instance_profile_name": null,
"instance_tags": {
"Name": "server1",
"Team": "blah"
},
"instance_type": "t2.micro",
"kernel": null,
"volumes": null,
"vpc_subnet_id": "subnet-abcdfed",
"wait": true,
"wait_timeout": "300",
"zone": null
},
"module_name": "ec2"
},
"item": {
"key": "serv1",
"value": {
"name": "server1",
"type": "t2.micro"
}
},
"tagged_instances": []
},
{
"_ansible_item_result": true,
"_ansible_no_log": false,
"_ansible_parsed": true,
"changed": true,
"instance_ids": [
"i-0971278624334fd"
],
"instances": [
{
"ami_launch_index": "0",
"architecture": "x86_64",
"block_device_mapping": {
"/dev/sda1": {
"delete_on_termination": true,
"status": "attached",
"volume_id": "vol-9999999"
}
},
"dns_name": "",
"ebs_optimized": false,
"groups": {
"sg-redactd": "default"
},
"launch_time": "2016-11-24T20:39:21.000Z",
"private_ip": "y.y.y.y",
"public_dns_name": "",
"public_ip": null,
"ramdisk": null,
"state": "running",
"state_code": 16,
"tags": {
"Name": "serv2",
"Team": "blah"
},
"tenancy": "default",
"virtualization_type": "hvm"
}
],
"invocation": {
"module_args": {
"assign_public_ip": false,
"wait_timeout": "300",
"zone": null
},
"module_name": "ec2"
},
"item": {
"key": "server2",
"value": {
"name": "serv2",
"type": "t2.small"
}
},
"tagged_instances": []
}
]
}
I tried with_items and with_subelements in different ways, but I can't manage to get all the IPs of the new EC2 instances. I don't even need to sort them, just extract them from the instances part and feed them to add_host so I can proceed.
Does anybody know a clean way to do so, or would someone be kind enough to explain how to properly deal with a variable registered after a loop?
Answer from the comments:
ec2_infos.results | map(attribute='instances') | sum(start=[]) | map(attribute='private_ip') | list
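A sketch of how that expression might feed add_host (the group name launched is made up):

- name: add new instance IPs to the in-memory inventory
  add_host:
    name: "{{ item }}"
    groups: launched
  with_items: "{{ ec2_infos.results | map(attribute='instances') | sum(start=[]) | map(attribute='private_ip') | list }}"

map(attribute='instances') pulls the instances list out of each loop result, sum(start=[]) flattens the resulting list of lists, and the second map extracts private_ip from every instance.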

How to access a mysql database that is inside a Docker container?

I'm working in a Windows environment and I have my MySQL database in a Docker container, but how should I access it from an external application?
Right now I just want to use a regular Java class to test the connection, but what address should I use? I've tried the boot2docker IP, but that doesn't work. I've also tried the IP I got with the docker inspect command, but I haven't gotten any result.
[
{
"Id": "ee02d1e463056c8dcd878fa7d9746c39b4377e051d11a8853be274b206b73c16",
"Created": "2015-08-11T16:07:44.597618319Z",
"Path": "/entrypoint.sh",
"Args": [
"mysqld"
],
"State": {
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 2167,
"ExitCode": 0,
"Error": "",
"StartedAt": "2015-08-11T16:38:38.256276665Z",
"FinishedAt": "2015-08-11T16:38:08.508576127Z"
},
"Image": "a128139aadf282a10ec937a60010f837fa1b91dd367e05c7ca6197a84b3115b3",
"NetworkSettings": {
"Bridge": "",
"EndpointID": "a6e0a25891c065dc47b9dccb19c89e0ab7ee358a2b7cf947f6385bb47b99bac4",
"Gateway": "172.17.42.1",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"HairpinMode": false,
"IPAddress": "172.17.0.5",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"MacAddress": "02:42:ac:11:00:05",
"NetworkID": "5c8e21999516c552412027ab316d3a8d8d813952fb207c1de9ec38040895fc50",
"PortMapping": null,
"Ports": {
"3306/tcp": null
},
"SandboxKey": "/var/run/docker/netns/ee02d1e46305",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null
},
"ResolvConfPath": "/mnt/sda1/var/lib/docker/containers/ee02d1e463056c8dcd878fa7d9746c39b4377e051d11a8853be274b206b73c16/resolv.conf",
"HostnamePath": "/mnt/sda1/var/lib/docker/containers/ee02d1e463056c8dcd878fa7d9746c39b4377e051d11a8853be274b206b73c16/hostname",
"HostsPath": "/mnt/sda1/var/lib/docker/containers/ee02d1e463056c8dcd878fa7d9746c39b4377e051d11a8853be274b206b73c16/hosts",
"LogPath": "/mnt/sda1/var/lib/docker/containers/ee02d1e463056c8dcd878fa7d9746c39b4377e051d11a8853be274b206b73c16/ee02d1e463056c8dcd878fa7d9746c39b4377e051d11a8853be274b206b73c16-json.log",
"Name": "/fluxtream",
"RestartCount": 0,
"Driver": "aufs",
"ExecDriver": "native-0.2",
"MountLabel": "",
"ProcessLabel": "",
"Volumes": {
"/var/lib/mysql": "/mnt/sda1/var/lib/docker/volumes/5a05900d9551d22235e51a7d03a7354156fce1bdea3e7378f52df8d33fee1f4f/_data"
},
"VolumesRW": {
"/var/lib/mysql": true
},
"AppArmorProfile": "",
"ExecIDs": [
"8fd70881d9cfed795dfb6850d513e54490432fd1e55e8bf8f6040f71cc1e7a4c",
"d3ca3e35de0222127c7e5da3e6ab45157cb06f5716c0c2bbfdc9a7096230786d",
"b75620585c02840a62169bc5adf0a5a80fa265194ac74136c54dc31cf9edd653",
"a61e680448ad796b1ad113776c5ba46f3b8a03f6601d843b005722748c9d5669"
],
"HostConfig": {
"Binds": null,
"ContainerIDFile": "",
"LxcConf": [],
"Memory": 0,
"MemorySwap": 0,
"CpuShares": 0,
"CpuPeriod": 0,
"CpusetCpus": "",
"CpusetMems": "",
"CpuQuota": 0,
"BlkioWeight": 0,
"OomKillDisable": false,
"Privileged": false,
"PortBindings": {},
"Links": null,
"PublishAllPorts": false,
"Dns": null,
"DnsSearch": null,
"ExtraHosts": null,
"VolumesFrom": null,
"Devices": [],
"NetworkMode": "bridge",
"IpcMode": "",
"PidMode": "",
"UTSMode": "",
"CapAdd": null,
"CapDrop": null,
"RestartPolicy": {
"Name": "no",
"MaximumRetryCount": 0
},
"SecurityOpt": null,
"ReadonlyRootfs": false,
"Ulimits": null,
"LogConfig": {
"Type": "json-file",
"Config": {}
},
"CgroupParent": ""
},
"Config": {
"Hostname": "ee02d1e46305",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"PortSpecs": null,
"ExposedPorts": {
"3306/tcp": {}
},
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"MYSQL_ROOT_PASSWORD=fluxtream",
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"MYSQL_MAJOR=5.6",
"MYSQL_VERSION=5.6.26"
],
"Cmd": [
"mysqld"
],
"Image": "mysql:5.6.26",
"Volumes": {
"/var/lib/mysql": {}
},
"VolumeDriver": "",
"WorkingDir": "",
"Entrypoint": [
"/entrypoint.sh"
],
"NetworkDisabled": false,
"MacAddress": "",
"OnBuild": null,
"Labels": {}
}
}
]
My dev environment:
Windows 7 Pro
Docker 1.7.0
Boot2Docker-cli version 1.7.0
MySQL container 5.6.26
What worked for me is mapping the port when I created the container:
docker run --name YOUR_CONTAINER -p 3306:3306 -e MYSQL_ROOT_PASSWORD=YOUR_PASS -d mysql:5.6.26
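With the port published this way, connections from Windows go to the boot2docker VM's address (whatever boot2docker ip prints, commonly 192.168.59.103), not to the container's 172.17.x.x address. A hypothetical JDBC URL for the Java test class would then look like:

jdbc:mysql://192.168.59.103:3306/mysql

using user root and the MYSQL_ROOT_PASSWORD you set.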