-rw-r--r--filter_plugins/openshift_master.py15
-rw-r--r--inventory/byo/hosts.origin.example10
-rw-r--r--inventory/byo/hosts.ose.example8
-rw-r--r--playbooks/adhoc/openshift_hosted_logging_efk.yaml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml4
-rw-r--r--playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml4
l---------playbooks/byo/openshift-preflight/roles1
-rw-r--r--playbooks/byo/rhel_subscribe.yml2
-rw-r--r--playbooks/common/openshift-cluster/config.yml4
-rw-r--r--playbooks/common/openshift-cluster/disable_excluder.yml11
-rw-r--r--playbooks/common/openshift-cluster/openshift_hosted.yml26
-rw-r--r--playbooks/common/openshift-cluster/redeploy-certificates/router.yml31
-rw-r--r--playbooks/common/openshift-cluster/reset_excluder.yml8
-rw-r--r--playbooks/common/openshift-cluster/upgrades/post_control_plane.yml18
-rw-r--r--playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml4
-rw-r--r--roles/lib_openshift/library/oadm_manage_node.py23
-rw-r--r--roles/lib_openshift/library/oc_edit.py23
-rw-r--r--roles/lib_openshift/library/oc_label.py23
-rw-r--r--roles/lib_openshift/library/oc_obj.py23
-rw-r--r--roles/lib_openshift/library/oc_route.py23
-rw-r--r--roles/lib_openshift/library/oc_scale.py23
-rw-r--r--roles/lib_openshift/library/oc_secret.py23
-rw-r--r--roles/lib_openshift/library/oc_service.py23
-rw-r--r--roles/lib_openshift/library/oc_serviceaccount.py25
-rw-r--r--roles/lib_openshift/library/oc_serviceaccount_secret.py1524
-rw-r--r--roles/lib_openshift/library/oc_version.py23
-rw-r--r--roles/lib_openshift/src/ansible/oc_serviceaccount.py2
-rw-r--r--roles/lib_openshift/src/ansible/oc_serviceaccount_secret.py29
-rw-r--r--roles/lib_openshift/src/class/oc_serviceaccount_secret.py138
-rw-r--r--roles/lib_openshift/src/doc/serviceaccount_secret68
-rw-r--r--roles/lib_openshift/src/sources.yml11
-rwxr-xr-xroles/lib_openshift/src/test/integration/oc_serviceaccount_secret.yml79
-rwxr-xr-xroles/lib_openshift/src/test/unit/oc_serviceaccount.py6
-rwxr-xr-xroles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py257
-rw-r--r--roles/lib_utils/library/yedit.py23
-rw-r--r--roles/lib_utils/src/class/yedit.py23
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-ephemeral-template.json3
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-persistent-template.json3
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/mysql-ephemeral-template.json3
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-ephemeral-template.json5
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-persistent-template.json5
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/redis-ephemeral-template.json3
-rw-r--r--roles/openshift_examples/files/examples/v1.4/db-templates/redis-persistent-template.json3
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/README.md5
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql-persistent.json575
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql.json114
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql-persistent.json519
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql.json24
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql-persistent.json532
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql.json20
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb-persistent.json541
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb.json18
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql-persistent.json598
-rw-r--r--roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql.json52
-rw-r--r--roles/openshift_excluder/README.md44
-rw-r--r--roles/openshift_excluder/meta/main.yml15
-rw-r--r--roles/openshift_excluder/tasks/exclude.yml11
-rw-r--r--roles/openshift_excluder/tasks/install.yml16
-rw-r--r--roles/openshift_excluder/tasks/main.yml2
-rw-r--r--roles/openshift_excluder/tasks/reset.yml12
-rw-r--r--roles/openshift_excluder/tasks/status.yml56
-rw-r--r--roles/openshift_excluder/tasks/unexclude.yml12
-rwxr-xr-xroles/openshift_facts/library/openshift_facts.py3
-rw-r--r--roles/openshift_hosted_templates/files/v1.4/enterprise/registry-console.yaml4
-rw-r--r--roles/openshift_hosted_templates/files/v1.5/enterprise/registry-console.yaml4
-rw-r--r--roles/openshift_logging/README.md2
-rw-r--r--roles/openshift_logging/defaults/main.yml2
-rw-r--r--roles/openshift_logging/templates/kibana.j24
-rw-r--r--roles/openshift_metrics/defaults/main.yaml3
-rw-r--r--roles/openshift_metrics/tasks/install_cassandra.yaml1
-rw-r--r--roles/openshift_metrics/tasks/install_hawkular.yaml1
-rw-r--r--roles/openshift_metrics/tasks/install_heapster.yaml1
-rw-r--r--roles/openshift_metrics/templates/hawkular_cassandra_rc.j26
-rw-r--r--roles/openshift_metrics/templates/hawkular_metrics_rc.j26
-rw-r--r--roles/openshift_metrics/templates/heapster.j26
82 files changed, 5540 insertions, 264 deletions
diff --git a/filter_plugins/openshift_master.py b/filter_plugins/openshift_master.py
index 77b0a3dc9..4ccee91f9 100644
--- a/filter_plugins/openshift_master.py
+++ b/filter_plugins/openshift_master.py
@@ -525,13 +525,7 @@ class FilterModule(object):
'admin.key',
'admin.kubeconfig',
'master.kubelet-client.crt',
- 'master.kubelet-client.key',
- 'openshift-registry.crt',
- 'openshift-registry.key',
- 'openshift-registry.kubeconfig',
- 'openshift-router.crt',
- 'openshift-router.key',
- 'openshift-router.kubeconfig']
+ 'master.kubelet-client.key']
if bool(include_ca):
certs += ['ca.crt', 'ca.key']
if bool(include_keys):
@@ -547,6 +541,13 @@ class FilterModule(object):
if bool(hostvars['openshift']['common']['version_gte_3_3_or_1_3']):
certs += ['service-signer.crt',
'service-signer.key']
+ if not bool(hostvars['openshift']['common']['version_gte_3_5_or_1_5']):
+ certs += ['openshift-registry.crt',
+ 'openshift-registry.key',
+ 'openshift-registry.kubeconfig',
+ 'openshift-router.crt',
+ 'openshift-router.key',
+ 'openshift-router.kubeconfig']
return certs
@staticmethod
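The hunks above move the registry and router client certificates out of the unconditional master certificate list and behind a version check, so those files are only expected on clusters older than 3.5/1.5. A minimal sketch of the resulting selection logic (a standalone function for illustration only; the real filter reads these booleans from hostvars['openshift']['common']):

    def master_certs(version_gte_3_3_or_1_3, version_gte_3_5_or_1_5,
                     include_ca=False):
        # Certificates always expected on a master.
        certs = ['admin.crt', 'admin.key', 'admin.kubeconfig',
                 'master.kubelet-client.crt', 'master.kubelet-client.key']
        if include_ca:
            certs += ['ca.crt', 'ca.key']
        if version_gte_3_3_or_1_3:
            certs += ['service-signer.crt', 'service-signer.key']
        if not version_gte_3_5_or_1_5:
            # Registry/router certificates are only listed for pre-3.5/1.5 clusters.
            certs += ['openshift-registry.crt', 'openshift-registry.key',
                      'openshift-registry.kubeconfig', 'openshift-router.crt',
                      'openshift-router.key', 'openshift-router.kubeconfig']
        return certs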
diff --git a/inventory/byo/hosts.origin.example b/inventory/byo/hosts.origin.example
index 1b89bed07..c5f9db909 100644
--- a/inventory/byo/hosts.origin.example
+++ b/inventory/byo/hosts.origin.example
@@ -474,18 +474,18 @@ openshift_master_identity_providers=[{'name': 'htpasswd_auth', 'login': 'true',
# pods are deleted
#
# Other Logging Options -- Common items you may wish to reconfigure, for the complete
-# list of options please see roles/openshift_hosted_logging/README.md
+# list of options please see roles/openshift_logging/README.md
#
# Configure loggingPublicURL in the master config for aggregate logging, defaults
# to https://kibana.{{ openshift_master_default_subdomain }}
#openshift_master_logging_public_url=https://kibana.example.com
# Configure the number of elastic search nodes, unless you're using dynamic provisioning
# this value must be 1
-#openshift_hosted_logging_elasticsearch_cluster_size=1
-#openshift_hosted_logging_hostname=logging.apps.example.com
+#openshift_logging_es_cluster_size=1
+#openshift_logging_kibana_hostname=logging.apps.example.com
# Configure the prefix and version for the deployer image
-#openshift_hosted_logging_deployer_prefix=registry.example.com:8888/openshift3/
-#openshift_hosted_logging_deployer_version=3.3.0
+#openshift_logging_image_prefix=registry.example.com:8888/openshift3/
+#openshift_logging_image_version=3.3.0
# Configure the multi-tenant SDN plugin (default is 'redhat/openshift-ovs-subnet')
# os_sdn_network_plugin_name='redhat/openshift-ovs-multitenant'
diff --git a/inventory/byo/hosts.ose.example b/inventory/byo/hosts.ose.example
index 855be3db4..cacdbf115 100644
--- a/inventory/byo/hosts.ose.example
+++ b/inventory/byo/hosts.ose.example
@@ -482,11 +482,11 @@ openshift_master_identity_providers=[{'name': 'htpasswd_auth', 'login': 'true',
#openshift_master_logging_public_url=https://kibana.example.com
# Configure the number of elastic search nodes, unless you're using dynamic provisioning
# this value must be 1
-#openshift_hosted_logging_elasticsearch_cluster_size=1
-#openshift_hosted_logging_hostname=logging.apps.example.com
+#openshift_logging_es_cluster_size=1
+#openshift_logging_kibana_hostname=logging.apps.example.com
# Configure the prefix and version for the deployer image
-#openshift_hosted_logging_deployer_prefix=registry.example.com:8888/openshift3/
-#openshift_hosted_logging_deployer_version=3.3.0
+#openshift_logging_image_prefix=registry.example.com:8888/openshift3/
+#openshift_logging_image_version=3.3.0
# Configure the multi-tenant SDN plugin (default is 'redhat/openshift-ovs-subnet')
# os_sdn_network_plugin_name='redhat/openshift-ovs-multitenant'
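Both example inventories above rename the same set of commented-out logging variables from the old openshift_hosted_logging_* names to the openshift_logging role's names. As a quick reference, the renames visible in these hunks reduce to the following mapping (illustrative only; see roles/openshift_logging/README.md for the complete option list):

    # Old inventory variable -> new openshift_logging variable
    LOGGING_VAR_RENAMES = {
        'openshift_hosted_logging_elasticsearch_cluster_size': 'openshift_logging_es_cluster_size',
        'openshift_hosted_logging_hostname': 'openshift_logging_kibana_hostname',
        'openshift_hosted_logging_deployer_prefix': 'openshift_logging_image_prefix',
        'openshift_hosted_logging_deployer_version': 'openshift_logging_image_version',
    }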
diff --git a/playbooks/adhoc/openshift_hosted_logging_efk.yaml b/playbooks/adhoc/openshift_hosted_logging_efk.yaml
index 0b30a221d..e83351272 100644
--- a/playbooks/adhoc/openshift_hosted_logging_efk.yaml
+++ b/playbooks/adhoc/openshift_hosted_logging_efk.yaml
@@ -1,7 +1,7 @@
---
- hosts: masters[0]
roles:
- - role: openshift_hosted_logging
+ - role: openshift_logging
openshift_hosted_logging_cleanup: no
- name: Update master-config for publicLoggingURL
@@ -11,6 +11,6 @@
logging_hostname: "{{ openshift_hosted_logging_hostname | default('kibana.' ~ (openshift_master_default_subdomain | default('router.default.svc.cluster.local', true))) }}"
tasks:
- include_role:
- name: openshift_hosted_logging
+ name: openshift_logging
tasks_from: update_master_config
when: openshift_hosted_logging_deploy | default(false) | bool
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml
index bb08ca837..b1510e062 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml
@@ -46,6 +46,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
index 907196d8f..d791e89f6 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
@@ -51,6 +51,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
index 5e28072da..f0b2a2c75 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
@@ -47,6 +47,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml
index 6b69348b7..82a1d0935 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml
@@ -46,6 +46,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
index 25b669f86..d1c2bd17a 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
@@ -51,6 +51,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
index 9868cb5b4..f6e66c477 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
@@ -47,6 +47,10 @@
tags:
- pre_upgrade
+- include: ../disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
index bef15eaab..e55ab1b16 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
@@ -46,6 +46,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
index dd88dde5f..e18b4280c 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
@@ -51,6 +51,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml
index 931a1bcd7..259be6f8e 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml
@@ -47,6 +47,10 @@
tags:
- pre_upgrade
+- include: ../../../../common/openshift-cluster/disable_excluder.yml
+ tags:
+ - pre_upgrade
+
- include: ../../../../common/openshift-cluster/initialize_openshift_version.yml
tags:
- pre_upgrade
diff --git a/playbooks/byo/openshift-preflight/roles b/playbooks/byo/openshift-preflight/roles
new file mode 120000
index 000000000..20c4c58cf
--- /dev/null
+++ b/playbooks/byo/openshift-preflight/roles
@@ -0,0 +1 @@
+../../../roles
\ No newline at end of file
diff --git a/playbooks/byo/rhel_subscribe.yml b/playbooks/byo/rhel_subscribe.yml
index 62f62680e..65c0b1c01 100644
--- a/playbooks/byo/rhel_subscribe.yml
+++ b/playbooks/byo/rhel_subscribe.yml
@@ -1,5 +1,5 @@
---
-- include: ../../common/openshift-cluster/std_include.yml
+- include: ../common/openshift-cluster/std_include.yml
tags:
- always
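Both of the small path changes above resolve relative to the file that contains them: the new playbooks/byo/openshift-preflight/roles symlink climbs three levels up to the repository-level roles/ directory, and rhel_subscribe.yml (which lives in playbooks/byo/) needs only a single ../ to reach playbooks/common/. A quick check of the resolved paths:

    import os.path

    # Symlink target, resolved from the directory that contains the link.
    os.path.normpath('playbooks/byo/openshift-preflight/../../../roles')
    # -> 'roles' (the repository-level roles directory)

    # Include path from playbooks/byo/rhel_subscribe.yml.
    os.path.normpath('playbooks/byo/../common/openshift-cluster/std_include.yml')
    # -> 'playbooks/common/openshift-cluster/std_include.yml'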
diff --git a/playbooks/common/openshift-cluster/config.yml b/playbooks/common/openshift-cluster/config.yml
index a0ba735ab..113b401f9 100644
--- a/playbooks/common/openshift-cluster/config.yml
+++ b/playbooks/common/openshift-cluster/config.yml
@@ -26,6 +26,10 @@
openshift_docker_selinux_enabled: "{{ lookup('oo_option', 'docker_selinux_enabled') }}"
when: openshift_docker_selinux_enabled is not defined
+- include: disable_excluder.yml
+ tags:
+ - always
+
- include: ../openshift-etcd/config.yml
tags:
- etcd
diff --git a/playbooks/common/openshift-cluster/disable_excluder.yml b/playbooks/common/openshift-cluster/disable_excluder.yml
new file mode 100644
index 000000000..eb146bab8
--- /dev/null
+++ b/playbooks/common/openshift-cluster/disable_excluder.yml
@@ -0,0 +1,11 @@
+---
+- name: Record excluder state and disable
+ hosts: l_oo_all_hosts
+ gather_facts: no
+ tasks:
+ - include_role:
+ name: openshift_excluder
+ tasks_from: status
+ - include_role:
+ name: openshift_excluder
+ tasks_from: unexclude
diff --git a/playbooks/common/openshift-cluster/openshift_hosted.yml b/playbooks/common/openshift-cluster/openshift_hosted.yml
index 3c4a99887..7b58eebc3 100644
--- a/playbooks/common/openshift-cluster/openshift_hosted.yml
+++ b/playbooks/common/openshift-cluster/openshift_hosted.yml
@@ -27,21 +27,21 @@
logging_elasticsearch_ops_cluster_size: "{{ openshift_hosted_logging_elasticsearch_ops_cluster_size | default(1) }}"
roles:
- role: openshift_hosted
- - role: openshift_hosted_metrics
+ - role: openshift_metrics
when: openshift_hosted_metrics_deploy | default(false) | bool
- - role: openshift_hosted_logging
+ - role: openshift_logging
when: openshift_hosted_logging_deploy | default(false) | bool
- openshift_hosted_logging_hostname: "{{ logging_hostname }}"
- openshift_hosted_logging_ops_hostname: "{{ logging_ops_hostname }}"
- openshift_hosted_logging_master_public_url: "{{ logging_master_public_url }}"
- openshift_hosted_logging_elasticsearch_cluster_size: "{{ logging_elasticsearch_cluster_size }}"
- openshift_hosted_logging_elasticsearch_pvc_dynamic: "{{ 'true' if openshift_hosted_logging_storage_kind | default(none) == 'dynamic' else '' }}"
- openshift_hosted_logging_elasticsearch_pvc_size: "{{ openshift.hosted.logging.storage.volume.size if openshift_hosted_logging_storage_kind | default(none) in ['dynamic','nfs'] else '' }}"
- openshift_hosted_logging_elasticsearch_pvc_prefix: "{{ 'logging-es' if openshift_hosted_logging_storage_kind | default(none) == 'dynamic' else '' }}"
- openshift_hosted_logging_elasticsearch_ops_cluster_size: "{{ logging_elasticsearch_ops_cluster_size }}"
- openshift_hosted_logging_elasticsearch_ops_pvc_dynamic: "{{ 'true' if openshift_hosted_logging_storage_kind | default(none) == 'dynamic' else '' }}"
- openshift_hosted_logging_elasticsearch_ops_pvc_size: "{{ openshift.hosted.logging.storage.volume.size if openshift_hosted_logging_storage_kind | default(none) in ['dynamic','nfs' ] else '' }}"
- openshift_hosted_logging_elasticsearch_ops_pvc_prefix: "{{ 'logging-es' if openshift_hosted_logging_storage_kind | default(none) =='dynamic' else '' }}"
+ openshift_logging_kibana_hostname: "{{ logging_hostname }}"
+ openshift_logging_kibana_ops_hostname: "{{ logging_ops_hostname }}"
+ openshift_logging_master_public_url: "{{ logging_master_public_url }}"
+ openshift_logging_es_cluster_size: "{{ logging_elasticsearch_cluster_size }}"
+ openshift_logging_es_pvc_dynamic: "{{ 'true' if openshift_hosted_logging_storage_kind | default(none) == 'dynamic' else '' }}"
+ openshift_logging_es_pvc_size: "{{ openshift.hosted.logging.storage.volume.size if openshift_hosted_logging_storage_kind | default(none) in ['dynamic','nfs'] else '' }}"
+ openshift_logging_es_pvc_prefix: "{{ 'logging-es' if openshift_hosted_logging_storage_kind | default(none) == 'dynamic' else '' }}"
+ openshift_logging_es_ops_cluster_size: "{{ logging_elasticsearch_ops_cluster_size }}"
+ openshift_logging_es_ops_pvc_dynamic: "{{ 'true' if openshift_hosted_logging_storage_kind | default(none) == 'dynamic' else '' }}"
+ openshift_logging_es_ops_pvc_size: "{{ openshift.hosted.logging.storage.volume.size if openshift_hosted_logging_storage_kind | default(none) in ['dynamic','nfs' ] else '' }}"
+ openshift_logging_es_ops_pvc_prefix: "{{ 'logging-es' if openshift_hosted_logging_storage_kind | default(none) =='dynamic' else '' }}"
- role: cockpit-ui
when: ( openshift.common.version_gte_3_3_or_1_3 | bool ) and ( openshift_hosted_manage_registry | default(true) | bool ) and not (openshift.docker.hosted_registry_insecure | default(false) | bool)
diff --git a/playbooks/common/openshift-cluster/redeploy-certificates/router.yml b/playbooks/common/openshift-cluster/redeploy-certificates/router.yml
index 03d64685d..a9e9f0915 100644
--- a/playbooks/common/openshift-cluster/redeploy-certificates/router.yml
+++ b/playbooks/common/openshift-cluster/redeploy-certificates/router.yml
@@ -44,25 +44,26 @@
when: l_router_dc.rc == 0 and 'OPENSHIFT_CA_DATA' in router_env_vars and 'OPENSHIFT_CERT_DATA' in router_env_vars and 'OPENSHIFT_KEY_DATA' in router_env_vars
- block:
- - name: Generate router certificate
+ - name: Delete existing router certificate secret
command: >
- {{ openshift.common.client_binary }} adm ca create-server-cert
- --hostnames=router.default.svc,router.default.svc.cluster.local
- --signer-cert={{ openshift.common.config_base }}/master/service-signer.crt
- --signer-key={{ openshift.common.config_base }}/master/service-signer.key
- --signer-serial={{ openshift.common.config_base }}/master/ca.serial.txt
- --cert={{ mktemp.stdout }}/tls.crt
- --key={{ mktemp.stdout }}/tls.key
+ {{ openshift.common.client_binary }} delete secret/router-certs
+ --config={{ mktemp.stdout }}/admin.kubeconfig
+ -n default
- - name: Update router certificates secret
- shell: >
- {{ openshift.common.client_binary }} secret new router-certs
- {{ mktemp.stdout }}/tls.crt
- {{ mktemp.stdout }}/tls.key
- --type=kubernetes.io/tls
+ - name: Remove router service annotations
+ command: >
+ {{ openshift.common.client_binary }} annotate service/router
+ service.alpha.openshift.io/serving-cert-secret-name-
+ service.alpha.openshift.io/serving-cert-signed-by-
+ --config={{ mktemp.stdout }}/admin.kubeconfig
+ -n default
+
+ - name: Add serving-cert-secret annotation to router service
+ command: >
+ {{ openshift.common.client_binary }} annotate service/router
+ service.alpha.openshift.io/serving-cert-secret-name=router-certs
--config={{ mktemp.stdout }}/admin.kubeconfig
-n default
- -o json | oc replace -f -
when: l_router_dc.rc == 0 and 'router-certs' in router_secrets
- name: Redeploy router
diff --git a/playbooks/common/openshift-cluster/reset_excluder.yml b/playbooks/common/openshift-cluster/reset_excluder.yml
new file mode 100644
index 000000000..fe86f4c23
--- /dev/null
+++ b/playbooks/common/openshift-cluster/reset_excluder.yml
@@ -0,0 +1,8 @@
+---
+- name: Re-enable excluder if it was previously enabled
+ hosts: l_oo_all_hosts
+ gather_facts: no
+ tasks:
+ - include_role:
+ name: openshift_excluder
+ tasks_from: reset
diff --git a/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml
index 2bbcbe1f8..4135f7e94 100644
--- a/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml
+++ b/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml
@@ -26,21 +26,26 @@
registry_url: "{{ openshift.master.registry_url }}"
openshift_hosted_templates_import_command: replace
pre_tasks:
+
+ # TODO: remove temp_skip_router_registry_upgrade variable. This is a short term hack
+ # to allow ops to use this control plane upgrade, without triggering router/registry
+ # upgrade which has not yet been synced with their process.
- name: Collect all routers
command: >
{{ oc_cmd }} get pods --all-namespaces -l 'router' -o json
register: all_routers
failed_when: false
changed_when: false
+ when: temp_skip_router_registry_upgrade is not defined
- set_fact: haproxy_routers="{{ (all_routers.stdout | from_json)['items'] | oo_pods_match_component(openshift_deployment_type, 'haproxy-router') | oo_select_keys_from_list(['metadata']) }}"
- when: all_routers.rc == 0
+ when: all_routers.rc == 0 and temp_skip_router_registry_upgrade is not defined
- set_fact: haproxy_routers=[]
- when: all_routers.rc != 0
+ when: all_routers.rc != 0 and temp_skip_router_registry_upgrade is not defined
- name: Update router image to current version
- when: all_routers.rc == 0
+ when: all_routers.rc == 0 and temp_skip_router_registry_upgrade is not defined
command: >
{{ oc_cmd }} patch dc/{{ item['labels']['deploymentconfig'] }} -n {{ item['namespace'] }} -p
'{"spec":{"template":{"spec":{"containers":[{"name":"router","image":"{{ router_image }}","livenessProbe":{"tcpSocket":null,"httpGet":{"path": "/healthz", "port": 1936, "host": "localhost", "scheme": "HTTP"},"initialDelaySeconds":10,"timeoutSeconds":1}}]}}}}'
@@ -53,9 +58,10 @@
register: _default_registry
failed_when: false
changed_when: false
+ when: temp_skip_router_registry_upgrade is not defined
- name: Update registry image to current version
- when: _default_registry.rc == 0
+ when: _default_registry.rc == 0 and temp_skip_router_registry_upgrade is not defined
command: >
{{ oc_cmd }} patch dc/docker-registry -n default -p
'{"spec":{"template":{"spec":{"containers":[{"name":"registry","image":"{{ registry_image }}"}]}}}}'
@@ -74,3 +80,7 @@
- name: Warn if pluginOrderOverride is in use in master-config.yaml
debug: msg="WARNING pluginOrderOverride is being deprecated in master-config.yaml, please see https://docs.openshift.com/enterprise/latest/architecture/additional_concepts/admission_controllers.html for more information."
when: not grep_plugin_order_override | skipped and grep_plugin_order_override.rc == 0
+
+- include: ../reset_excluder.yml
+ tags:
+ - always
diff --git a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
index 59188c570..e45b635f7 100644
--- a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
+++ b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
@@ -51,3 +51,7 @@
until: node_sched.rc == 0
retries: 3
delay: 1
+
+- include: ../reset_excluder.yml
+ tags:
+ - always
diff --git a/roles/lib_openshift/library/oadm_manage_node.py b/roles/lib_openshift/library/oadm_manage_node.py
index 1da88bd0c..93c52e0df 100644
--- a/roles/lib_openshift/library/oadm_manage_node.py
+++ b/roles/lib_openshift/library/oadm_manage_node.py
@@ -310,6 +310,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -318,15 +329,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
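The Yedit.write() refactor above (repeated verbatim in each generated module below, since the lib_openshift modules embed a copy of the Yedit class) isolates the actual disk I/O in a static Yedit._write() helper so that unit tests can patch it out. A hypothetical test sketch, assuming the class is importable as shown and using only the standard unittest.mock library:

    import unittest
    from unittest import mock

    from yedit import Yedit  # assumed import path for the Yedit class above


    class YeditWriteTest(unittest.TestCase):
        @mock.patch.object(Yedit, '_write')
        def test_write_serializes_without_disk_io(self, mock_write):
            # backup defaults to False, so write() never copies the original file.
            yed = Yedit(filename='/tmp/example.yml', content={'a': {'b': 'c'}})
            changed, _ = yed.write()

            self.assertTrue(changed)
            mock_write.assert_called_once()
            # First positional argument is the target filename,
            # second is the dumped YAML text.
            self.assertEqual(mock_write.call_args[0][0], '/tmp/example.yml')


    if __name__ == '__main__':
        unittest.main()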
diff --git a/roles/lib_openshift/library/oc_edit.py b/roles/lib_openshift/library/oc_edit.py
index bbbefc610..e9011da42 100644
--- a/roles/lib_openshift/library/oc_edit.py
+++ b/roles/lib_openshift/library/oc_edit.py
@@ -338,6 +338,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -346,15 +357,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_label.py b/roles/lib_openshift/library/oc_label.py
index 9481d2a47..f2f5787f5 100644
--- a/roles/lib_openshift/library/oc_label.py
+++ b/roles/lib_openshift/library/oc_label.py
@@ -314,6 +314,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -322,15 +333,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_obj.py b/roles/lib_openshift/library/oc_obj.py
index 9fd16c03a..c55827c47 100644
--- a/roles/lib_openshift/library/oc_obj.py
+++ b/roles/lib_openshift/library/oc_obj.py
@@ -317,6 +317,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -325,15 +336,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_route.py b/roles/lib_openshift/library/oc_route.py
index 38063886c..d1b2ccb14 100644
--- a/roles/lib_openshift/library/oc_route.py
+++ b/roles/lib_openshift/library/oc_route.py
@@ -342,6 +342,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -350,15 +361,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_scale.py b/roles/lib_openshift/library/oc_scale.py
index 30ddc1801..f2b96b6ad 100644
--- a/roles/lib_openshift/library/oc_scale.py
+++ b/roles/lib_openshift/library/oc_scale.py
@@ -292,6 +292,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -300,15 +311,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_secret.py b/roles/lib_openshift/library/oc_secret.py
index b2a689def..d2ba1af51 100644
--- a/roles/lib_openshift/library/oc_secret.py
+++ b/roles/lib_openshift/library/oc_secret.py
@@ -338,6 +338,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -346,15 +357,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_service.py b/roles/lib_openshift/library/oc_service.py
index d2e38b9d7..0979f83c6 100644
--- a/roles/lib_openshift/library/oc_service.py
+++ b/roles/lib_openshift/library/oc_service.py
@@ -344,6 +344,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -352,15 +363,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_openshift/library/oc_serviceaccount.py b/roles/lib_openshift/library/oc_serviceaccount.py
index 55ab099f8..0db0d1219 100644
--- a/roles/lib_openshift/library/oc_serviceaccount.py
+++ b/roles/lib_openshift/library/oc_serviceaccount.py
@@ -290,6 +290,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -298,15 +309,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
@@ -1516,7 +1523,7 @@ class OCServiceAccount(OpenShiftCLI):
def main():
'''
- ansible oc module for route
+ ansible oc module for service accounts
'''
module = AnsibleModule(
diff --git a/roles/lib_openshift/library/oc_serviceaccount_secret.py b/roles/lib_openshift/library/oc_serviceaccount_secret.py
new file mode 100644
index 000000000..29985d62f
--- /dev/null
+++ b/roles/lib_openshift/library/oc_serviceaccount_secret.py
@@ -0,0 +1,1524 @@
+#!/usr/bin/env python
+# pylint: disable=missing-docstring
+# flake8: noqa: T001
+# ___ ___ _ _ ___ ___ _ _____ ___ ___
+# / __| __| \| | __| _ \ /_\_ _| __| \
+# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
+# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
+# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
+# | |) | (_) | | .` | (_) || | | _|| |) | | | |
+# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
+#
+# Copyright 2016 Red Hat, Inc. and/or its affiliates
+# and other contributors as indicated by the @author tags.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
+'''
+ OpenShiftCLI class that wraps the oc commands in a subprocess
+'''
+# pylint: disable=too-many-lines
+
+from __future__ import print_function
+import atexit
+import json
+import os
+import re
+import shutil
+import subprocess
+# pylint: disable=import-error
+import ruamel.yaml as yaml
+from ansible.module_utils.basic import AnsibleModule
+
+# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
+
+# -*- -*- -*- Begin included fragment: doc/serviceaccount_secret -*- -*- -*-
+
+DOCUMENTATION = '''
+---
+module: oc_serviceaccount_secret
+short_description: Module to manage openshift service account secrets
+description:
+ - Manage openshift service account secrets programmatically.
+options:
+ state:
+ description:
+ - If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
+ required: false
+ default: present
+ choices: ["present", "absent", "list"]
+ aliases: []
+ kubeconfig:
+ description:
+ - The path for the kubeconfig file to use for authentication
+ required: false
+ default: /etc/origin/master/admin.kubeconfig
+ aliases: []
+ debug:
+ description:
+ - Turn on debug output.
+ required: false
+ default: false
+ aliases: []
+ service_account:
+ description:
+ - Name of the service account.
+ required: true
+ default: None
+ aliases: []
+ namespace:
+ description:
+ - Namespace of the service account and secret.
+ required: true
+ default: None
+ aliases: []
+ secret:
+ description:
+ - The secret that should be linked to the service account.
+ required: false
+ default: None
+ aliases: []
+author:
+- "Kenny Woodson <kwoodson@redhat.com>"
+extends_documentation_fragment: []
+'''
+
+EXAMPLES = '''
+ - name: get secrets of a service account
+ oc_serviceaccount_secret:
+ state: list
+ service_account: builder
+ namespace: default
+ register: sasecretout
+
+
+ - name: Link a service account to a specific secret
+ oc_serviceaccount_secret:
+ service_account: builder
+ secret: mynewsecret
+ namespace: default
+ register: sasecretout
+'''
+
+# -*- -*- -*- End included fragment: doc/serviceaccount_secret -*- -*- -*-
+
+# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
+# noqa: E301,E302
+
+
+class YeditException(Exception):
+ ''' Exception class for Yedit '''
+ pass
+
+
+# pylint: disable=too-many-public-methods
+class Yedit(object):
+ ''' Class to modify yaml files '''
+ re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
+ re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
+ com_sep = set(['.', '#', '|', ':'])
+
+ # pylint: disable=too-many-arguments
+ def __init__(self,
+ filename=None,
+ content=None,
+ content_type='yaml',
+ separator='.',
+ backup=False):
+ self.content = content
+ self._separator = separator
+ self.filename = filename
+ self.__yaml_dict = content
+ self.content_type = content_type
+ self.backup = backup
+ self.load(content_type=self.content_type)
+ if self.__yaml_dict is None:
+ self.__yaml_dict = {}
+
+ @property
+ def separator(self):
+ ''' getter method for separator '''
+ return self._separator
+
+ @separator.setter
+ def separator(self, inc_sep):
+ ''' setter method for separator '''
+ self._separator = inc_sep
+
+ @property
+ def yaml_dict(self):
+ ''' getter method for yaml_dict '''
+ return self.__yaml_dict
+
+ @yaml_dict.setter
+ def yaml_dict(self, value):
+ ''' setter method for yaml_dict '''
+ self.__yaml_dict = value
+
+ @staticmethod
+ def parse_key(key, sep='.'):
+ '''parse the key allowing the appropriate separator'''
+ common_separators = list(Yedit.com_sep - set([sep]))
+ return re.findall(Yedit.re_key % ''.join(common_separators), key)
+
+ @staticmethod
+ def valid_key(key, sep='.'):
+ '''validate the incoming key'''
+ common_separators = list(Yedit.com_sep - set([sep]))
+ if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
+ return False
+
+ return True
+
+ @staticmethod
+ def remove_entry(data, key, sep='.'):
+ ''' remove data at location key '''
+ if key == '' and isinstance(data, dict):
+ data.clear()
+ return True
+ elif key == '' and isinstance(data, list):
+ del data[:]
+ return True
+
+ if not (key and Yedit.valid_key(key, sep)) and \
+ isinstance(data, (list, dict)):
+ return None
+
+ key_indexes = Yedit.parse_key(key, sep)
+ for arr_ind, dict_key in key_indexes[:-1]:
+ if dict_key and isinstance(data, dict):
+ data = data.get(dict_key, None)
+ elif (arr_ind and isinstance(data, list) and
+ int(arr_ind) <= len(data) - 1):
+ data = data[int(arr_ind)]
+ else:
+ return None
+
+ # process last index for remove
+ # expected list entry
+ if key_indexes[-1][0]:
+ if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
+ del data[int(key_indexes[-1][0])]
+ return True
+
+ # expected dict entry
+ elif key_indexes[-1][1]:
+ if isinstance(data, dict):
+ del data[key_indexes[-1][1]]
+ return True
+
+ @staticmethod
+ def add_entry(data, key, item=None, sep='.'):
+ ''' Add an item to a dictionary with key notation a.b.c
+ d = {'a': {'b': 'c'}}
+ key = a#b
+ return c
+ '''
+ if key == '':
+ pass
+ elif (not (key and Yedit.valid_key(key, sep)) and
+ isinstance(data, (list, dict))):
+ return None
+
+ key_indexes = Yedit.parse_key(key, sep)
+ for arr_ind, dict_key in key_indexes[:-1]:
+ if dict_key:
+ if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
+ data = data[dict_key]
+ continue
+
+ elif data and not isinstance(data, dict):
+ return None
+
+ data[dict_key] = {}
+ data = data[dict_key]
+
+ elif (arr_ind and isinstance(data, list) and
+ int(arr_ind) <= len(data) - 1):
+ data = data[int(arr_ind)]
+ else:
+ return None
+
+ if key == '':
+ data = item
+
+ # process last index for add
+ # expected list entry
+ elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
+ data[int(key_indexes[-1][0])] = item
+
+ # expected dict entry
+ elif key_indexes[-1][1] and isinstance(data, dict):
+ data[key_indexes[-1][1]] = item
+
+ return data
+
+ @staticmethod
+ def get_entry(data, key, sep='.'):
+ ''' Get an item from a dictionary with key notation a.b.c
+ d = {'a': {'b': 'c'}}
+ key = a.b
+ return c
+ '''
+ if key == '':
+ pass
+ elif (not (key and Yedit.valid_key(key, sep)) and
+ isinstance(data, (list, dict))):
+ return None
+
+ key_indexes = Yedit.parse_key(key, sep)
+ for arr_ind, dict_key in key_indexes:
+ if dict_key and isinstance(data, dict):
+ data = data.get(dict_key, None)
+ elif (arr_ind and isinstance(data, list) and
+ int(arr_ind) <= len(data) - 1):
+ data = data[int(arr_ind)]
+ else:
+ return None
+
+ return data
+
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
+ def write(self):
+ ''' write to file '''
+ if not self.filename:
+ raise YeditException('Please specify a filename.')
+
+ if self.backup and self.file_exists():
+ shutil.copy(self.filename, self.filename + '.orig')
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
+
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+
+ return (True, self.yaml_dict)
+
+ def read(self):
+ ''' read from file '''
+ # check if it exists
+ if self.filename is None or not self.file_exists():
+ return None
+
+ contents = None
+ with open(self.filename) as yfd:
+ contents = yfd.read()
+
+ return contents
+
+ def file_exists(self):
+ ''' return whether file exists '''
+ if os.path.exists(self.filename):
+ return True
+
+ return False
+
+ def load(self, content_type='yaml'):
+ ''' return yaml file '''
+ contents = self.read()
+
+ if not contents and not self.content:
+ return None
+
+ if self.content:
+ if isinstance(self.content, dict):
+ self.yaml_dict = self.content
+ return self.yaml_dict
+ elif isinstance(self.content, str):
+ contents = self.content
+
+ # check if it is yaml
+ try:
+ if content_type == 'yaml' and contents:
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
+ elif content_type == 'json' and contents:
+ self.yaml_dict = json.loads(contents)
+ except yaml.YAMLError as err:
+ # Error loading yaml or json
+ raise YeditException('Problem with loading yaml file. %s' % err)
+
+ return self.yaml_dict
+
+ def get(self, key):
+ ''' get a specified key'''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
+ except KeyError:
+ entry = None
+
+ return entry
+
+ def pop(self, path, key_or_item):
+ ''' remove a key, value pair from a dict or an item for a list'''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ except KeyError:
+ entry = None
+
+ if entry is None:
+ return (False, self.yaml_dict)
+
+ if isinstance(entry, dict):
+ # pylint: disable=no-member,maybe-no-member
+ if key_or_item in entry:
+ entry.pop(key_or_item)
+ return (True, self.yaml_dict)
+ return (False, self.yaml_dict)
+
+ elif isinstance(entry, list):
+ # pylint: disable=no-member,maybe-no-member
+ ind = None
+ try:
+ ind = entry.index(key_or_item)
+ except ValueError:
+ return (False, self.yaml_dict)
+
+ entry.pop(ind)
+ return (True, self.yaml_dict)
+
+ return (False, self.yaml_dict)
+
+ def delete(self, path):
+ ''' remove path from a dict'''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ except KeyError:
+ entry = None
+
+ if entry is None:
+ return (False, self.yaml_dict)
+
+ result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
+ if not result:
+ return (False, self.yaml_dict)
+
+ return (True, self.yaml_dict)
+
+ def exists(self, path, value):
+ ''' check if value exists at path'''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ except KeyError:
+ entry = None
+
+ if isinstance(entry, list):
+ if value in entry:
+ return True
+ return False
+
+ elif isinstance(entry, dict):
+ if isinstance(value, dict):
+ rval = False
+ for key, val in value.items():
+ if entry[key] != val:
+ rval = False
+ break
+ else:
+ rval = True
+ return rval
+
+ return value in entry
+
+ return entry == value
+
+ def append(self, path, value):
+ '''append value to a list'''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ except KeyError:
+ entry = None
+
+ if entry is None:
+ self.put(path, [])
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ if not isinstance(entry, list):
+ return (False, self.yaml_dict)
+
+ # pylint: disable=no-member,maybe-no-member
+ entry.append(value)
+ return (True, self.yaml_dict)
+
+ # pylint: disable=too-many-arguments
+ def update(self, path, value, index=None, curr_value=None):
+ ''' put path, value into a dict '''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ except KeyError:
+ entry = None
+
+ if isinstance(entry, dict):
+ # pylint: disable=no-member,maybe-no-member
+ if not isinstance(value, dict):
+ raise YeditException('Cannot replace key, value entry in ' +
+ 'dict with non-dict type. value=[%s] [%s]' % (value, type(value))) # noqa: E501
+
+ entry.update(value)
+ return (True, self.yaml_dict)
+
+ elif isinstance(entry, list):
+ # pylint: disable=no-member,maybe-no-member
+ ind = None
+ if curr_value:
+ try:
+ ind = entry.index(curr_value)
+ except ValueError:
+ return (False, self.yaml_dict)
+
+ elif index is not None:
+ ind = index
+
+ if ind is not None and entry[ind] != value:
+ entry[ind] = value
+ return (True, self.yaml_dict)
+
+ # see if it exists in the list
+ try:
+ ind = entry.index(value)
+ except ValueError:
+ # doesn't exist, append it
+ entry.append(value)
+ return (True, self.yaml_dict)
+
+ # already exists, return
+ if ind is not None:
+ return (False, self.yaml_dict)
+ return (False, self.yaml_dict)
+
+ def put(self, path, value):
+ ''' put path, value into a dict '''
+ try:
+ entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
+ except KeyError:
+ entry = None
+
+ if entry == value:
+ return (False, self.yaml_dict)
+
+ # deepcopy didn't work
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ result = Yedit.add_entry(tmp_copy, path, value, self.separator)
+ if not result:
+ return (False, self.yaml_dict)
+
+ self.yaml_dict = tmp_copy
+
+ return (True, self.yaml_dict)
+
+ def create(self, path, value):
+ ''' create a yaml file '''
+ if not self.file_exists():
+ # deepcopy didn't work
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ result = Yedit.add_entry(tmp_copy, path, value, self.separator)
+ if result:
+ self.yaml_dict = tmp_copy
+ return (True, self.yaml_dict)
+
+ return (False, self.yaml_dict)
+
+ @staticmethod
+ def get_curr_value(invalue, val_type):
+ '''return the current value'''
+ if invalue is None:
+ return None
+
+ curr_value = invalue
+ if val_type == 'yaml':
+ curr_value = yaml.load(invalue)
+ elif val_type == 'json':
+ curr_value = json.loads(invalue)
+
+ return curr_value
+
+ @staticmethod
+ def parse_value(inc_value, vtype=''):
+ '''determine value type passed'''
+ true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
+ 'on', 'On', 'ON', ]
+ false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
+ 'off', 'Off', 'OFF']
+
+ # It came in as a string but you didn't specify value_type as string
+ # we will convert to bool if it matches any of the above cases
+ if isinstance(inc_value, str) and 'bool' in vtype:
+ if inc_value not in true_bools and inc_value not in false_bools:
+ raise YeditException('Not a boolean type. str=[%s] vtype=[%s]'
+ % (inc_value, vtype))
+ elif isinstance(inc_value, bool) and 'str' in vtype:
+ inc_value = str(inc_value)
+
+ # If vtype is not str then go ahead and attempt to yaml load it.
+ if isinstance(inc_value, str) and 'str' not in vtype:
+ try:
+ inc_value = yaml.load(inc_value)
+ except Exception:
+ raise YeditException('Could not determine type of incoming ' +
+ 'value. value=[%s] vtype=[%s]'
+ % (type(inc_value), vtype))
+
+ return inc_value
+
+ # pylint: disable=too-many-return-statements,too-many-branches
+ @staticmethod
+ def run_ansible(module):
+ '''perform the idempotent crud operations'''
+ yamlfile = Yedit(filename=module.params['src'],
+ backup=module.params['backup'],
+ separator=module.params['separator'])
+
+ if module.params['src']:
+ rval = yamlfile.load()
+
+ if yamlfile.yaml_dict is None and \
+ module.params['state'] != 'present':
+ return {'failed': True,
+ 'msg': 'Error opening file [%s]. Verify that the ' +
+ 'file exists, that it has correct' +
+ ' permissions, and is valid yaml.'}
+
+ if module.params['state'] == 'list':
+ if module.params['content']:
+ content = Yedit.parse_value(module.params['content'],
+ module.params['content_type'])
+ yamlfile.yaml_dict = content
+
+ if module.params['key']:
+ rval = yamlfile.get(module.params['key']) or {}
+
+ return {'changed': False, 'result': rval, 'state': "list"}
+
+ elif module.params['state'] == 'absent':
+ if module.params['content']:
+ content = Yedit.parse_value(module.params['content'],
+ module.params['content_type'])
+ yamlfile.yaml_dict = content
+
+ if module.params['update']:
+ rval = yamlfile.pop(module.params['key'],
+ module.params['value'])
+ else:
+ rval = yamlfile.delete(module.params['key'])
+
+ if rval[0] and module.params['src']:
+ yamlfile.write()
+
+ return {'changed': rval[0], 'result': rval[1], 'state': "absent"}
+
+ elif module.params['state'] == 'present':
+ # check if content is different than what is in the file
+ if module.params['content']:
+ content = Yedit.parse_value(module.params['content'],
+ module.params['content_type'])
+
+ # We had no edits to make and the contents are the same
+ if yamlfile.yaml_dict == content and \
+ module.params['value'] is None:
+ return {'changed': False,
+ 'result': yamlfile.yaml_dict,
+ 'state': "present"}
+
+ yamlfile.yaml_dict = content
+
+ # we were passed a value; parse it
+ if module.params['value']:
+ value = Yedit.parse_value(module.params['value'],
+ module.params['value_type'])
+ key = module.params['key']
+ if module.params['update']:
+ # pylint: disable=line-too-long
+ curr_value = Yedit.get_curr_value(Yedit.parse_value(module.params['curr_value']), # noqa: E501
+ module.params['curr_value_format']) # noqa: E501
+
+ rval = yamlfile.update(key, value, module.params['index'], curr_value) # noqa: E501
+
+ elif module.params['append']:
+ rval = yamlfile.append(key, value)
+ else:
+ rval = yamlfile.put(key, value)
+
+ if rval[0] and module.params['src']:
+ yamlfile.write()
+
+ return {'changed': rval[0],
+ 'result': rval[1], 'state': "present"}
+
+ # no edits to make
+ if module.params['src']:
+ # pylint: disable=redefined-variable-type
+ rval = yamlfile.write()
+ return {'changed': rval[0],
+ 'result': rval[1],
+ 'state': "present"}
+
+ return {'failed': True, 'msg': 'Unknown state passed'}
+
+# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
+
+# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
+# pylint: disable=too-many-lines
+# noqa: E301,E302,E303,T001
+
+
+class OpenShiftCLIError(Exception):
+ '''Exception class for openshiftcli'''
+ pass
+
+
+# pylint: disable=too-few-public-methods
+class OpenShiftCLI(object):
+ ''' Class to wrap the command line tools '''
+ def __init__(self,
+ namespace,
+ kubeconfig='/etc/origin/master/admin.kubeconfig',
+ verbose=False,
+ all_namespaces=False):
+ ''' Constructor for OpenshiftCLI '''
+ self.namespace = namespace
+ self.verbose = verbose
+ self.kubeconfig = kubeconfig
+ self.all_namespaces = all_namespaces
+
+ # Pylint allows only 5 arguments to be passed.
+ # pylint: disable=too-many-arguments
+ def _replace_content(self, resource, rname, content, force=False, sep='.'):
+ ''' replace the current object with the content '''
+ res = self._get(resource, rname)
+ if not res['results']:
+ return res
+
+ fname = '/tmp/%s' % rname
+ yed = Yedit(fname, res['results'][0], separator=sep)
+ changes = []
+ for key, value in content.items():
+ changes.append(yed.put(key, value))
+
+ if any([change[0] for change in changes]):
+ yed.write()
+
+ atexit.register(Utils.cleanup, [fname])
+
+ return self._replace(fname, force)
+
+ return {'returncode': 0, 'updated': False}
+
+ def _replace(self, fname, force=False):
+ '''replace the current object with oc replace'''
+ cmd = ['replace', '-f', fname]
+ if force:
+ cmd.append('--force')
+ return self.openshift_cmd(cmd)
+
+ def _create_from_content(self, rname, content):
+ '''create a temporary file and then call oc create on it'''
+ fname = '/tmp/%s' % rname
+ yed = Yedit(fname, content=content)
+ yed.write()
+
+ atexit.register(Utils.cleanup, [fname])
+
+ return self._create(fname)
+
+ def _create(self, fname):
+ '''call oc create on a filename'''
+ return self.openshift_cmd(['create', '-f', fname])
+
+ def _delete(self, resource, rname, selector=None):
+ '''call oc delete on a resource'''
+ cmd = ['delete', resource, rname]
+ if selector:
+ cmd.append('--selector=%s' % selector)
+
+ return self.openshift_cmd(cmd)
+
+ def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
+ '''process a template
+
+ template_name: the name of the template to process
+ create: whether to send to oc create after processing
+ params: the parameters for the template
+ template_data: the incoming template's data; instead of a file
+ '''
+ cmd = ['process']
+ if template_data:
+ cmd.extend(['-f', '-'])
+ else:
+ cmd.append(template_name)
+ if params:
+ param_str = ["%s=%s" % (key, value) for key, value in params.items()]
+ cmd.append('-v')
+ cmd.extend(param_str)
+
+ results = self.openshift_cmd(cmd, output=True, input_data=template_data)
+
+ if results['returncode'] != 0 or not create:
+ return results
+
+ fname = '/tmp/%s' % template_name
+ yed = Yedit(fname, results['results'])
+ yed.write()
+
+ atexit.register(Utils.cleanup, [fname])
+
+ return self.openshift_cmd(['create', '-f', fname])
+
+ def _get(self, resource, rname=None, selector=None):
+ '''return a resource by name '''
+ cmd = ['get', resource]
+ if selector:
+ cmd.append('--selector=%s' % selector)
+ elif rname:
+ cmd.append(rname)
+
+ cmd.extend(['-o', 'json'])
+
+ rval = self.openshift_cmd(cmd, output=True)
+
+ # Ensure results are returned as an array
+ if 'items' in rval:
+ rval['results'] = rval['items']
+ elif not isinstance(rval['results'], list):
+ rval['results'] = [rval['results']]
+
+ return rval
+
+ def _schedulable(self, node=None, selector=None, schedulable=True):
+ ''' perform oadm manage-node --schedulable '''
+ cmd = ['manage-node']
+ if node:
+ cmd.extend(node)
+ else:
+ cmd.append('--selector=%s' % selector)
+
+ cmd.append('--schedulable=%s' % schedulable)
+
+ return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
+
+ def _list_pods(self, node=None, selector=None, pod_selector=None):
+ ''' perform oadm list pods
+
+ node: the node in which to list pods
+ selector: the label selector filter if provided
+ pod_selector: the pod selector filter if provided
+ '''
+ cmd = ['manage-node']
+ if node:
+ cmd.extend(node)
+ else:
+ cmd.append('--selector=%s' % selector)
+
+ if pod_selector:
+ cmd.append('--pod-selector=%s' % pod_selector)
+
+ cmd.extend(['--list-pods', '-o', 'json'])
+
+ return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
+
+ # pylint: disable=too-many-arguments
+ def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
+ ''' perform oadm manage-node evacuate '''
+ cmd = ['manage-node']
+ if node:
+ cmd.extend(node)
+ else:
+ cmd.append('--selector=%s' % selector)
+
+ if dry_run:
+ cmd.append('--dry-run')
+
+ if pod_selector:
+ cmd.append('--pod-selector=%s' % pod_selector)
+
+ if grace_period:
+ cmd.append('--grace-period=%s' % int(grace_period))
+
+ if force:
+ cmd.append('--force')
+
+ cmd.append('--evacuate')
+
+ return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
+
+ def _version(self):
+ ''' return the openshift version'''
+ return self.openshift_cmd(['version'], output=True, output_type='raw')
+
+ def _import_image(self, url=None, name=None, tag=None):
+ ''' perform image import '''
+ cmd = ['import-image']
+
+ image = '{0}'.format(name)
+ if tag:
+ image += ':{0}'.format(tag)
+
+ cmd.append(image)
+
+ if url:
+ cmd.append('--from={0}/{1}'.format(url, image))
+
+ cmd.append('-n{0}'.format(self.namespace))
+
+ cmd.append('--confirm')
+ return self.openshift_cmd(cmd)
+
+ def _run(self, cmds, input_data):
+ ''' Actually executes the command. This makes mocking easier. '''
+ curr_env = os.environ.copy()
+ curr_env.update({'KUBECONFIG': self.kubeconfig})
+ proc = subprocess.Popen(cmds,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=curr_env)
+
+ stdout, stderr = proc.communicate(input_data)
+
+ return proc.returncode, stdout, stderr
+
+ # pylint: disable=too-many-arguments,too-many-branches
+ def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
+ '''Base command for oc '''
+ cmds = []
+ if oadm:
+ cmds = ['oadm']
+ else:
+ cmds = ['oc']
+
+ if self.all_namespaces:
+ cmds.extend(['--all-namespaces'])
+ elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']: # noqa: E501
+ cmds.extend(['-n', self.namespace])
+
+ cmds.extend(cmd)
+
+ rval = {}
+ results = ''
+ err = None
+
+ if self.verbose:
+ print(' '.join(cmds))
+
+ returncode, stdout, stderr = self._run(cmds, input_data)
+
+ rval = {"returncode": returncode,
+ "results": results,
+ "cmd": ' '.join(cmds)}
+
+ if returncode == 0:
+ if output:
+ if output_type == 'json':
+ try:
+ rval['results'] = json.loads(stdout)
+ except ValueError as err:
+ if "No JSON object could be decoded" in err.args:
+ err = err.args
+ elif output_type == 'raw':
+ rval['results'] = stdout
+
+ if self.verbose:
+ print("STDOUT: {0}".format(stdout))
+ print("STDERR: {0}".format(stderr))
+
+ if err:
+ rval.update({"err": err,
+ "stderr": stderr,
+ "stdout": stdout,
+ "cmd": cmds})
+
+ else:
+ rval.update({"stderr": stderr,
+ "stdout": stdout,
+ "results": {}})
+
+ return rval
+
+
+class Utils(object):
+ ''' utilities for openshiftcli modules '''
+ @staticmethod
+ def create_file(rname, data, ftype='yaml'):
+ ''' create a file in tmp with name and contents'''
+ path = os.path.join('/tmp', rname)
+ with open(path, 'w') as fds:
+ if ftype == 'yaml':
+ fds.write(yaml.dump(data, Dumper=yaml.RoundTripDumper))
+
+ elif ftype == 'json':
+ fds.write(json.dumps(data))
+ else:
+ fds.write(data)
+
+ # Register cleanup when module is done
+ atexit.register(Utils.cleanup, [path])
+ return path
+
+ @staticmethod
+ def create_files_from_contents(content, content_type=None):
+ '''Turn an array of dict: filename, content into a files array'''
+ if not isinstance(content, list):
+ content = [content]
+ files = []
+ for item in content:
+ path = Utils.create_file(item['path'], item['data'], ftype=content_type)
+ files.append({'name': os.path.basename(path), 'path': path})
+ return files
+
+ @staticmethod
+ def cleanup(files):
+ '''Clean up on exit '''
+ for sfile in files:
+ if os.path.exists(sfile):
+ if os.path.isdir(sfile):
+ shutil.rmtree(sfile)
+ elif os.path.isfile(sfile):
+ os.remove(sfile)
+
+ @staticmethod
+ def exists(results, _name):
+ ''' Check to see if the results include the name '''
+ if not results:
+ return False
+
+ if Utils.find_result(results, _name):
+ return True
+
+ return False
+
+ @staticmethod
+ def find_result(results, _name):
+ ''' Find the specified result by name'''
+ rval = None
+ for result in results:
+ if 'metadata' in result and result['metadata']['name'] == _name:
+ rval = result
+ break
+
+ return rval
+
+ @staticmethod
+ def get_resource_file(sfile, sfile_type='yaml'):
+ ''' return the service file '''
+ contents = None
+ with open(sfile) as sfd:
+ contents = sfd.read()
+
+ if sfile_type == 'yaml':
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ elif sfile_type == 'json':
+ contents = json.loads(contents)
+
+ return contents
+
+ @staticmethod
+ def filter_versions(stdout):
+ ''' filter the oc version output '''
+
+ version_dict = {}
+ version_search = ['oc', 'openshift', 'kubernetes']
+
+ for line in stdout.strip().split('\n'):
+ for term in version_search:
+ if not line:
+ continue
+ if line.startswith(term):
+ version_dict[term] = line.split()[-1]
+
+ # horrible hack to get openshift version in Openshift 3.2
+ # By default "oc version in 3.2 does not return an "openshift" version
+ if "openshift" not in version_dict:
+ version_dict["openshift"] = version_dict["oc"]
+
+ return version_dict
+
+ @staticmethod
+ def add_custom_versions(versions):
+ ''' create custom versions strings '''
+
+ versions_dict = {}
+
+ for tech, version in versions.items():
+ # clean up "-" from version
+ if "-" in version:
+ version = version.split("-")[0]
+
+ if version.startswith('v'):
+ versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
+ # "v3.3.0.33" is what we have, we want "3.3"
+ versions_dict[tech + '_short'] = version[1:4]
+
+ return versions_dict
+
+ @staticmethod
+ def openshift_installed():
+ ''' check if openshift is installed '''
+ import yum
+
+ yum_base = yum.YumBase()
+ if yum_base.rpmdb.searchNevra(name='atomic-openshift'):
+ return True
+
+ return False
+
+ # Disabling too-many-branches. This is a yaml dictionary comparison function
+ # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
+ @staticmethod
+ def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
+ ''' Given a user defined definition, compare it with the results given back by our query. '''
+
+ # Currently these values are autogenerated and we do not need to check them
+ skip = ['metadata', 'status']
+ if skip_keys:
+ skip.extend(skip_keys)
+
+ for key, value in result_def.items():
+ if key in skip:
+ continue
+
+ # Both are lists
+ if isinstance(value, list):
+ if key not in user_def:
+ if debug:
+ print('User data does not have key [%s]' % key)
+ print('User data: %s' % user_def)
+ return False
+
+ if not isinstance(user_def[key], list):
+ if debug:
+ print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
+ return False
+
+ if len(user_def[key]) != len(value):
+ if debug:
+ print("List lengths are not equal.")
+ print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
+ print("user_def: %s" % user_def[key])
+ print("value: %s" % value)
+ return False
+
+ for values in zip(user_def[key], value):
+ if isinstance(values[0], dict) and isinstance(values[1], dict):
+ if debug:
+ print('sending list - list')
+ print(type(values[0]))
+ print(type(values[1]))
+ result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
+ if not result:
+ print('list compare returned false')
+ return False
+
+ elif value != user_def[key]:
+ if debug:
+ print('value should be identical')
+ print(value)
+ print(user_def[key])
+ return False
+
+ # recurse on a dictionary
+ elif isinstance(value, dict):
+ if key not in user_def:
+ if debug:
+ print("user_def does not have key [%s]" % key)
+ return False
+ if not isinstance(user_def[key], dict):
+ if debug:
+ print("dict returned false: not instance of dict")
+ return False
+
+ # before passing ensure keys match
+ api_values = set(value.keys()) - set(skip)
+ user_values = set(user_def[key].keys()) - set(skip)
+ if api_values != user_values:
+ if debug:
+ print("keys are not equal in dict")
+ print(api_values)
+ print(user_values)
+ return False
+
+ result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
+ if not result:
+ if debug:
+ print("dict returned false")
+ print(result)
+ return False
+
+ # Verify each key, value pair is the same
+ else:
+ if key not in user_def or value != user_def[key]:
+ if debug:
+ print("value not equal; user_def does not have key")
+ print(key)
+ print(value)
+ if key in user_def:
+ print(user_def[key])
+ return False
+
+ if debug:
+ print('returning true')
+ return True
+
+
+class OpenShiftCLIConfig(object):
+ '''Generic Config'''
+ def __init__(self, rname, namespace, kubeconfig, options):
+ self.kubeconfig = kubeconfig
+ self.name = rname
+ self.namespace = namespace
+ self._options = options
+
+ @property
+ def config_options(self):
+ ''' return config options '''
+ return self._options
+
+ def to_option_list(self):
+ '''return all options as a list of cli strings'''
+ return self.stringify()
+
+ def stringify(self):
+ ''' return the options hash as a list of cli params '''
+ rval = []
+ for key, data in self.config_options.items():
+ if data['include'] \
+ and (data['value'] or isinstance(data['value'], int)):
+ rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
+
+ return rval
+
+
+# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
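The OpenShiftCLI base class above is what turns module intent into actual oc/oadm
invocations: openshift_cmd() prefixes 'oc' (or 'oadm'), injects '-n <namespace>' or
'--all-namespaces', runs the command with KUBECONFIG set, and JSON-decodes the output
on request. A minimal sketch of that call path, assuming 'oc' is on PATH and a cluster
is reachable; the namespace and resource names are illustrative:

    # Sketch: _get() runs roughly
    #   KUBECONFIG=/etc/origin/master/admin.kubeconfig oc -n default get sa builder -o json
    cli = OpenShiftCLI(namespace='default',
                       kubeconfig='/etc/origin/master/admin.kubeconfig')
    rval = cli._get('sa', 'builder')   # {'returncode': 0, 'results': [...], 'cmd': '...'}
    if rval['returncode'] == 0:
        print(rval['results'][0]['metadata']['name'])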
+
+# -*- -*- -*- Begin included fragment: lib/serviceaccount.py -*- -*- -*-
+
+class ServiceAccountConfig(object):
+ '''Service account config class
+
+ This class stores the options and returns a default service account
+ '''
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
+ self.name = sname
+ self.kubeconfig = kubeconfig
+ self.namespace = namespace
+ self.secrets = secrets or []
+ self.image_pull_secrets = image_pull_secrets or []
+ self.data = {}
+ self.create_dict()
+
+ def create_dict(self):
+ ''' return a properly structured volume '''
+ self.data['apiVersion'] = 'v1'
+ self.data['kind'] = 'ServiceAccount'
+ self.data['metadata'] = {}
+ self.data['metadata']['name'] = self.name
+ self.data['metadata']['namespace'] = self.namespace
+
+ self.data['secrets'] = []
+ if self.secrets:
+ for sec in self.secrets:
+ self.data['secrets'].append({"name": sec})
+
+ self.data['imagePullSecrets'] = []
+ if self.image_pull_secrets:
+ for sec in self.image_pull_secrets:
+ self.data['imagePullSecrets'].append({"name": sec})
+
+class ServiceAccount(Yedit):
+ ''' Class to model an openshift service account object '''
+ image_pull_secrets_path = "imagePullSecrets"
+ secrets_path = "secrets"
+
+ def __init__(self, content):
+ '''ServiceAccount constructor'''
+ super(ServiceAccount, self).__init__(content=content)
+ self._secrets = None
+ self._image_pull_secrets = None
+
+ @property
+ def image_pull_secrets(self):
+ ''' property for image_pull_secrets '''
+ if self._image_pull_secrets is None:
+ self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
+ return self._image_pull_secrets
+
+ @image_pull_secrets.setter
+ def image_pull_secrets(self, secrets):
+ ''' setter for image_pull_secrets '''
+ self._image_pull_secrets = secrets
+
+ @property
+ def secrets(self):
+ ''' property for secrets '''
+ if not self._secrets:
+ self._secrets = self.get(ServiceAccount.secrets_path) or []
+ return self._secrets
+
+ @secrets.setter
+ def secrets(self, secrets):
+ ''' setter for secrets '''
+ self._secrets = secrets
+
+ def delete_secret(self, inc_secret):
+ ''' remove a secret '''
+ remove_idx = None
+ for idx, sec in enumerate(self.secrets):
+ if sec['name'] == inc_secret:
+ remove_idx = idx
+ break
+
+ if remove_idx is not None:
+ del self.secrets[remove_idx]
+ return True
+
+ return False
+
+ def delete_image_pull_secret(self, inc_secret):
+ ''' remove an image_pull_secret '''
+ remove_idx = None
+ for idx, sec in enumerate(self.image_pull_secrets):
+ if sec['name'] == inc_secret:
+ remove_idx = idx
+ break
+
+ if remove_idx is not None:
+ del self.image_pull_secrets[remove_idx]
+ return True
+
+ return False
+
+ def find_secret(self, inc_secret):
+ '''find secret'''
+ for secret in self.secrets:
+ if secret['name'] == inc_secret:
+ return secret
+
+ return None
+
+ def find_image_pull_secret(self, inc_secret):
+ '''find image pull secret'''
+ for secret in self.image_pull_secrets:
+ if secret['name'] == inc_secret:
+ return secret
+
+ return None
+
+ def add_secret(self, inc_secret):
+ '''add secret'''
+ if self.secrets:
+ self.secrets.append({"name": inc_secret}) # pylint: disable=no-member
+ else:
+ self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])
+
+ def add_image_pull_secret(self, inc_secret):
+ '''add image_pull_secret'''
+ if self.image_pull_secrets:
+ self.image_pull_secrets.append({"name": inc_secret}) # pylint: disable=no-member
+ else:
+ self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
+
+# -*- -*- -*- End included fragment: lib/serviceaccount.py -*- -*- -*-
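ServiceAccountConfig simply materializes a bare ServiceAccount manifest as a Python
dict, and ServiceAccount wraps such a dict in Yedit so the secrets lists can be queried
and edited in place. A small sketch of the data these classes produce; the names are
illustrative only:

    config = ServiceAccountConfig('builder', 'default',
                                  '/etc/origin/master/admin.kubeconfig',
                                  secrets=['mynewsecret'])
    # config.data is now a plain manifest:
    #   {'apiVersion': 'v1', 'kind': 'ServiceAccount',
    #    'metadata': {'name': 'builder', 'namespace': 'default'},
    #    'secrets': [{'name': 'mynewsecret'}], 'imagePullSecrets': []}

    sa = ServiceAccount(content=config.data)
    sa.add_secret('anothersecret')     # appended to the 'secrets' list in place
    sa.find_secret('anothersecret')    # -> {'name': 'anothersecret'}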
+
+# -*- -*- -*- Begin included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
+
+class OCServiceAccountSecret(OpenShiftCLI):
+ ''' Class to wrap the oc command line tools '''
+
+ kind = 'sa'
+ def __init__(self, config, verbose=False):
+ ''' Constructor for OpenshiftOC '''
+ super(OCServiceAccountSecret, self).__init__(config.namespace, config.kubeconfig)
+ self.config = config
+ self.verbose = verbose
+ self._service_account = None
+
+ @property
+ def service_account(self):
+ ''' Property for the service account '''
+ if not self._service_account:
+ self.get()
+ return self._service_account
+
+ @service_account.setter
+ def service_account(self, data):
+ ''' setter for the service account '''
+ self._service_account = data
+
+ def exists(self, in_secret):
+ ''' verifies if secret exists in the service account '''
+ result = self.service_account.find_secret(in_secret)
+ if not result:
+ return False
+ return True
+
+ def get(self):
+ ''' get the service account definition from the master '''
+ sao = self._get(OCServiceAccountSecret.kind, self.config.name)
+ if sao['returncode'] == 0:
+ self.service_account = ServiceAccount(content=sao['results'][0])
+ sao['results'] = self.service_account.get('secrets')
+ return sao
+
+ def delete(self):
+ ''' delete secrets '''
+
+ modified = []
+ for rem_secret in self.config.secrets:
+ modified.append(self.service_account.delete_secret(rem_secret))
+
+ if any(modified):
+ return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
+
+ return {'returncode': 0, 'changed': False}
+
+ def put(self):
+ ''' place secrets into sa '''
+ modified = False
+ for add_secret in self.config.secrets:
+ if not self.service_account.find_secret(add_secret):
+ self.service_account.add_secret(add_secret)
+ modified = True
+
+ if modified:
+ return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
+
+ return {'returncode': 0, 'changed': False}
+
+
+ @staticmethod
+ # pylint: disable=too-many-return-statements,too-many-branches
+ # TODO: This function should be refactored into its individual parts.
+ def run_ansible(params, check_mode):
+ ''' run the ansible idempotent code '''
+
+ sconfig = ServiceAccountConfig(params['service_account'],
+ params['namespace'],
+ params['kubeconfig'],
+ [params['secret']],
+ None)
+
+ oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
+
+ state = params['state']
+
+ api_rval = oc_sa_sec.get()
+
+ #####
+ # Get
+ #####
+ if state == 'list':
+ return {'changed': False, 'results': api_rval['results'], 'state': "list"}
+
+ ########
+ # Delete
+ ########
+ if state == 'absent':
+ if oc_sa_sec.exists(params['secret']):
+
+ if check_mode:
+ return {'changed': True, 'msg': 'Would have removed the ' + \
+ 'secret from the service account.'}
+
+ api_rval = oc_sa_sec.delete()
+
+ return {'changed': True, 'results': api_rval, 'state': "absent"}
+
+ return {'changed': False, 'state': "absent"}
+
+ if state == 'present':
+ ########
+ # Create
+ ########
+ if not oc_sa_sec.exists(params['secret']):
+
+ if check_mode:
+ return {'changed': True, 'msg': 'Would have added the ' + \
+ 'secret to the service account.'}
+
+ # Create it here
+ api_rval = oc_sa_sec.put()
+ if api_rval['returncode'] != 0:
+ return {'failed': True, 'msg': api_rval}
+
+ # return the created object
+ api_rval = oc_sa_sec.get()
+
+ if api_rval['returncode'] != 0:
+ return {'failed': True, 'msg': api_rval}
+
+ return {'changed': True, 'results': api_rval, 'state': "present"}
+
+
+ return {'changed': False, 'results': api_rval, 'state': "present"}
+
+
+ return {'failed': True,
+ 'changed': False,
+ 'msg': 'Unknown state passed. %s' % state,
+ 'state': 'unknown'}
+
+# -*- -*- -*- End included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
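run_ansible() above is the module's entire state machine (list/absent/present) and needs
nothing Ansible-specific beyond a plain params dict, which is exactly how the unit tests
further down drive it. A minimal sketch, assuming a reachable cluster; the service
account and secret names are illustrative:

    params = {
        'state': 'present',
        'namespace': 'default',
        'service_account': 'builder',
        'secret': 'mynewsecret',
        'kubeconfig': '/etc/origin/master/admin.kubeconfig',
        'debug': False,
    }
    result = OCServiceAccountSecret.run_ansible(params, check_mode=False)
    # First run links the secret:  {'changed': True, 'results': {...}, 'state': 'present'}
    # A second identical run is a no-op: {'changed': False, ...}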
+
+# -*- -*- -*- Begin included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
+
+def main():
+ '''
+ ansible oc module to manage service account secrets.
+ '''
+
+ module = AnsibleModule(
+ argument_spec=dict(
+ kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
+ state=dict(default='present', type='str',
+ choices=['present', 'absent', 'list']),
+ debug=dict(default=False, type='bool'),
+ namespace=dict(default=None, required=True, type='str'),
+ secret=dict(default=None, type='str'),
+ service_account=dict(required=True, type='str'),
+ ),
+ supports_check_mode=True,
+ )
+
+ rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
+ if 'failed' in rval:
+ module.fail_json(**rval)
+
+ module.exit_json(**rval)
+
+if __name__ == '__main__':
+ main()
+
+# -*- -*- -*- End included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
diff --git a/roles/lib_openshift/library/oc_version.py b/roles/lib_openshift/library/oc_version.py
index 63d16d451..5c528b399 100644
--- a/roles/lib_openshift/library/oc_version.py
+++ b/roles/lib_openshift/library/oc_version.py
@@ -262,6 +262,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -270,15 +281,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
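The hunk above (repeated below for yedit.py and its source fragment) pulls the actual
disk write out of Yedit.write() into a static Yedit._write(filename, contents) helper,
so unit tests can intercept the serialized YAML instead of letting it land in /tmp.
A sketch of the pattern the new tests rely on; the patched module name is whichever
generated library is under test:

    import mock   # the python2-era mock package these tests already use

    with mock.patch('oc_serviceaccount_secret.Yedit._write') as mock_write:
        # ... exercise code that eventually calls Yedit.write() ...
        assert mock_write.called   # the YAML dump never touched the filesystem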
diff --git a/roles/lib_openshift/src/ansible/oc_serviceaccount.py b/roles/lib_openshift/src/ansible/oc_serviceaccount.py
index ea9bdb455..bc9332f9a 100644
--- a/roles/lib_openshift/src/ansible/oc_serviceaccount.py
+++ b/roles/lib_openshift/src/ansible/oc_serviceaccount.py
@@ -3,7 +3,7 @@
def main():
'''
- ansible oc module for route
+ ansible oc module for service accounts
'''
module = AnsibleModule(
diff --git a/roles/lib_openshift/src/ansible/oc_serviceaccount_secret.py b/roles/lib_openshift/src/ansible/oc_serviceaccount_secret.py
new file mode 100644
index 000000000..554d9a6a2
--- /dev/null
+++ b/roles/lib_openshift/src/ansible/oc_serviceaccount_secret.py
@@ -0,0 +1,29 @@
+# pylint: skip-file
+# flake8: noqa
+
+def main():
+ '''
+ ansible oc module to manage service account secrets.
+ '''
+
+ module = AnsibleModule(
+ argument_spec=dict(
+ kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
+ state=dict(default='present', type='str',
+ choices=['present', 'absent', 'list']),
+ debug=dict(default=False, type='bool'),
+ namespace=dict(default=None, required=True, type='str'),
+ secret=dict(default=None, type='str'),
+ service_account=dict(required=True, type='str'),
+ ),
+ supports_check_mode=True,
+ )
+
+ rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
+ if 'failed' in rval:
+ module.fail_json(**rval)
+
+ module.exit_json(**rval)
+
+if __name__ == '__main__':
+ main()
diff --git a/roles/lib_openshift/src/class/oc_serviceaccount_secret.py b/roles/lib_openshift/src/class/oc_serviceaccount_secret.py
new file mode 100644
index 000000000..750a74d33
--- /dev/null
+++ b/roles/lib_openshift/src/class/oc_serviceaccount_secret.py
@@ -0,0 +1,138 @@
+# pylint: skip-file
+# flake8: noqa
+
+class OCServiceAccountSecret(OpenShiftCLI):
+ ''' Class to wrap the oc command line tools '''
+
+ kind = 'sa'
+ def __init__(self, config, verbose=False):
+ ''' Constructor for OpenshiftOC '''
+ super(OCServiceAccountSecret, self).__init__(config.namespace, config.kubeconfig)
+ self.config = config
+ self.verbose = verbose
+ self._service_account = None
+
+ @property
+ def service_account(self):
+ ''' Property for the service account '''
+ if not self._service_account:
+ self.get()
+ return self._service_account
+
+ @service_account.setter
+ def service_account(self, data):
+ ''' setter for the service account '''
+ self._service_account = data
+
+ def exists(self, in_secret):
+ ''' verifies if secret exists in the service account '''
+ result = self.service_account.find_secret(in_secret)
+ if not result:
+ return False
+ return True
+
+ def get(self):
+ ''' get the service account definition from the master '''
+ sao = self._get(OCServiceAccountSecret.kind, self.config.name)
+ if sao['returncode'] == 0:
+ self.service_account = ServiceAccount(content=sao['results'][0])
+ sao['results'] = self.service_account.get('secrets')
+ return sao
+
+ def delete(self):
+ ''' delete secrets '''
+
+ modified = []
+ for rem_secret in self.config.secrets:
+ modified.append(self.service_account.delete_secret(rem_secret))
+
+ if any(modified):
+ return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
+
+ return {'returncode': 0, 'changed': False}
+
+ def put(self):
+ ''' place secrets into sa '''
+ modified = False
+ for add_secret in self.config.secrets:
+ if not self.service_account.find_secret(add_secret):
+ self.service_account.add_secret(add_secret)
+ modified = True
+
+ if modified:
+ return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
+
+ return {'returncode': 0, 'changed': False}
+
+
+ @staticmethod
+ # pylint: disable=too-many-return-statements,too-many-branches
+ # TODO: This function should be refactored into its individual parts.
+ def run_ansible(params, check_mode):
+ ''' run the ansible idempotent code '''
+
+ sconfig = ServiceAccountConfig(params['service_account'],
+ params['namespace'],
+ params['kubeconfig'],
+ [params['secret']],
+ None)
+
+ oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
+
+ state = params['state']
+
+ api_rval = oc_sa_sec.get()
+
+ #####
+ # Get
+ #####
+ if state == 'list':
+ return {'changed': False, 'results': api_rval['results'], 'state': "list"}
+
+ ########
+ # Delete
+ ########
+ if state == 'absent':
+ if oc_sa_sec.exists(params['secret']):
+
+ if check_mode:
+ return {'changed': True, 'msg': 'Would have removed the ' + \
+ 'secret from the service account.'}
+
+ api_rval = oc_sa_sec.delete()
+
+ return {'changed': True, 'results': api_rval, 'state': "absent"}
+
+ return {'changed': False, 'state': "absent"}
+
+ if state == 'present':
+ ########
+ # Create
+ ########
+ if not oc_sa_sec.exists(params['secret']):
+
+ if check_mode:
+ return {'changed': True, 'msg': 'Would have added the ' + \
+ 'secret to the service account.'}
+
+ # Create it here
+ api_rval = oc_sa_sec.put()
+ if api_rval['returncode'] != 0:
+ return {'failed': True, 'msg': api_rval}
+
+ # return the created object
+ api_rval = oc_sa_sec.get()
+
+ if api_rval['returncode'] != 0:
+ return {'failed': True, 'msg': api_rval}
+
+ return {'changed': True, 'results': api_rval, 'state': "present"}
+
+
+ return {'changed': False, 'results': api_rval, 'state': "present"}
+
+
+ return {'failed': True,
+ 'changed': False,
+ 'msg': 'Unknown state passed. %s' % state,
+ 'state': 'unknown'}
diff --git a/roles/lib_openshift/src/doc/serviceaccount_secret b/roles/lib_openshift/src/doc/serviceaccount_secret
new file mode 100644
index 000000000..ab90a3f7c
--- /dev/null
+++ b/roles/lib_openshift/src/doc/serviceaccount_secret
@@ -0,0 +1,68 @@
+# flake8: noqa
+# pylint: skip-file
+
+DOCUMENTATION = '''
+---
+module: oc_serviceaccount_secret
+short_description: Module to manage openshift service account secrets
+description:
+ - Manage openshift service account secrets programmatically.
+options:
+ state:
+ description:
+ - If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
+ required: false
+ default: present
+ choices: ["present", "absent", "list"]
+ aliases: []
+ kubeconfig:
+ description:
+ - The path for the kubeconfig file to use for authentication
+ required: false
+ default: /etc/origin/master/admin.kubeconfig
+ aliases: []
+ debug:
+ description:
+ - Turn on debug output.
+ required: false
+ default: false
+ aliases: []
+ service_account:
+ description:
+ - Name of the service account.
+ required: true
+ default: None
+ aliases: []
+ namespace:
+ description:
+ - Namespace of the service account and secret.
+ required: true
+ default: None
+ aliases: []
+ secret:
+ description:
+ - The secret that should be linked to the service account.
+ required: false
+ default: None
+ aliases: []
+author:
+- "Kenny Woodson <kwoodson@redhat.com>"
+extends_documentation_fragment: []
+'''
+
+EXAMPLES = '''
+ - name: get secrets of a service account
+ oc_serviceaccount_secret:
+ state: list
+ service_account: builder
+ namespace: default
+ register: sasecretout
+
+
+ - name: Link a service account to a specific secret
+ oc_serviceaccount_secret:
+ service_account: builder
+ secret: mynewsecret
+ namespace: default
+ register: sasecretout
+'''
diff --git a/roles/lib_openshift/src/sources.yml b/roles/lib_openshift/src/sources.yml
index 6438ff4bb..aa02ce120 100644
--- a/roles/lib_openshift/src/sources.yml
+++ b/roles/lib_openshift/src/sources.yml
@@ -84,6 +84,17 @@ oc_serviceaccount.py:
- class/oc_serviceaccount.py
- ansible/oc_serviceaccount.py
+oc_serviceaccount_secret.py:
+- doc/generated
+- doc/license
+- lib/import.py
+- doc/serviceaccount_secret
+- ../../lib_utils/src/class/yedit.py
+- lib/base.py
+- lib/serviceaccount.py
+- class/oc_serviceaccount_secret.py
+- ansible/oc_serviceaccount_secret.py
+
oc_service.py:
- doc/generated
- doc/license
diff --git a/roles/lib_openshift/src/test/integration/oc_serviceaccount_secret.yml b/roles/lib_openshift/src/test/integration/oc_serviceaccount_secret.yml
new file mode 100755
index 000000000..d3bd9f3aa
--- /dev/null
+++ b/roles/lib_openshift/src/test/integration/oc_serviceaccount_secret.yml
@@ -0,0 +1,79 @@
+#!/usr/bin/ansible-playbook --module-path=../../../library/
+---
+- hosts: "{{ cli_master_test }}"
+ gather_facts: no
+ user: root
+
+ vars:
+ namespace: default
+ service_account_name: someserviceaccountname
+ secret_name: somesecretname
+
+ vars_prompt:
+ - name: cli_master_test
+ prompt: "Master to run against"
+ private: false
+ default: localhost
+
+ post_tasks:
+ - name: create service account to test with - Arrange
+ oc_serviceaccount:
+ namespace: "{{ namespace }}"
+ name: "{{ service_account_name }}"
+
+ - name: create secret to test with - Arrange
+ oc_secret:
+ namespace: "{{ namespace }}"
+ name: "{{ secret_name }}"
+ contents:
+ - path: blah
+ data: blahdeblah
+
+ - name: Ensure the service account and secret are not linked - Arrange
+ oc_serviceaccount_secret:
+ state: absent
+ service_account: "{{ service_account_name }}"
+ secret: "{{ secret_name }}"
+ namespace: "{{ namespace }}"
+
+ - name: get secrets of a service account - Act
+ oc_serviceaccount_secret:
+ state: list
+ service_account: builder
+ namespace: "{{ namespace }}"
+ register: sasecretout
+
+ - name: get secrets of a service account - Assert
+ assert:
+ that:
+ - "sasecretout.changed == False"
+ - "sasecretout.state == 'list'"
+ - "sasecretout.results | length > 0"
+
+ - name: Test linking a service account and secret - Act
+ oc_serviceaccount_secret:
+ service_account: "{{ service_account_name }}"
+ secret: "{{ secret_name }}"
+ namespace: "{{ namespace }}"
+ register: sasecretout
+
+ - name: Test linking a service account and secret - Assert
+ assert:
+ that:
+ - "sasecretout.changed == True"
+ - "sasecretout.state == 'present'"
+ - "sasecretout.results.returncode == 0"
+ - "sasecretout.results.results | length > 0"
+
+ - name: Test linking a service account and secret - idempotency - Act
+ oc_serviceaccount_secret:
+ service_account: "{{ service_account_name }}"
+ secret: "{{ secret_name }}"
+ namespace: "{{ namespace }}"
+ register: sasecretout
+
+ - name: Test linking a service account and secret - idempotency - Assert
+ assert:
+ that:
+ - "sasecretout.changed == False"
+ - "sasecretout.state == 'present'"
diff --git a/roles/lib_openshift/src/test/unit/oc_serviceaccount.py b/roles/lib_openshift/src/test/unit/oc_serviceaccount.py
index faf0bfeb5..dab751bb9 100755
--- a/roles/lib_openshift/src/test/unit/oc_serviceaccount.py
+++ b/roles/lib_openshift/src/test/unit/oc_serviceaccount.py
@@ -100,9 +100,9 @@ class OCServiceAccountTest(unittest.TestCase):
# Making sure our mock was called as we expected
mock_cmd.assert_has_calls([
- mock.call(['/usr/bin/oc', '-n', 'default', 'get', 'sa', 'testserviceaccountname', '-o', 'json'], None),
- mock.call(['/usr/bin/oc', '-n', 'default', 'create', '-f', '/tmp/testserviceaccountname'], None),
- mock.call(['/usr/bin/oc', '-n', 'default', 'get', 'sa', 'testserviceaccountname', '-o', 'json'], None),
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'testserviceaccountname', '-o', 'json'], None),
+ mock.call(['oc', '-n', 'default', 'create', '-f', mock.ANY], None),
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'testserviceaccountname', '-o', 'json'], None),
])
def tearDown(self):
diff --git a/roles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py b/roles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py
new file mode 100755
index 000000000..342da961b
--- /dev/null
+++ b/roles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py
@@ -0,0 +1,257 @@
+#!/usr/bin/env python2
+'''
+ Unit tests for oc_serviceaccount_secret
+'''
+# To run:
+# ./oc_serviceaccount_secret.py
+#
+# .
+# Ran 1 test in 0.002s
+#
+# OK
+
+import os
+import sys
+import unittest
+import mock
+
+# Removing invalid variable names for tests so that I can
+# keep them brief
+# pylint: disable=invalid-name,no-name-in-module
+# Disable import-error b/c our libraries aren't loaded in jenkins
+# pylint: disable=import-error,wrong-import-position
+# place class in our python path
+module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
+sys.path.insert(0, module_path)
+from oc_serviceaccount_secret import OCServiceAccountSecret # noqa: E402
+
+
+class OCServiceAccountSecretTest(unittest.TestCase):
+ '''
+ Test class for OCServiceAccountSecret
+ '''
+
+ def setUp(self):
+ ''' setup method will create a file and set to known configuration '''
+ pass
+
+ @mock.patch('oc_serviceaccount_secret.Yedit._write')
+ @mock.patch('oc_serviceaccount_secret.OCServiceAccountSecret._run')
+ def test_adding_a_secret_to_a_serviceaccount(self, mock_cmd, mock_write):
+ ''' Testing adding a secret to a service account '''
+
+ # Arrange
+
+ # run_ansible input parameters
+ params = {
+ 'state': 'present',
+ 'namespace': 'default',
+ 'secret': 'newsecret',
+ 'service_account': 'builder',
+ 'kubeconfig': '/etc/origin/master/admin.kubeconfig',
+ 'debug': False,
+ }
+
+ oc_get_sa_before = '''{
+ "kind": "ServiceAccount",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "builder",
+ "namespace": "default",
+ "selfLink": "/api/v1/namespaces/default/serviceaccounts/builder",
+ "uid": "cf47bca7-ebc4-11e6-b041-0ed9df7abc38",
+ "resourceVersion": "302879",
+ "creationTimestamp": "2017-02-05T17:02:00Z"
+ },
+ "secrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ },
+ {
+ "name": "builder-token-akqxi"
+ }
+
+ ],
+ "imagePullSecrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ }
+ ]
+ }
+ '''
+
+ oc_get_sa_after = '''{
+ "kind": "ServiceAccount",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "builder",
+ "namespace": "default",
+ "selfLink": "/api/v1/namespaces/default/serviceaccounts/builder",
+ "uid": "cf47bca7-ebc4-11e6-b041-0ed9df7abc38",
+ "resourceVersion": "302879",
+ "creationTimestamp": "2017-02-05T17:02:00Z"
+ },
+ "secrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ },
+ {
+ "name": "builder-token-akqxi"
+ },
+ {
+ "name": "newsecret"
+ }
+
+ ],
+ "imagePullSecrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ }
+ ]
+ }
+ '''
+
+ builder_yaml_file = '''\
+secrets:
+- name: builder-dockercfg-rsrua
+- name: builder-token-akqxi
+- name: newsecret
+kind: ServiceAccount
+imagePullSecrets:
+- name: builder-dockercfg-rsrua
+apiVersion: v1
+metadata:
+ name: builder
+ namespace: default
+ resourceVersion: '302879'
+ creationTimestamp: '2017-02-05T17:02:00Z'
+ selfLink: /api/v1/namespaces/default/serviceaccounts/builder
+ uid: cf47bca7-ebc4-11e6-b041-0ed9df7abc38
+'''
+
+ # Return values of our mocked function call. These get returned once per call.
+ mock_cmd.side_effect = [
+ (0, oc_get_sa_before, ''), # First call to the mock
+ (0, oc_get_sa_before, ''), # Second call to the mock
+ (0, 'serviceaccount "builder" replaced', ''), # Third call to the mock
+ (0, oc_get_sa_after, ''), # Fourth call to the mock
+ ]
+
+ # Act
+ results = OCServiceAccountSecret.run_ansible(params, False)
+
+ # Assert
+ self.assertTrue(results['changed'])
+ self.assertEqual(results['results']['returncode'], 0)
+ self.assertEqual(results['state'], 'present')
+
+ # Making sure our mocks were called as we expected
+ mock_cmd.assert_has_calls([
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'builder', '-o', 'json'], None),
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'builder', '-o', 'json'], None),
+ mock.call(['oc', '-n', 'default', 'replace', '-f', '/tmp/builder'], None),
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'builder', '-o', 'json'], None)
+ ])
+
+ mock_write.assert_has_calls([
+ mock.call('/tmp/builder', builder_yaml_file)
+ ])
+
+ @mock.patch('oc_serviceaccount_secret.Yedit._write')
+ @mock.patch('oc_serviceaccount_secret.OCServiceAccountSecret._run')
+ def test_removing_a_secret_from_a_serviceaccount(self, mock_cmd, mock_write):
+ ''' Testing removing a secret from a service account '''
+
+ # Arrange
+
+ # run_ansible input parameters
+ params = {
+ 'state': 'absent',
+ 'namespace': 'default',
+ 'secret': 'newsecret',
+ 'service_account': 'builder',
+ 'kubeconfig': '/etc/origin/master/admin.kubeconfig',
+ 'debug': False,
+ }
+
+ oc_get_sa_before = '''{
+ "kind": "ServiceAccount",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "builder",
+ "namespace": "default",
+ "selfLink": "/api/v1/namespaces/default/serviceaccounts/builder",
+ "uid": "cf47bca7-ebc4-11e6-b041-0ed9df7abc38",
+ "resourceVersion": "302879",
+ "creationTimestamp": "2017-02-05T17:02:00Z"
+ },
+ "secrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ },
+ {
+ "name": "builder-token-akqxi"
+ },
+ {
+ "name": "newsecret"
+ }
+
+ ],
+ "imagePullSecrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ }
+ ]
+ }
+ '''
+
+ builder_yaml_file = '''\
+secrets:
+- name: builder-dockercfg-rsrua
+- name: builder-token-akqxi
+kind: ServiceAccount
+imagePullSecrets:
+- name: builder-dockercfg-rsrua
+apiVersion: v1
+metadata:
+ name: builder
+ namespace: default
+ resourceVersion: '302879'
+ creationTimestamp: '2017-02-05T17:02:00Z'
+ selfLink: /api/v1/namespaces/default/serviceaccounts/builder
+ uid: cf47bca7-ebc4-11e6-b041-0ed9df7abc38
+'''
+
+ # Return values of our mocked function call. These get returned once per call.
+ mock_cmd.side_effect = [
+ (0, oc_get_sa_before, ''), # First call to the mock
+ (0, oc_get_sa_before, ''), # Second call to the mock
+ (0, 'serviceaccount "builder" replaced', ''), # Third call to the mock
+ ]
+
+ # Act
+ results = OCServiceAccountSecret.run_ansible(params, False)
+
+ # Assert
+ self.assertTrue(results['changed'])
+ self.assertEqual(results['results']['returncode'], 0)
+ self.assertEqual(results['state'], 'absent')
+
+ # Making sure our mocks were called as we expected
+ mock_cmd.assert_has_calls([
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'builder', '-o', 'json'], None),
+ mock.call(['oc', '-n', 'default', 'get', 'sa', 'builder', '-o', 'json'], None),
+ mock.call(['oc', '-n', 'default', 'replace', '-f', '/tmp/builder'], None),
+ ])
+
+ mock_write.assert_has_calls([
+ mock.call('/tmp/builder', builder_yaml_file)
+ ])
+
+ def tearDown(self):
+ '''TearDown method'''
+ pass
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/roles/lib_utils/library/yedit.py b/roles/lib_utils/library/yedit.py
index 7ad2b7181..1c74b4d3f 100644
--- a/roles/lib_utils/library/yedit.py
+++ b/roles/lib_utils/library/yedit.py
@@ -356,6 +356,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -364,15 +375,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/lib_utils/src/class/yedit.py b/roles/lib_utils/src/class/yedit.py
index b1644f9b2..8542fe5c7 100644
--- a/roles/lib_utils/src/class/yedit.py
+++ b/roles/lib_utils/src/class/yedit.py
@@ -175,6 +175,17 @@ class Yedit(object):
return data
+ @staticmethod
+ def _write(filename, contents):
+ ''' Actually write the file contents to disk. This helps with mocking. '''
+
+ tmp_filename = filename + '.yedit'
+
+ with open(tmp_filename, 'w') as yfd:
+ yfd.write(contents)
+
+ os.rename(tmp_filename, filename)
+
def write(self):
''' write to file '''
if not self.filename:
@@ -183,15 +194,11 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- tmp_filename = self.filename + '.yedit'
- with open(tmp_filename, 'w') as yfd:
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
-
- yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- os.rename(tmp_filename, self.filename)
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
return (True, self.yaml_dict)
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-ephemeral-template.json
index 8b8fcb58b..c38d2680b 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-ephemeral-template.json
@@ -196,7 +196,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-persistent-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-persistent-template.json
index 72d3a8556..e8853d8ff 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-persistent-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/mongodb-persistent-template.json
@@ -213,7 +213,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/mysql-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/mysql-ephemeral-template.json
index 34dd2ed78..f7bcfe2ed 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/mysql-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/mysql-ephemeral-template.json
@@ -196,7 +196,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-ephemeral-template.json
index 0d0a2a629..64d5e2b32 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-ephemeral-template.json
@@ -11,7 +11,7 @@
"tags": "database,postgresql"
}
},
- "message": "The following service(s) have been created in your project: ${DATABASE_SERVICE_NAME}.\n\n Username: ${POSTGRESQL_USER}\n Password: ${POSTGRESQL_PASSWORD}\n Database Name: ${POSTGRESQL_DATABASE}\n Connection URL: mysql://${DATABASE_SERVICE_NAME}:5432/\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/sclorg/postgresql-container/blob/master/9.5.",
+ "message": "The following service(s) have been created in your project: ${DATABASE_SERVICE_NAME}.\n\n Username: ${POSTGRESQL_USER}\n Password: ${POSTGRESQL_PASSWORD}\n Database Name: ${POSTGRESQL_DATABASE}\n Connection URL: postgresql://${DATABASE_SERVICE_NAME}:5432/\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/sclorg/postgresql-container/blob/master/9.5.",
"labels": {
"template": "postgresql-ephemeral-template"
},
@@ -186,7 +186,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-persistent-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-persistent-template.json
index 257726cfd..6c101f9d2 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-persistent-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/postgresql-persistent-template.json
@@ -11,7 +11,7 @@
"tags": "database,postgresql"
}
},
- "message": "The following service(s) have been created in your project: ${DATABASE_SERVICE_NAME}.\n\n Username: ${POSTGRESQL_USER}\n Password: ${POSTGRESQL_PASSWORD}\n Database Name: ${POSTGRESQL_DATABASE}\n Connection URL: mysql://${DATABASE_SERVICE_NAME}:5432/\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/sclorg/postgresql-container/blob/master/9.5.",
+ "message": "The following service(s) have been created in your project: ${DATABASE_SERVICE_NAME}.\n\n Username: ${POSTGRESQL_USER}\n Password: ${POSTGRESQL_PASSWORD}\n Database Name: ${POSTGRESQL_DATABASE}\n Connection URL: postgresql://${DATABASE_SERVICE_NAME}:5432/\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/sclorg/postgresql-container/blob/master/9.5.",
"labels": {
"template": "postgresql-persistent-template"
},
@@ -203,7 +203,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/redis-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/redis-ephemeral-template.json
index c9ae8a539..b97e1fd29 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/redis-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/redis-ephemeral-template.json
@@ -157,7 +157,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/db-templates/redis-persistent-template.json b/roles/openshift_examples/files/examples/v1.4/db-templates/redis-persistent-template.json
index e9db9ec9d..dc167da41 100644
--- a/roles/openshift_examples/files/examples/v1.4/db-templates/redis-persistent-template.json
+++ b/roles/openshift_examples/files/examples/v1.4/db-templates/redis-persistent-template.json
@@ -174,7 +174,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/README.md b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/README.md
index 62765e03d..f48d8d4a8 100644
--- a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/README.md
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/README.md
@@ -12,10 +12,15 @@ reference and supply your forked repository as the source-repository when
instantiating them.
* [CakePHP](https://raw.githubusercontent.com/openshift/cakephp-ex/master/openshift/templates/cakephp-mysql.json) - Provides a basic CakePHP application with a MySQL database. For more information see the [source repository](https://github.com/openshift/cakephp-ex).
+* [CakePHP persistent](https://raw.githubusercontent.com/openshift/cakephp-ex/master/openshift/templates/cakephp-mysql-persistent.json) - Provides a basic CakePHP application with a persistent MySQL database. Note: requires available persistent volumes. For more information see the [source repository](https://github.com/openshift/cakephp-ex).
* [Dancer](https://raw.githubusercontent.com/openshift/dancer-ex/master/openshift/templates/dancer-mysql.json) - Provides a basic Dancer (Perl) application with a MySQL database. For more information see the [source repository](https://github.com/openshift/dancer-ex).
+* [Dancer persistent](https://raw.githubusercontent.com/openshift/dancer-ex/master/openshift/templates/dancer-mysql-persistent.json) - Provides a basic Dancer (Perl) application with a persistent MySQL database. Note: requires available persistent volumes. For more information see the [source repository](https://github.com/openshift/dancer-ex).
* [Django](https://raw.githubusercontent.com/openshift/django-ex/master/openshift/templates/django-postgresql.json) - Provides a basic Django (Python) application with a PostgreSQL database. For more information see the [source repository](https://github.com/openshift/django-ex).
+* [Django persistent](https://raw.githubusercontent.com/openshift/django-ex/master/openshift/templates/django-postgresql-persistent.json) - Provides a basic Django (Python) application with a persistent PostgreSQL database. Note: requires available persistent volumes. For more information see the [source repository](https://github.com/openshift/django-ex).
* [NodeJS](https://raw.githubusercontent.com/openshift/nodejs-ex/master/openshift/templates/nodejs-mongodb.json) - Provides a basic NodeJS application with a MongoDB database. For more information see the [source repository](https://github.com/openshift/nodejs-ex).
+* [NodeJS persistent](https://raw.githubusercontent.com/openshift/nodejs-ex/master/openshift/templates/nodejs-mongodb-persistent.json) - Provides a basic NodeJS application with a persistent MongoDB database. Note: requires available persistent volumes. For more information see the [source repository](https://github.com/openshift/nodejs-ex).
* [Rails](https://raw.githubusercontent.com/openshift/rails-ex/master/openshift/templates/rails-postgresql.json) - Provides a basic Rails (Ruby) application with a PostgreSQL database. For more information see the [source repository](https://github.com/openshift/rails-ex).
+* [Rails persistent](https://raw.githubusercontent.com/openshift/rails-ex/master/openshift/templates/rails-postgresql-persistent.json) - Provides a basic Rails (Ruby) application with a persistent PostgreSQL database. Note: requires available persistent volumes. For more information see the [source repository](https://github.com/openshift/rails-ex).
Note: This file is processed by `hack/update-external-examples.sh`. New examples
must follow the exact syntax of the existing entries. Files in this directory
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql-persistent.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql-persistent.json
new file mode 100644
index 000000000..0ba57864e
--- /dev/null
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql-persistent.json
@@ -0,0 +1,575 @@
+{
+ "kind": "Template",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "cakephp-mysql-persistent",
+ "annotations": {
+ "openshift.io/display-name": "CakePHP + MySQL (Persistent)",
+ "description": "An example CakePHP application with a MySQL database. For more information about using this template, including OpenShift considerations, see https://github.com/openshift/cakephp-ex/blob/master/README.md.",
+ "tags": "quickstart,php,cakephp",
+ "iconClass": "icon-php"
+ }
+ },
+ "message": "The following service(s) have been created in your project: ${NAME}, ${DATABASE_SERVICE_NAME}.\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/openshift/cake-ex/blob/master/README.md.",
+ "labels": {
+ "template": "cakephp-mysql-persistent"
+ },
+ "objects": [
+ {
+ "kind": "Secret",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "stringData" : {
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "cakephp-secret-token" : "${CAKEPHP_SECRET_TOKEN}",
+ "cakephp-security-salt" : "${CAKEPHP_SECURITY_SALT}",
+ "cakephp-security-cipher-seed" : "${CAKEPHP_SECURITY_CIPHER_SEED}"
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Exposes and load balances the application pods",
+ "service.alpha.openshift.io/dependencies": "[{\"name\": \"${DATABASE_SERVICE_NAME}\", \"kind\": \"Service\"}]"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "web",
+ "port": 8080,
+ "targetPort": 8080
+ }
+ ],
+ "selector": {
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "Route",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "spec": {
+ "host": "${APPLICATION_DOMAIN}",
+ "to": {
+ "kind": "Service",
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "ImageStream",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Keeps track of changes in the application image"
+ }
+ }
+ },
+ {
+ "kind": "BuildConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to build the application"
+ }
+ },
+ "spec": {
+ "source": {
+ "type": "Git",
+ "git": {
+ "uri": "${SOURCE_REPOSITORY_URL}",
+ "ref": "${SOURCE_REPOSITORY_REF}"
+ },
+ "contextDir": "${CONTEXT_DIR}"
+ },
+ "strategy": {
+ "type": "Source",
+ "sourceStrategy": {
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "php:7.0"
+ },
+ "env": [
+ {
+ "name": "COMPOSER_MIRROR",
+ "value": "${COMPOSER_MIRROR}"
+ }
+ ]
+ }
+ },
+ "output": {
+ "to": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange"
+ },
+ {
+ "type": "ConfigChange"
+ },
+ {
+ "type": "GitHub",
+ "github": {
+ "secret": "${GITHUB_WEBHOOK_SECRET}"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the application server"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate",
+ "recreateParams": {
+ "pre": {
+ "failurePolicy": "Retry",
+ "execNewPod": {
+ "command": [
+ "./migrate-database.sh"
+ ],
+ "containerName": "cakephp-mysql-persistent"
+ }
+ }
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "cakephp-mysql-persistent"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${NAME}",
+ "labels": {
+ "name": "${NAME}"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "name": "cakephp-mysql-persistent",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 8080
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 3,
+ "httpGet": {
+ "path": "/health.php",
+ "port": 8080
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 30,
+ "httpGet": {
+ "path": "/",
+ "port": 8080
+ }
+ },
+ "env": [
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "value": "${DATABASE_SERVICE_NAME}"
+ },
+ {
+ "name": "DATABASE_ENGINE",
+ "value": "${DATABASE_ENGINE}"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "DATABASE_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "CAKEPHP_SECRET_TOKEN",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "cakephp-secret-token"
+ }
+ }
+ },
+ {
+ "name": "CAKEPHP_SECURITY_SALT",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "cakephp-security-salt"
+ }
+ }
+ },
+ {
+ "name": "CAKEPHP_SECURITY_CIPHER_SEED",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "cakephp-security-cipher-seed"
+ }
+ }
+ },
+ {
+ "name": "OPCACHE_REVALIDATE_FREQ",
+ "value": "${OPCACHE_REVALIDATE_FREQ}"
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "kind": "PersistentVolumeClaim",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "${VOLUME_CAPACITY}"
+ }
+ }
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Exposes the database server"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "mysql",
+ "port": 3306,
+ "targetPort": 3306
+ }
+ ],
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the database"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "mysql"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "mysql:5.7"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "labels": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ },
+ "spec": {
+ "volumes": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "persistentVolumeClaim": {
+ "claimName": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ ],
+ "containers": [
+ {
+ "name": "mysql",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 3306
+ }
+ ],
+ "volumeMounts": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "mountPath": "/var/lib/mysql/data"
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 5,
+ "exec": {
+ "command": [ "/bin/sh", "-i", "-c", "MYSQL_PWD='${DATABASE_PASSWORD}' mysql -h 127.0.0.1 -u ${DATABASE_USER} -D ${DATABASE_NAME} -e 'SELECT 1'" ]
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 30,
+ "tcpSocket": {
+ "port": 3306
+ }
+ },
+ "env": [
+ {
+ "name": "MYSQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_MYSQL_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "parameters": [
+ {
+ "name": "NAME",
+ "displayName": "Name",
+ "description": "The name assigned to all of the frontend objects defined in this template.",
+ "required": true,
+ "value": "cakephp-mysql-persistent"
+ },
+ {
+ "name": "NAMESPACE",
+ "displayName": "Namespace",
+ "description": "The OpenShift Namespace where the ImageStream resides.",
+ "required": true,
+ "value": "openshift"
+ },
+ {
+ "name": "MEMORY_LIMIT",
+ "displayName": "Memory Limit",
+ "description": "Maximum amount of memory the CakePHP container can use.",
+ "required": true,
+ "value": "512Mi"
+ },
+ {
+ "name": "MEMORY_MYSQL_LIMIT",
+ "displayName": "Memory Limit (MySQL)",
+ "description": "Maximum amount of memory the MySQL container can use.",
+ "required": true,
+ "value": "512Mi"
+ },
+ {
+ "name": "VOLUME_CAPACITY",
+ "displayName": "Volume Capacity",
+ "description": "Volume space available for data, e.g. 512Mi, 2Gi",
+ "value": "1Gi",
+ "required": true
+ },
+ {
+ "name": "SOURCE_REPOSITORY_URL",
+ "displayName": "Git Repository URL",
+ "description": "The URL of the repository with your application source code.",
+ "required": true,
+ "value": "https://github.com/openshift/cakephp-ex.git"
+ },
+ {
+ "name": "SOURCE_REPOSITORY_REF",
+ "displayName": "Git Reference",
+ "description": "Set this to a branch name, tag or other ref of your repository if you are not using the default branch."
+ },
+ {
+ "name": "CONTEXT_DIR",
+ "displayName": "Context Directory",
+ "description": "Set this to the relative path to your project if it is not in the root of your repository."
+ },
+ {
+ "name": "APPLICATION_DOMAIN",
+ "displayName": "Application Hostname",
+ "description": "The exposed hostname that will route to the CakePHP service, if left blank a value will be defaulted.",
+ "value": ""
+ },
+ {
+ "name": "GITHUB_WEBHOOK_SECRET",
+ "displayName": "GitHub Webhook Secret",
+ "description": "A secret string used to configure the GitHub webhook.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "displayName": "Database Service Name",
+ "required": true,
+ "value": "mysql"
+ },
+ {
+ "name": "DATABASE_ENGINE",
+ "displayName": "Database Engine",
+ "description": "Database engine: postgresql, mysql or sqlite (default).",
+ "required": true,
+ "value": "mysql"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "displayName": "Database Name",
+ "required": true,
+ "value": "default"
+ },
+ {
+ "name": "DATABASE_USER",
+ "displayName": "Database User",
+ "required": true,
+ "value": "cakephp"
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "displayName": "Database Password",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{16}"
+ },
+ {
+ "name": "CAKEPHP_SECRET_TOKEN",
+ "displayName": "CakePHP secret token",
+ "description": "Set this to a long random string.",
+ "generate": "expression",
+ "from": "[\\w]{50}"
+ },
+ {
+ "name": "CAKEPHP_SECURITY_SALT",
+ "displayName": "CakePHP Security Salt",
+ "description": "Security salt for session hash.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "CAKEPHP_SECURITY_CIPHER_SEED",
+ "displayName": "CakePHP Security Cipher Seed",
+ "description": "Security cipher seed for session hash.",
+ "generate": "expression",
+ "from": "[0-9]{30}"
+ },
+ {
+ "name": "OPCACHE_REVALIDATE_FREQ",
+ "displayName": "OPcache Revalidation Frequency",
+ "description": "How often to check script timestamps for updates, in seconds. 0 will result in OPcache checking for updates on every request.",
+ "value": "2"
+ },
+ {
+ "name": "COMPOSER_MIRROR",
+ "displayName": "Custom Composer Mirror URL",
+ "description": "The custom Composer mirror URL",
+ "value": ""
+ }
+ ]
+}
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql.json
index 9dbbf89d1..9732e59e1 100644
--- a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql.json
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/cakephp-mysql.json
@@ -22,8 +22,11 @@
"name": "${NAME}"
},
"stringData" : {
- "databaseUser" : "${DATABASE_USER}",
- "databasePassword" : "${DATABASE_PASSWORD}"
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "cakephp-secret-token" : "${CAKEPHP_SECRET_TOKEN}",
+ "cakephp-security-salt" : "${CAKEPHP_SECURITY_SALT}",
+ "cakephp-security-cipher-seed" : "${CAKEPHP_SECURITY_CIPHER_SEED}"
}
},
{
@@ -97,12 +100,12 @@
"from": {
"kind": "ImageStreamTag",
"namespace": "${NAMESPACE}",
- "name": "php:5.6"
+ "name": "php:7.0"
},
"env": [
{
- "name": "COMPOSER_MIRROR",
- "value": "${COMPOSER_MIRROR}"
+ "name": "COMPOSER_MIRROR",
+ "value": "${COMPOSER_MIRROR}"
}
]
}
@@ -201,12 +204,12 @@
}
},
"livenessProbe": {
- "timeoutSeconds": 3,
- "initialDelaySeconds": 30,
- "httpGet": {
- "path": "/",
- "port": 8080
- }
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 30,
+ "httpGet": {
+ "path": "/",
+ "port": 8080
+ }
},
"env": [
{
@@ -226,7 +229,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -235,21 +238,36 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
{
"name": "CAKEPHP_SECRET_TOKEN",
- "value": "${CAKEPHP_SECRET_TOKEN}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "cakephp-secret-token"
+ }
+ }
},
{
"name": "CAKEPHP_SECURITY_SALT",
- "value": "${CAKEPHP_SECURITY_SALT}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "cakephp-security-salt"
+ }
+ }
},
{
"name": "CAKEPHP_SECURITY_CIPHER_SEED",
- "value": "${CAKEPHP_SECURITY_CIPHER_SEED}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "cakephp-security-cipher-seed"
+ }
+ }
},
{
"name": "OPCACHE_REVALIDATE_FREQ",
@@ -257,9 +275,9 @@
}
],
"resources": {
- "limits": {
- "memory": "${MEMORY_LIMIT}"
- }
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
}
}
]
@@ -313,7 +331,7 @@
"from": {
"kind": "ImageStreamTag",
"namespace": "${NAMESPACE}",
- "name": "mysql:5.6"
+ "name": "mysql:5.7"
}
}
},
@@ -362,40 +380,40 @@
}
},
"livenessProbe": {
- "timeoutSeconds": 1,
- "initialDelaySeconds": 30,
- "tcpSocket": {
- "port": 3306
- }
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 30,
+ "tcpSocket": {
+ "port": 3306
+ }
},
"env": [
- {
- "name": "MYSQL_USER",
- "valueFrom": {
- "secretKeyRef" : {
- "name" : "${NAME}",
- "key" : "databaseUser"
- }
+ {
+ "name": "MYSQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
}
- },
- {
- "name": "MYSQL_PASSWORD",
- "valueFrom": {
- "secretKeyRef" : {
- "name" : "${NAME}",
- "key" : "databasePassword"
- }
- }
- },
- {
- "name": "MYSQL_DATABASE",
- "value": "${DATABASE_NAME}"
}
+ },
+ {
+ "name": "MYSQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ }
],
"resources": {
- "limits": {
- "memory": "${MEMORY_MYSQL_LIMIT}"
- }
+ "limits": {
+ "memory": "${MEMORY_MYSQL_LIMIT}"
+ }
}
}
]
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql-persistent.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql-persistent.json
new file mode 100644
index 000000000..074561550
--- /dev/null
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql-persistent.json
@@ -0,0 +1,519 @@
+{
+ "kind": "Template",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "dancer-mysql-persistent",
+ "annotations": {
+ "openshift.io/display-name": "Dancer + MySQL (Persistent)",
+ "description": "An example Dancer application with a MySQL database. For more information about using this template, including OpenShift considerations, see https://github.com/openshift/dancer-ex/blob/master/README.md.",
+ "tags": "quickstart,perl,dancer",
+ "iconClass": "icon-perl"
+ }
+ },
+ "message": "The following service(s) have been created in your project: ${NAME}, ${DATABASE_SERVICE_NAME}.\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/openshift/dancer-ex/blob/master/README.md.",
+ "labels": {
+ "template": "dancer-mysql-persistent"
+ },
+ "objects": [
+ {
+ "kind": "Secret",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "stringData" : {
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "keybase" : "${SECRET_KEY_BASE}"
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Exposes and load balances the application pods",
+ "service.alpha.openshift.io/dependencies": "[{\"name\": \"${DATABASE_SERVICE_NAME}\", \"kind\": \"Service\"}]"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "web",
+ "port": 8080,
+ "targetPort": 8080
+ }
+ ],
+ "selector": {
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "Route",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "spec": {
+ "host": "${APPLICATION_DOMAIN}",
+ "to": {
+ "kind": "Service",
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "ImageStream",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Keeps track of changes in the application image"
+ }
+ }
+ },
+ {
+ "kind": "BuildConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to build the application"
+ }
+ },
+ "spec": {
+ "source": {
+ "type": "Git",
+ "git": {
+ "uri": "${SOURCE_REPOSITORY_URL}",
+ "ref": "${SOURCE_REPOSITORY_REF}"
+ },
+ "contextDir": "${CONTEXT_DIR}"
+ },
+ "strategy": {
+ "type": "Source",
+ "sourceStrategy": {
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "perl:5.24"
+ },
+ "env": [
+ {
+ "name": "CPAN_MIRROR",
+ "value": "${CPAN_MIRROR}"
+ }
+ ]
+ }
+ },
+ "output": {
+ "to": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange"
+ },
+ {
+ "type": "ConfigChange"
+ },
+ {
+ "type": "GitHub",
+ "github": {
+ "secret": "${GITHUB_WEBHOOK_SECRET}"
+ }
+ }
+ ],
+ "postCommit": {
+ "script": "perl -I extlib/lib/perl5 -I lib t/*"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the application server"
+ }
+ },
+ "spec": {
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "dancer-mysql-persistent"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${NAME}",
+ "labels": {
+ "name": "${NAME}"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "name": "dancer-mysql-persistent",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 8080
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 3,
+ "httpGet": {
+ "path": "/health",
+ "port": 8080
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 30,
+ "httpGet": {
+ "path": "/",
+ "port": 8080
+ }
+ },
+ "env": [
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "value": "${DATABASE_SERVICE_NAME}"
+ },
+ {
+ "name": "MYSQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "SECRET_KEY_BASE",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "keybase"
+ }
+ }
+ },
+ {
+ "name": "PERL_APACHE2_RELOAD",
+ "value": "${PERL_APACHE2_RELOAD}"
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "kind": "PersistentVolumeClaim",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "${VOLUME_CAPACITY}"
+ }
+ }
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Exposes the database server"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "mysql",
+ "port": 3306,
+ "targetPort": 3306
+ }
+ ],
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the database"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "mysql"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "mysql:5.7"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "labels": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ },
+ "spec": {
+ "volumes": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "persistentVolumeClaim": {
+ "claimName": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ ],
+ "containers": [
+ {
+ "name": "mysql",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 3306
+ }
+ ],
+ "volumeMounts": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "mountPath": "/var/lib/mysql/data"
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 5,
+ "exec": {
+ "command": [ "/bin/sh", "-i", "-c", "MYSQL_PWD='${DATABASE_PASSWORD}' mysql -h 127.0.0.1 -u ${DATABASE_USER} -D ${DATABASE_NAME} -e 'SELECT 1'" ]
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 30,
+ "tcpSocket": {
+ "port": 3306
+ }
+ },
+ "env": [
+ {
+ "name": "MYSQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "MYSQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_MYSQL_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "parameters": [
+ {
+ "name": "NAME",
+ "displayName": "Name",
+ "description": "The name assigned to all of the frontend objects defined in this template.",
+ "required": true,
+ "value": "dancer-mysql-persistent"
+ },
+ {
+ "name": "NAMESPACE",
+ "displayName": "Namespace",
+ "description": "The OpenShift Namespace where the ImageStream resides.",
+ "required": true,
+ "value": "openshift"
+ },
+ {
+ "name": "MEMORY_LIMIT",
+ "displayName": "Memory Limit",
+ "description": "Maximum amount of memory the Perl Dancer container can use.",
+ "required": true,
+ "value": "512Mi"
+ },
+ {
+ "name": "MEMORY_MYSQL_LIMIT",
+ "displayName": "Memory Limit (MySQL)",
+ "description": "Maximum amount of memory the MySQL container can use.",
+ "required": true,
+ "value": "512Mi"
+ },
+ {
+ "name": "VOLUME_CAPACITY",
+ "displayName": "Volume Capacity",
+ "description": "Volume space available for data, e.g. 512Mi, 2Gi",
+ "value": "1Gi",
+ "required": true
+ },
+ {
+ "name": "SOURCE_REPOSITORY_URL",
+ "displayName": "Git Repository URL",
+ "description": "The URL of the repository with your application source code.",
+ "required": true,
+ "value": "https://github.com/openshift/dancer-ex.git"
+ },
+ {
+ "name": "SOURCE_REPOSITORY_REF",
+ "displayName": "Git Reference",
+ "description": "Set this to a branch name, tag or other ref of your repository if you are not using the default branch."
+ },
+ {
+ "name": "CONTEXT_DIR",
+ "displayName": "Context Directory",
+ "description": "Set this to the relative path to your project if it is not in the root of your repository."
+ },
+ {
+ "name": "APPLICATION_DOMAIN",
+ "displayName": "Application Hostname",
+ "description": "The exposed hostname that will route to the Dancer service, if left blank a value will be defaulted.",
+ "value": ""
+ },
+ {
+ "name": "GITHUB_WEBHOOK_SECRET",
+ "displayName": "GitHub Webhook Secret",
+ "description": "A secret string used to configure the GitHub webhook.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "displayName": "Database Service Name",
+ "required": true,
+ "value": "database"
+ },
+ {
+ "name": "DATABASE_USER",
+ "displayName": "Database Username",
+ "generate": "expression",
+ "from": "user[A-Z0-9]{3}"
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "displayName": "Database Password",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{8}"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "displayName": "Database Name",
+ "required": true,
+ "value": "sampledb"
+ },
+ {
+ "name": "PERL_APACHE2_RELOAD",
+ "displayName": "Perl Module Reload",
+ "description": "Set this to \"true\" to enable automatic reloading of modified Perl modules.",
+ "value": ""
+ },
+ {
+ "name": "SECRET_KEY_BASE",
+ "displayName": "Secret Key",
+ "description": "Your secret key for verifying the integrity of signed cookies.",
+ "generate": "expression",
+ "from": "[a-z0-9]{127}"
+ },
+ {
+ "name": "CPAN_MIRROR",
+ "displayName": "Custom CPAN Mirror URL",
+ "description": "The custom CPAN mirror URL",
+ "value": ""
+ }
+ ]
+}
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql.json
index dccb8bf7f..18100974b 100644
--- a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql.json
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/dancer-mysql.json
@@ -22,8 +22,9 @@
"name": "${NAME}"
},
"stringData" : {
- "databaseUser" : "${DATABASE_USER}",
- "databasePassword" : "${DATABASE_PASSWORD}"
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "keybase" : "${SECRET_KEY_BASE}"
}
},
{
@@ -97,7 +98,7 @@
"from": {
"kind": "ImageStreamTag",
"namespace": "${NAMESPACE}",
- "name": "perl:5.20"
+ "name": "perl:5.24"
},
"env": [
{
@@ -207,7 +208,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -216,7 +217,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
@@ -226,7 +227,12 @@
},
{
"name": "SECRET_KEY_BASE",
- "value": "${SECRET_KEY_BASE}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "keybase"
+ }
+ }
},
{
"name": "PERL_APACHE2_RELOAD",
@@ -290,7 +296,7 @@
"from": {
"kind": "ImageStreamTag",
"namespace": "${NAMESPACE}",
- "name": "mysql:5.6"
+ "name": "mysql:5.7"
}
}
},
@@ -351,7 +357,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -360,7 +366,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql-persistent.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql-persistent.json
new file mode 100644
index 000000000..b39771bd8
--- /dev/null
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql-persistent.json
@@ -0,0 +1,532 @@
+{
+ "kind": "Template",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "django-psql-persistent",
+ "annotations": {
+ "openshift.io/display-name": "Django + PostgreSQL (Persistent)",
+ "description": "An example Django application with a PostgreSQL database. For more information about using this template, including OpenShift considerations, see https://github.com/openshift/django-ex/blob/master/README.md.",
+ "tags": "quickstart,python,django",
+ "iconClass": "icon-python"
+ }
+ },
+ "message": "The following service(s) have been created in your project: ${NAME}, ${DATABASE_SERVICE_NAME}.\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/openshift/django-ex/blob/master/README.md.",
+ "labels": {
+ "template": "django-psql-persistent"
+ },
+ "objects": [
+ {
+ "kind": "Secret",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "stringData" : {
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "django-secret-key" : "${DJANGO_SECRET_KEY}"
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Exposes and load balances the application pods",
+ "service.alpha.openshift.io/dependencies": "[{\"name\": \"${DATABASE_SERVICE_NAME}\", \"kind\": \"Service\"}]"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "web",
+ "port": 8080,
+ "targetPort": 8080
+ }
+ ],
+ "selector": {
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "Route",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "spec": {
+ "host": "${APPLICATION_DOMAIN}",
+ "to": {
+ "kind": "Service",
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "ImageStream",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Keeps track of changes in the application image"
+ }
+ }
+ },
+ {
+ "kind": "BuildConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to build the application"
+ }
+ },
+ "spec": {
+ "source": {
+ "type": "Git",
+ "git": {
+ "uri": "${SOURCE_REPOSITORY_URL}",
+ "ref": "${SOURCE_REPOSITORY_REF}"
+ },
+ "contextDir": "${CONTEXT_DIR}"
+ },
+ "strategy": {
+ "type": "Source",
+ "sourceStrategy": {
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "python:3.5"
+ },
+ "env": [
+ {
+ "name": "PIP_INDEX_URL",
+ "value": "${PIP_INDEX_URL}"
+ }
+ ]
+ }
+ },
+ "output": {
+ "to": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange"
+ },
+ {
+ "type": "ConfigChange"
+ },
+ {
+ "type": "GitHub",
+ "github": {
+ "secret": "${GITHUB_WEBHOOK_SECRET}"
+ }
+ }
+ ],
+ "postCommit": {
+ "script": "./manage.py test"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the application server"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Rolling"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "django-psql-persistent"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${NAME}",
+ "labels": {
+ "name": "${NAME}"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "name": "django-psql-persistent",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 8080
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 3,
+ "httpGet": {
+ "path": "/health",
+ "port": 8080
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 30,
+ "httpGet": {
+ "path": "/health",
+ "port": 8080
+ }
+ },
+ "env": [
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "value": "${DATABASE_SERVICE_NAME}"
+ },
+ {
+ "name": "DATABASE_ENGINE",
+ "value": "${DATABASE_ENGINE}"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "DATABASE_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "APP_CONFIG",
+ "value": "${APP_CONFIG}"
+ },
+ {
+ "name": "DJANGO_SECRET_KEY",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "django-secret-key"
+ }
+ }
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "kind": "PersistentVolumeClaim",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "${VOLUME_CAPACITY}"
+ }
+ }
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Exposes the database server"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "postgresql",
+ "port": 5432,
+ "targetPort": 5432
+ }
+ ],
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the database"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "postgresql"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "postgresql:9.5"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "labels": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ },
+ "spec": {
+ "volumes": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "persistentVolumeClaim": {
+ "claimName": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ ],
+ "containers": [
+ {
+ "name": "postgresql",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 5432
+ }
+ ],
+ "env": [
+ {
+ "name": "POSTGRESQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "POSTGRESQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "POSTGRESQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ }
+ ],
+ "volumeMounts": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "mountPath": "/var/lib/pgsql/data"
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 5,
+ "exec": {
+ "command": [ "/bin/sh", "-i", "-c", "psql -h 127.0.0.1 -U ${POSTGRESQL_USER} -q -d ${POSTGRESQL_DATABASE} -c 'SELECT 1'"]
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 30,
+ "tcpSocket": {
+ "port": 5432
+ }
+ },
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_POSTGRESQL_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "parameters": [
+ {
+ "name": "NAME",
+ "displayName": "Name",
+ "description": "The name assigned to all of the frontend objects defined in this template.",
+ "required": true,
+ "value": "django-psql-persistent"
+ },
+ {
+ "name": "NAMESPACE",
+ "displayName": "Namespace",
+ "required": true,
+ "description": "The OpenShift Namespace where the ImageStream resides.",
+ "value": "openshift"
+ },
+ {
+ "name": "MEMORY_LIMIT",
+ "displayName": "Memory Limit",
+ "required": true,
+ "description": "Maximum amount of memory the Django container can use.",
+ "value": "512Mi"
+ },
+ {
+ "name": "MEMORY_POSTGRESQL_LIMIT",
+ "displayName": "Memory Limit (PostgreSQL)",
+ "required": true,
+ "description": "Maximum amount of memory the PostgreSQL container can use.",
+ "value": "512Mi"
+ },
+ {
+ "name": "VOLUME_CAPACITY",
+ "displayName": "Volume Capacity",
+ "description": "Volume space available for data, e.g. 512Mi, 2Gi",
+ "value": "1Gi",
+ "required": true
+ },
+ {
+ "name": "SOURCE_REPOSITORY_URL",
+ "displayName": "Git Repository URL",
+ "required": true,
+ "description": "The URL of the repository with your application source code.",
+ "value": "https://github.com/openshift/django-ex.git"
+ },
+ {
+ "name": "SOURCE_REPOSITORY_REF",
+ "displayName": "Git Reference",
+ "description": "Set this to a branch name, tag or other ref of your repository if you are not using the default branch."
+ },
+ {
+ "name": "CONTEXT_DIR",
+ "displayName": "Context Directory",
+ "description": "Set this to the relative path to your project if it is not in the root of your repository."
+ },
+ {
+ "name": "APPLICATION_DOMAIN",
+ "displayName": "Application Hostname",
+ "description": "The exposed hostname that will route to the Django service, if left blank a value will be defaulted.",
+ "value": ""
+ },
+ {
+ "name": "GITHUB_WEBHOOK_SECRET",
+ "displayName": "GitHub Webhook Secret",
+ "description": "A secret string used to configure the GitHub webhook.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "displayName": "Database Service Name",
+ "required": true,
+ "value": "postgresql"
+ },
+ {
+ "name": "DATABASE_ENGINE",
+ "displayName": "Database Engine",
+ "required": true,
+ "description": "Database engine: postgresql, mysql or sqlite (default).",
+ "value": "postgresql"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "displayName": "Database Name",
+ "required": true,
+ "value": "default"
+ },
+ {
+ "name": "DATABASE_USER",
+ "displayName": "Database Username",
+ "required": true,
+ "value": "django"
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "displayName": "Database User Password",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{16}"
+ },
+ {
+ "name": "APP_CONFIG",
+ "displayName": "Application Configuration File Path",
+ "description": "Relative path to Gunicorn configuration file (optional)."
+ },
+ {
+ "name": "DJANGO_SECRET_KEY",
+ "displayName": "Django Secret Key",
+ "description": "Set this to a long random string.",
+ "generate": "expression",
+ "from": "[\\w]{50}"
+ },
+ {
+ "name": "PIP_INDEX_URL",
+ "displayName": "Custom PyPi Index URL",
+ "description": "The custom PyPi index URL",
+ "value": ""
+ }
+ ]
+}
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql.json
index 59ff8a988..64b914e61 100644
--- a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql.json
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/django-postgresql.json
@@ -22,8 +22,9 @@
"name": "${NAME}"
},
"stringData" : {
- "databaseUser" : "${DATABASE_USER}",
- "databasePassword" : "${DATABASE_PASSWORD}"
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "django-secret-key" : "${DJANGO_SECRET_KEY}"
}
},
{
@@ -218,7 +219,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -227,7 +228,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
@@ -237,7 +238,12 @@
},
{
"name": "DJANGO_SECRET_KEY",
- "value": "${DJANGO_SECRET_KEY}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "django-secret-key"
+ }
+ }
}
],
"resources": {
@@ -338,7 +344,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -347,7 +353,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb-persistent.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb-persistent.json
new file mode 100644
index 000000000..fecb84662
--- /dev/null
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb-persistent.json
@@ -0,0 +1,541 @@
+{
+ "kind": "Template",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "nodejs-mongo-persistent",
+ "annotations": {
+ "openshift.io/display-name": "Node.js + MongoDB (Persistent)",
+ "description": "An example Node.js application with a MongoDB database. For more information about using this template, including OpenShift considerations, see https://github.com/openshift/nodejs-ex/blob/master/README.md.",
+ "tags": "quickstart,nodejs",
+ "iconClass": "icon-nodejs"
+ }
+ },
+ "message": "The following service(s) have been created in your project: ${NAME}, ${DATABASE_SERVICE_NAME}.\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/openshift/nodejs-ex/blob/master/README.md.",
+ "labels": {
+ "template": "nodejs-mongo-persistent"
+ },
+ "objects": [
+ {
+ "kind": "Secret",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "stringData": {
+ "database-user": "${DATABASE_USER}",
+ "database-password": "${DATABASE_PASSWORD}",
+ "database-admin-password" : "${DATABASE_ADMIN_PASSWORD}"
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Exposes and load balances the application pods",
+ "service.alpha.openshift.io/dependencies": "[{\"name\": \"${DATABASE_SERVICE_NAME}\", \"kind\": \"Service\"}]"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "web",
+ "port": 8080,
+ "targetPort": 8080
+ }
+ ],
+ "selector": {
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "Route",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "spec": {
+ "host": "${APPLICATION_DOMAIN}",
+ "to": {
+ "kind": "Service",
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "ImageStream",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Keeps track of changes in the application image"
+ }
+ }
+ },
+ {
+ "kind": "BuildConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to build the application"
+ }
+ },
+ "spec": {
+ "source": {
+ "type": "Git",
+ "git": {
+ "uri": "${SOURCE_REPOSITORY_URL}",
+ "ref": "${SOURCE_REPOSITORY_REF}"
+ },
+ "contextDir": "${CONTEXT_DIR}"
+ },
+ "strategy": {
+ "type": "Source",
+ "sourceStrategy": {
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "nodejs:4"
+ },
+ "env": [
+ {
+ "name": "NPM_MIRROR",
+ "value": "${NPM_MIRROR}"
+ }
+ ]
+ }
+ },
+ "output": {
+ "to": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange"
+ },
+ {
+ "type": "ConfigChange"
+ },
+ {
+ "type": "GitHub",
+ "github": {
+ "secret": "${GITHUB_WEBHOOK_SECRET}"
+ }
+ },
+ {
+ "type": "Generic",
+ "generic": {
+ "secret": "${GENERIC_WEBHOOK_SECRET}"
+ }
+ }
+ ],
+ "postCommit": {
+ "script": "npm test"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the application server"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Rolling"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "nodejs-mongo-persistent"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${NAME}",
+ "labels": {
+ "name": "${NAME}"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "name": "nodejs-mongo-persistent",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 8080
+ }
+ ],
+ "env": [
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "value": "${DATABASE_SERVICE_NAME}"
+ },
+ {
+ "name": "MONGODB_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "MONGODB_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "MONGODB_DATABASE",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "MONGODB_ADMIN_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-admin-password"
+ }
+ }
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 3,
+ "httpGet": {
+ "path": "/pagecount",
+ "port": 8080
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 30,
+ "httpGet": {
+ "path": "/pagecount",
+ "port": 8080
+ }
+ },
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "kind": "PersistentVolumeClaim",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "${VOLUME_CAPACITY}"
+ }
+ }
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Exposes the database server"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "mongodb",
+ "port": 27017,
+ "targetPort": 27017
+ }
+ ],
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the database"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "mongodb"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "mongodb:3.2"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "labels": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "name": "mongodb",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 27017
+ }
+ ],
+ "env": [
+ {
+ "name": "MONGODB_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "MONGODB_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "MONGODB_DATABASE",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "MONGODB_ADMIN_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-admin-password"
+ }
+ }
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 3,
+ "exec": {
+ "command": [
+ "/bin/sh",
+ "-i",
+ "-c",
+ "mongo 127.0.0.1:27017/$MONGODB_DATABASE -u $MONGODB_USER -p $MONGODB_PASSWORD --eval=\"quit()\""
+ ]
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 30,
+ "tcpSocket": {
+ "port": 27017
+ }
+ },
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_MONGODB_LIMIT}"
+ }
+ },
+ "volumeMounts": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "mountPath": "/var/lib/mongodb/data"
+ }
+ ]
+ }
+ ],
+ "volumes": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "persistentVolumeClaim": {
+ "claimName": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "parameters": [
+ {
+ "name": "NAME",
+ "displayName": "Name",
+ "description": "The name assigned to all of the frontend objects defined in this template.",
+ "required": true,
+ "value": "nodejs-mongo-persistent"
+ },
+ {
+ "name": "NAMESPACE",
+ "displayName": "Namespace",
+ "description": "The OpenShift Namespace where the ImageStream resides.",
+ "required": true,
+ "value": "openshift"
+ },
+ {
+ "name": "MEMORY_LIMIT",
+ "displayName": "Memory Limit",
+ "description": "Maximum amount of memory the Node.js container can use.",
+ "required": true,
+ "value": "512Mi"
+ },
+ {
+ "name": "MEMORY_MONGODB_LIMIT",
+ "displayName": "Memory Limit (MongoDB)",
+ "description": "Maximum amount of memory the MongoDB container can use.",
+ "required": true,
+ "value": "512Mi"
+ },
+ {
+ "name": "VOLUME_CAPACITY",
+ "displayName": "Volume Capacity",
+ "description": "Volume space available for data, e.g. 512Mi, 2Gi",
+ "value": "1Gi",
+ "required": true
+ },
+ {
+ "name": "SOURCE_REPOSITORY_URL",
+ "displayName": "Git Repository URL",
+ "description": "The URL of the repository with your application source code.",
+ "required": true,
+ "value": "https://github.com/openshift/nodejs-ex.git"
+ },
+ {
+ "name": "SOURCE_REPOSITORY_REF",
+ "displayName": "Git Reference",
+ "description": "Set this to a branch name, tag or other ref of your repository if you are not using the default branch."
+ },
+ {
+ "name": "CONTEXT_DIR",
+ "displayName": "Context Directory",
+ "description": "Set this to the relative path to your project if it is not in the root of your repository."
+ },
+ {
+ "name": "APPLICATION_DOMAIN",
+ "displayName": "Application Hostname",
+ "description": "The exposed hostname that will route to the Node.js service, if left blank a value will be defaulted.",
+ "value": ""
+ },
+ {
+ "name": "GITHUB_WEBHOOK_SECRET",
+ "displayName": "GitHub Webhook Secret",
+ "description": "A secret string used to configure the GitHub webhook.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "GENERIC_WEBHOOK_SECRET",
+ "displayName": "Generic Webhook Secret",
+ "description": "A secret string used to configure the Generic webhook.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "displayName": "Database Service Name",
+ "required": true,
+ "value": "mongodb"
+ },
+ {
+ "name": "DATABASE_USER",
+ "displayName": "MongoDB Username",
+ "description": "Username for MongoDB user that will be used for accessing the database.",
+ "generate": "expression",
+ "from": "user[A-Z0-9]{3}"
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "displayName": "MongoDB Password",
+ "description": "Password for the MongoDB user.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{16}"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "displayName": "Database Name",
+ "required": true,
+ "value": "sampledb"
+ },
+ {
+ "name": "DATABASE_ADMIN_PASSWORD",
+ "displayName": "Database Administrator Password",
+ "description": "Password for the database admin user.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{16}"
+ },
+ {
+ "name": "NPM_MIRROR",
+ "displayName": "Custom NPM Mirror URL",
+ "description": "The custom NPM mirror URL",
+ "value": ""
+ }
+ ]
+}
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb.json
index 91f9ec7b3..6a55f0251 100644
--- a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb.json
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/nodejs-mongodb.json
@@ -22,9 +22,9 @@
"name": "${NAME}"
},
"stringData": {
- "databaseUser": "${DATABASE_USER}",
- "databasePassword": "${DATABASE_PASSWORD}",
- "databaseAdminPassword" : "${DATABASE_ADMIN_PASSWORD}"
+ "database-user": "${DATABASE_USER}",
+ "database-password": "${DATABASE_PASSWORD}",
+ "database-admin-password" : "${DATABASE_ADMIN_PASSWORD}"
}
},
{
@@ -201,7 +201,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -210,7 +210,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
@@ -223,7 +223,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseAdminPassword"
+ "key" : "database-admin-password"
}
}
}
@@ -336,7 +336,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -345,7 +345,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
@@ -358,7 +358,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseAdminPassword"
+ "key" : "database-admin-password"
}
}
}
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql-persistent.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql-persistent.json
new file mode 100644
index 000000000..6c0a484b5
--- /dev/null
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql-persistent.json
@@ -0,0 +1,598 @@
+{
+ "kind": "Template",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "rails-pgsql-persistent",
+ "annotations": {
+ "openshift.io/display-name": "Rails + PostgreSQL (Persistent)",
+ "description": "An example Rails application with a PostgreSQL database. For more information about using this template, including OpenShift considerations, see https://github.com/openshift/rails-ex/blob/master/README.md.",
+ "tags": "quickstart,ruby,rails",
+ "iconClass": "icon-ruby"
+ }
+ },
+ "message": "The following service(s) have been created in your project: ${NAME}, ${DATABASE_SERVICE_NAME}.\n\nFor more information about using this template, including OpenShift considerations, see https://github.com/openshift/rails-ex/blob/master/README.md.",
+ "labels": {
+ "template": "rails-pgsql-persistent"
+ },
+ "objects": [
+ {
+ "kind": "Secret",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "stringData" : {
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "application-user" : "${APPLICATION_USER}",
+ "application-password" : "${APPLICATION_PASSWORD}",
+ "keybase" : "${SECRET_KEY_BASE}"
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Exposes and load balances the application pods",
+ "service.alpha.openshift.io/dependencies": "[{\"name\": \"${DATABASE_SERVICE_NAME}\", \"kind\": \"Service\"}]"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "web",
+ "port": 8080,
+ "targetPort": 8080
+ }
+ ],
+ "selector": {
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "Route",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}"
+ },
+ "spec": {
+ "host": "${APPLICATION_DOMAIN}",
+ "to": {
+ "kind": "Service",
+ "name": "${NAME}"
+ }
+ }
+ },
+ {
+ "kind": "ImageStream",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Keeps track of changes in the application image"
+ }
+ }
+ },
+ {
+ "kind": "BuildConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to build the application"
+ }
+ },
+ "spec": {
+ "source": {
+ "type": "Git",
+ "git": {
+ "uri": "${SOURCE_REPOSITORY_URL}",
+ "ref": "${SOURCE_REPOSITORY_REF}"
+ },
+ "contextDir": "${CONTEXT_DIR}"
+ },
+ "strategy": {
+ "type": "Source",
+ "sourceStrategy": {
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "ruby:2.3"
+ },
+ "env": [
+ {
+ "name": "RUBYGEM_MIRROR",
+ "value": "${RUBYGEM_MIRROR}"
+ }
+ ]
+ }
+ },
+ "output": {
+ "to": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange"
+ },
+ {
+ "type": "ConfigChange"
+ },
+ {
+ "type": "GitHub",
+ "github": {
+ "secret": "${GITHUB_WEBHOOK_SECRET}"
+ }
+ }
+ ],
+ "postCommit": {
+ "script": "bundle exec rake test"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the application server"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate",
+ "recreateParams": {
+ "pre": {
+ "failurePolicy": "Abort",
+ "execNewPod": {
+ "command": [
+ "./migrate-database.sh"
+ ],
+ "containerName": "${NAME}"
+ }
+ }
+ }
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "rails-pgsql-persistent"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "name": "${NAME}:latest"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${NAME}",
+ "labels": {
+ "name": "${NAME}"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "name": "rails-pgsql-persistent",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 8080
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 5,
+ "httpGet": {
+ "path": "/articles",
+ "port": 8080
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 3,
+ "initialDelaySeconds": 10,
+ "httpGet": {
+ "path": "/articles",
+ "port": 8080
+ }
+ },
+ "env": [
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "value": "${DATABASE_SERVICE_NAME}"
+ },
+ {
+ "name": "POSTGRESQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "POSTGRESQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "SECRET_KEY_BASE",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "keybase"
+ }
+ }
+ },
+ {
+ "name": "POSTGRESQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "POSTGRESQL_MAX_CONNECTIONS",
+ "value": "${POSTGRESQL_MAX_CONNECTIONS}"
+ },
+ {
+ "name": "POSTGRESQL_SHARED_BUFFERS",
+ "value": "${POSTGRESQL_SHARED_BUFFERS}"
+ },
+ {
+ "name": "APPLICATION_DOMAIN",
+ "value": "${APPLICATION_DOMAIN}"
+ },
+ {
+ "name": "APPLICATION_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "application-user"
+ }
+ }
+ },
+ {
+ "name": "APPLICATION_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "application-password"
+ }
+ }
+ },
+ {
+ "name": "RAILS_ENV",
+ "value": "${RAILS_ENV}"
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "kind": "PersistentVolumeClaim",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "${VOLUME_CAPACITY}"
+ }
+ }
+ }
+ },
+ {
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Exposes the database server"
+ }
+ },
+ "spec": {
+ "ports": [
+ {
+ "name": "postgresql",
+ "port": 5432,
+ "targetPort": 5432
+ }
+ ],
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ },
+ {
+ "kind": "DeploymentConfig",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "annotations": {
+ "description": "Defines how to deploy the database"
+ }
+ },
+ "spec": {
+ "strategy": {
+ "type": "Recreate"
+ },
+ "triggers": [
+ {
+ "type": "ImageChange",
+ "imageChangeParams": {
+ "automatic": true,
+ "containerNames": [
+ "postgresql"
+ ],
+ "from": {
+ "kind": "ImageStreamTag",
+ "namespace": "${NAMESPACE}",
+ "name": "postgresql:9.5"
+ }
+ }
+ },
+ {
+ "type": "ConfigChange"
+ }
+ ],
+ "replicas": 1,
+ "selector": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ },
+ "template": {
+ "metadata": {
+ "name": "${DATABASE_SERVICE_NAME}",
+ "labels": {
+ "name": "${DATABASE_SERVICE_NAME}"
+ }
+ },
+ "spec": {
+ "volumes": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "persistentVolumeClaim": {
+ "claimName": "${DATABASE_SERVICE_NAME}"
+ }
+ }
+ ],
+ "containers": [
+ {
+ "name": "postgresql",
+ "image": " ",
+ "ports": [
+ {
+ "containerPort": 5432
+ }
+ ],
+ "readinessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 5,
+ "exec": {
+ "command": [ "/bin/sh", "-i", "-c", "psql -h 127.0.0.1 -U ${POSTGRESQL_USER} -q -d ${POSTGRESQL_DATABASE} -c 'SELECT 1'"]
+ }
+ },
+ "livenessProbe": {
+ "timeoutSeconds": 1,
+ "initialDelaySeconds": 30,
+ "tcpSocket": {
+ "port": 5432
+ }
+ },
+ "volumeMounts": [
+ {
+ "name": "${DATABASE_SERVICE_NAME}-data",
+ "mountPath": "/var/lib/pgsql/data"
+ }
+ ],
+ "env": [
+ {
+ "name": "POSTGRESQL_USER",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
+ },
+ {
+ "name": "POSTGRESQL_PASSWORD",
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
+ },
+ {
+ "name": "POSTGRESQL_DATABASE",
+ "value": "${DATABASE_NAME}"
+ },
+ {
+ "name": "POSTGRESQL_MAX_CONNECTIONS",
+ "value": "${POSTGRESQL_MAX_CONNECTIONS}"
+ },
+ {
+ "name": "POSTGRESQL_SHARED_BUFFERS",
+ "value": "${POSTGRESQL_SHARED_BUFFERS}"
+ }
+ ],
+ "resources": {
+ "limits": {
+ "memory": "${MEMORY_POSTGRESQL_LIMIT}"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "parameters": [
+ {
+ "name": "NAME",
+ "displayName": "Name",
+ "description": "The name assigned to all of the frontend objects defined in this template.",
+ "required": true,
+ "value": "rails-pgsql-persistent"
+ },
+ {
+ "name": "NAMESPACE",
+ "displayName": "Namespace",
+ "required": true,
+ "description": "The OpenShift Namespace where the ImageStream resides.",
+ "value": "openshift"
+ },
+ {
+ "name": "MEMORY_LIMIT",
+ "displayName": "Memory Limit",
+ "required": true,
+ "description": "Maximum amount of memory the Rails container can use.",
+ "value": "512Mi"
+ },
+ {
+ "name": "MEMORY_POSTGRESQL_LIMIT",
+ "displayName": "Memory Limit (PostgreSQL)",
+ "required": true,
+ "description": "Maximum amount of memory the PostgreSQL container can use.",
+ "value": "512Mi"
+ },
+ {
+ "name": "VOLUME_CAPACITY",
+ "displayName": "Volume Capacity",
+ "description": "Volume space available for data, e.g. 512Mi, 2Gi",
+ "value": "1Gi",
+ "required": true
+ },
+ {
+ "name": "SOURCE_REPOSITORY_URL",
+ "displayName": "Git Repository URL",
+ "required": true,
+ "description": "The URL of the repository with your application source code.",
+ "value": "https://github.com/openshift/rails-ex.git"
+ },
+ {
+ "name": "SOURCE_REPOSITORY_REF",
+ "displayName": "Git Reference",
+ "description": "Set this to a branch name, tag or other ref of your repository if you are not using the default branch."
+ },
+ {
+ "name": "CONTEXT_DIR",
+ "displayName": "Context Directory",
+ "description": "Set this to the relative path to your project if it is not in the root of your repository."
+ },
+ {
+ "name": "APPLICATION_DOMAIN",
+ "displayName": "Application Hostname",
+ "description": "The exposed hostname that will route to the Rails service, if left blank a value will be defaulted.",
+ "value": ""
+ },
+ {
+ "name": "GITHUB_WEBHOOK_SECRET",
+ "displayName": "GitHub Webhook Secret",
+ "description": "A secret string used to configure the GitHub webhook.",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{40}"
+ },
+ {
+ "name": "SECRET_KEY_BASE",
+ "displayName": "Secret Key",
+ "description": "Your secret key for verifying the integrity of signed cookies.",
+ "generate": "expression",
+ "from": "[a-z0-9]{127}"
+ },
+ {
+ "name": "APPLICATION_USER",
+ "displayName": "Application Username",
+ "required": true,
+ "description": "The application user that is used within the sample application to authorize access on pages.",
+ "value": "openshift"
+ },
+ {
+ "name": "APPLICATION_PASSWORD",
+ "displayName": "Application Password",
+ "required": true,
+ "description": "The application password that is used within the sample application to authorize access on pages.",
+ "value": "secret"
+ },
+ {
+ "name": "RAILS_ENV",
+ "displayName": "Rails Environment",
+ "required": true,
+ "description": "Environment under which the sample application will run. Could be set to production, development or test.",
+ "value": "production"
+ },
+ {
+ "name": "DATABASE_SERVICE_NAME",
+ "required": true,
+ "displayName": "Database Service Name",
+ "value": "postgresql"
+ },
+ {
+ "name": "DATABASE_USER",
+ "displayName": "Database Username",
+ "generate": "expression",
+ "from": "user[A-Z0-9]{3}"
+ },
+ {
+ "name": "DATABASE_PASSWORD",
+ "displayName": "Database Password",
+ "generate": "expression",
+ "from": "[a-zA-Z0-9]{8}"
+ },
+ {
+ "name": "DATABASE_NAME",
+ "required": true,
+ "displayName": "Database Name",
+ "value": "root"
+ },
+ {
+ "name": "POSTGRESQL_MAX_CONNECTIONS",
+ "displayName": "Maximum Database Connections",
+ "value": "100"
+ },
+ {
+ "name": "POSTGRESQL_SHARED_BUFFERS",
+ "displayName": "Shared Buffer Amount",
+ "value": "12MB"
+ },
+ {
+ "name": "RUBYGEM_MIRROR",
+ "displayName": "Custom RubyGems Mirror URL",
+ "description": "The custom RubyGems mirror URL",
+ "value": ""
+ }
+ ]
+}
diff --git a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql.json b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql.json
index 6373562c4..043554c79 100644
--- a/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql.json
+++ b/roles/openshift_examples/files/examples/v1.4/quickstart-templates/rails-postgresql.json
@@ -22,11 +22,11 @@
"name": "${NAME}"
},
"stringData" : {
- "databaseUser" : "${DATABASE_USER}",
- "databasePassword" : "${DATABASE_PASSWORD}",
- "applicationUser" : "${APPLICATION_USER}",
- "applicationPassword" : "${APPLICATION_PASSWORD}",
- "keyBase" : "${SECRET_KEY_BASE}"
+ "database-user" : "${DATABASE_USER}",
+ "database-password" : "${DATABASE_PASSWORD}",
+ "application-user" : "${APPLICATION_USER}",
+ "application-password" : "${APPLICATION_PASSWORD}",
+ "keybase" : "${SECRET_KEY_BASE}"
}
},
{
@@ -104,8 +104,8 @@
},
"env": [
{
- "name": "RUBYGEM_MIRROR",
- "value": "${RUBYGEM_MIRROR}"
+ "name": "RUBYGEM_MIRROR",
+ "value": "${RUBYGEM_MIRROR}"
}
]
}
@@ -148,7 +148,7 @@
"strategy": {
"type": "Recreate",
"recreateParams": {
- "pre": {
+ "pre": {
"failurePolicy": "Abort",
"execNewPod": {
"command": [
@@ -224,7 +224,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databaseUser"
+ "key" : "database-user"
}
}
},
@@ -233,7 +233,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "databasePassword"
+ "key" : "database-password"
}
}
},
@@ -246,7 +246,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "keyBase"
+ "key" : "keybase"
}
}
},
@@ -267,7 +267,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "applicationUser"
+ "key" : "application-user"
}
}
},
@@ -276,7 +276,7 @@
"valueFrom": {
"secretKeyRef" : {
"name" : "${NAME}",
- "key" : "applicationPassword"
+ "key" : "application-password"
}
}
},
@@ -286,9 +286,9 @@
}
],
"resources": {
- "limits": {
- "memory": "${MEMORY_LIMIT}"
- }
+ "limits": {
+ "memory": "${MEMORY_LIMIT}"
+ }
}
}
]
@@ -400,11 +400,21 @@
"env": [
{
"name": "POSTGRESQL_USER",
- "value": "${DATABASE_USER}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-user"
+ }
+ }
},
{
"name": "POSTGRESQL_PASSWORD",
- "value": "${DATABASE_PASSWORD}"
+ "valueFrom": {
+ "secretKeyRef" : {
+ "name" : "${NAME}",
+ "key" : "database-password"
+ }
+ }
},
{
"name": "POSTGRESQL_DATABASE",
@@ -420,9 +430,9 @@
}
],
"resources": {
- "limits": {
- "memory": "${MEMORY_POSTGRESQL_LIMIT}"
- }
+ "limits": {
+ "memory": "${MEMORY_POSTGRESQL_LIMIT}"
+ }
}
}
]
diff --git a/roles/openshift_excluder/README.md b/roles/openshift_excluder/README.md
new file mode 100644
index 000000000..6c90b4e96
--- /dev/null
+++ b/roles/openshift_excluder/README.md
@@ -0,0 +1,44 @@
+OpenShift Excluder
+==================
+
+Manages the excluder packages which add yum and dnf exclusions, ensuring that
+the packages we care about are not inadvertently updated. See
+https://github.com/openshift/origin/tree/master/contrib/excluder
+
+Requirements
+------------
+openshift_facts
+
+
+Facts
+-----
+
+| Name | Default Value | Description |
+|-----------------------------|---------------|----------------------------------------|
+| docker_excluder_enabled | none | Records the status of the docker excluder |
+| openshift_excluder_enabled | none | Records the status of the openshift excluder |
+
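+Once `status.yml` has run, later tasks can key off these facts. An illustrative
+sketch only, not something shipped with this role:
+
+```yaml
+- debug:
+    msg: "The docker excluder is currently enabled"
+  when: docker_excluder_enabled | default(false) | bool
+```
+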
+Role Variables
+--------------
+None
+
+Dependencies
+------------
+
+Example Playbook
+----------------
+
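+A minimal sketch of applying the role; the `nodes` host group is an assumption,
+not something the role defines:
+
+```yaml
+- hosts: nodes
+  roles:
+  - role: openshift_excluder
+```
+
+Applying the role runs `status.yml`, which records the `docker_excluder_enabled`
+and `openshift_excluder_enabled` facts; the `exclude.yml`, `unexclude.yml` and
+`reset.yml` task files are intended to be included separately by playbooks that
+need to toggle the exclusions.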
+
+TODO
+----
+It should be possible to manage the two excluders independently; that is not a hard requirement, but it is needed in order to manage docker on RHEL containerized hosts.
+
+License
+-------
+
+Apache License, Version 2.0
+
+Author Information
+------------------
+
+Scott Dodson (sdodson@redhat.com)
diff --git a/roles/openshift_excluder/meta/main.yml b/roles/openshift_excluder/meta/main.yml
new file mode 100644
index 000000000..8bca38e77
--- /dev/null
+++ b/roles/openshift_excluder/meta/main.yml
@@ -0,0 +1,15 @@
+---
+galaxy_info:
+ author: Scott Dodson
+ description: OpenShift Excluder
+ company: Red Hat, Inc.
+ license: Apache License, Version 2.0
+ min_ansible_version: 2.2
+ platforms:
+ - name: EL
+ versions:
+ - 7
+ categories:
+ - cloud
+dependencies:
+- { role: openshift_facts }
diff --git a/roles/openshift_excluder/tasks/exclude.yml b/roles/openshift_excluder/tasks/exclude.yml
new file mode 100644
index 000000000..570183aef
--- /dev/null
+++ b/roles/openshift_excluder/tasks/exclude.yml
@@ -0,0 +1,11 @@
+---
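+# Install the excluder packages if needed, then turn their yum/dnf exclusions on.
+# All tasks are skipped on containerized hosts.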
+- include: install.yml
+ when: not openshift.common.is_containerized | bool
+
+- name: Enable docker excluder
+ command: "{{ openshift.common.service_type }}-docker-excluder exclude"
+ when: not openshift.common.is_containerized | bool
+
+- name: Enable excluder
+ command: "{{ openshift.common.service_type }}-excluder exclude"
+ when: not openshift.common.is_containerized | bool
diff --git a/roles/openshift_excluder/tasks/install.yml b/roles/openshift_excluder/tasks/install.yml
new file mode 100644
index 000000000..ee4cb2c05
--- /dev/null
+++ b/roles/openshift_excluder/tasks/install.yml
@@ -0,0 +1,16 @@
+---
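+# Install the latest excluder packages. Only runs on RPM-based (non-containerized)
+# hosts, and only for excluders that are flagged as enabled.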
+- name: Install latest excluder
+ package:
+ name: "{{ openshift.common.service_type }}-excluder"
+ state: latest
+ when:
+ - openshift_excluder_enabled | default(false) | bool
+ - not openshift.common.is_containerized | bool
+
+- name: Install latest docker excluder
+ package:
+ name: "{{ openshift.common.service_type }}-excluder"
+ state: latest
+ when:
+ - docker_excluder_enabled | default(false) | bool
+ - not openshift.common.is_containerized | bool
diff --git a/roles/openshift_excluder/tasks/main.yml b/roles/openshift_excluder/tasks/main.yml
new file mode 100644
index 000000000..78a3d37cb
--- /dev/null
+++ b/roles/openshift_excluder/tasks/main.yml
@@ -0,0 +1,2 @@
+---
+- include: status.yml
diff --git a/roles/openshift_excluder/tasks/reset.yml b/roles/openshift_excluder/tasks/reset.yml
new file mode 100644
index 000000000..486a23fd0
--- /dev/null
+++ b/roles/openshift_excluder/tasks/reset.yml
@@ -0,0 +1,12 @@
+---
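+# Re-apply exclusions for any excluder that status.yml recorded as enabled.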
+- name: Enable docker excluder
+ command: "{{ openshift.common.service_type }}-docker-excluder exclude"
+ when:
+ - docker_excluder_enabled | default(false) | bool
+ - not openshift.common.is_containerized | bool
+
+- name: Enable excluder
+ command: "{{ openshift.common.service_type }}-excluder exclude"
+ when:
+ - openshift_excluder_enabled | default(false) | bool
+ - not openshift.common.is_containerized | bool
diff --git a/roles/openshift_excluder/tasks/status.yml b/roles/openshift_excluder/tasks/status.yml
new file mode 100644
index 000000000..6ef4af22d
--- /dev/null
+++ b/roles/openshift_excluder/tasks/status.yml
@@ -0,0 +1,56 @@
+---
+# Latest versions of the excluders include a status function; old packages don't.
+# So, if the packages are installed, upgrade them to the latest so we get the status.
+# If they're not installed, we should assume they're disabled.
+
+- name: Determine if excluder packages are installed
+ rpm_q:
+ name: "{{ openshift.common.service_type }}-excluder"
+ state: present
+ register: openshift_excluder_installed
+ failed_when: false
+
+- name: Determine if docker packages are installed
+ rpm_q:
+ name: "{{ openshift.common.service_type }}-excluder"
+ state: present
+ register: docker_excluder_installed
+ failed_when: false
+
+- name: Update to latest excluder packages
+ package:
+ name: "{{ openshift.common.service_type }}-excluder"
+ when:
+ - "{{ openshift_excluder_installed.installed_versions | default([]) | length > 0 }}"
+ - not openshift.common.is_containerized | bool
+
+- name: Update to the latest docker-excluder packages
+ package:
+ name: "{{ openshift.common.service_type }}-docker-excluder"
+ when:
+ - "{{ docker_excluder_installed.installed_versions | default([]) | length > 0 }}"
+ - not openshift.common.is_containerized | bool
+
+- name: Record excluder status
+ command: "{{ openshift.common.service_type }}-excluder"
+ register: excluder_status
+ when:
+ - "{{ openshift_excluder_installed.installed_versions | default([]) | length > 0 }}"
+ - not openshift.common.is_containerized | bool
+ failed_when: false
+
+- name: Record docker excluder status
+ command: "{{ openshift.common.service_type }}-docker-excluder"
+ register: docker_excluder_status
+ when:
+ - "{{ docker_excluder_installed.installed_versions | default([]) | length > 0 }}"
+ - not openshift.common.is_containerized | bool
+ failed_when: false
+
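+# An excluder is treated as enabled only when its package is installed and its
+# status command above exited non-zero; record that as a fact for later plays.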
+- name: Set excluder status facts
+ set_fact:
+ docker_excluder_enabled: "{{ 'false' if docker_excluder_status.rc | default(0) == 0 or docker_excluder_installed.installed_versions | default(0) | length == 0 else 'true' }}"
+ openshift_excluder_enabled: "{{ 'false' if docker_excluder_status.rc | default(0) == 0 or openshift_excluder_installed.installed_versions | default(0) | length == 0 else 'true' }}"
+
+- debug: var=docker_excluder_enabled
+- debug: var=openshift_excluder_enabled
diff --git a/roles/openshift_excluder/tasks/unexclude.yml b/roles/openshift_excluder/tasks/unexclude.yml
new file mode 100644
index 000000000..38f0759aa
--- /dev/null
+++ b/roles/openshift_excluder/tasks/unexclude.yml
@@ -0,0 +1,12 @@
+---
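+# Lift the yum/dnf exclusions so the excluded packages can be updated again.
+# Only runs for excluders that are currently enabled, never on containerized hosts.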
+- name: Disable docker excluder
+ command: "{{ openshift.common.service_type }}-docker-excluder unexclude"
+ when:
+ - docker_excluder_enabled | default(false) | bool
+ - not openshift.common.is_containerized | bool
+
+- name: Disable excluder
+ command: "{{ openshift.common.service_type }}-excluder unexclude"
+ when:
+ - openshift_excluder_enabled | default(false) | bool
+ - not openshift.common.is_containerized | bool
diff --git a/roles/openshift_facts/library/openshift_facts.py b/roles/openshift_facts/library/openshift_facts.py
index e72ab26fc..ec2942b69 100755
--- a/roles/openshift_facts/library/openshift_facts.py
+++ b/roles/openshift_facts/library/openshift_facts.py
@@ -1382,7 +1382,8 @@ def get_container_openshift_version(facts):
tag = line[len("IMAGE_VERSION="):].strip()
# Remove leading "v" and any trailing release info, we just want
# a version number here:
- version = tag[1:].split("-")[0]
+ no_v_version = tag[1:] if tag.startswith('v') else tag
+ version = no_v_version.split("-")[0]
return version
return None
diff --git a/roles/openshift_hosted_templates/files/v1.4/enterprise/registry-console.yaml b/roles/openshift_hosted_templates/files/v1.4/enterprise/registry-console.yaml
index 11478263c..0e3d006a7 100644
--- a/roles/openshift_hosted_templates/files/v1.4/enterprise/registry-console.yaml
+++ b/roles/openshift_hosted_templates/files/v1.4/enterprise/registry-console.yaml
@@ -103,9 +103,9 @@ parameters:
- description: 'Specify "registry/repository" prefix for container image; e.g. for "registry.access.redhat.com/openshift3/registry-console:latest", set prefix "registry.access.redhat.com/openshift3/"'
name: IMAGE_PREFIX
value: "registry.access.redhat.com/openshift3/"
- - description: 'Specify image version; e.g. for "registry.access.redhat.com/openshift3/registry-console:3.3", set version "3.3"'
+ - description: 'Specify image version; e.g. for "registry.access.redhat.com/openshift3/registry-console:3.4", set version "3.4"'
name: IMAGE_VERSION
- value: "3.3"
+ value: "3.4"
- description: "The public URL for the Openshift OAuth Provider, e.g. https://openshift.example.com:8443"
name: OPENSHIFT_OAUTH_PROVIDER_URL
required: true
diff --git a/roles/openshift_hosted_templates/files/v1.5/enterprise/registry-console.yaml b/roles/openshift_hosted_templates/files/v1.5/enterprise/registry-console.yaml
index 11478263c..28feac4e6 100644
--- a/roles/openshift_hosted_templates/files/v1.5/enterprise/registry-console.yaml
+++ b/roles/openshift_hosted_templates/files/v1.5/enterprise/registry-console.yaml
@@ -103,9 +103,9 @@ parameters:
- description: 'Specify "registry/repository" prefix for container image; e.g. for "registry.access.redhat.com/openshift3/registry-console:latest", set prefix "registry.access.redhat.com/openshift3/"'
name: IMAGE_PREFIX
value: "registry.access.redhat.com/openshift3/"
- - description: 'Specify image version; e.g. for "registry.access.redhat.com/openshift3/registry-console:3.3", set version "3.3"'
+ - description: 'Specify image version; e.g. for "registry.access.redhat.com/openshift3/registry-console:3.5", set version "3.5"'
name: IMAGE_VERSION
- value: "3.3"
+ value: "3.5"
- description: "The public URL for the Openshift OAuth Provider, e.g. https://openshift.example.com:8443"
name: OPENSHIFT_OAUTH_PROVIDER_URL
required: true
diff --git a/roles/openshift_logging/README.md b/roles/openshift_logging/README.md
index 8651e06e7..9394977c0 100644
--- a/roles/openshift_logging/README.md
+++ b/roles/openshift_logging/README.md
@@ -25,7 +25,7 @@ When both `openshift_logging_install_logging` and `openshift_logging_upgrade_log
- `openshift_logging_image_version`: The image version for the logging images to use. Defaults to 'latest'.
- `openshift_logging_use_ops`: If 'True', set up a second ES and Kibana cluster for infrastructure logs. Defaults to 'False'.
- `master_url`: The URL for the Kubernetes master, this does not need to be public facing but should be accessible from within the cluster. Defaults to 'https://kubernetes.default.svc.cluster.local'.
-- `public_master_url`: The public facing URL for the Kubernetes master, this is used for Authentication redirection. Defaults to 'https://localhost:8443'.
+- `openshift_logging_master_public_url`: The public facing URL for the Kubernetes master, this is used for Authentication redirection. Defaults to 'https://localhost:8443'.
- `openshift_logging_namespace`: The namespace that Aggregated Logging will be installed in. Defaults to 'logging'.
- `openshift_logging_curator_default_days`: The default minimum age (in days) Curator uses for deleting log records. Defaults to '30'.
- `openshift_logging_curator_run_hour`: The hour of the day that Curator will run at. Defaults to '0'.
diff --git a/roles/openshift_logging/defaults/main.yml b/roles/openshift_logging/defaults/main.yml
index 919c53787..ead59c029 100644
--- a/roles/openshift_logging/defaults/main.yml
+++ b/roles/openshift_logging/defaults/main.yml
@@ -3,7 +3,7 @@ openshift_logging_image_prefix: docker.io/openshift/origin-
openshift_logging_image_version: latest
openshift_logging_use_ops: False
master_url: "https://kubernetes.default.svc.{{ openshift.common.dns_domain }}"
-public_master_url: "https://{{openshift.common.public_hostname}}:8443"
+openshift_logging_master_public_url: "https://{{openshift.common.public_hostname}}:8443"
openshift_logging_namespace: logging
openshift_logging_install_logging: True
diff --git a/roles/openshift_logging/templates/kibana.j2 b/roles/openshift_logging/templates/kibana.j2
index b42f62850..3a9e03768 100644
--- a/roles/openshift_logging/templates/kibana.j2
+++ b/roles/openshift_logging/templates/kibana.j2
@@ -93,10 +93,10 @@ spec:
value: {{master_url}}
-
name: "OAP_PUBLIC_MASTER_URL"
- value: {{public_master_url}}
+ value: {{openshift_logging_master_public_url}}
-
name: "OAP_LOGOUT_REDIRECT"
- value: {{public_master_url}}/console/logout
+ value: {{openshift_logging_master_public_url}}/console/logout
-
name: "OAP_MASTER_CA_FILE"
value: "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
diff --git a/roles/openshift_metrics/defaults/main.yaml b/roles/openshift_metrics/defaults/main.yaml
index b99adf779..dd5a20d5b 100644
--- a/roles/openshift_metrics/defaults/main.yaml
+++ b/roles/openshift_metrics/defaults/main.yaml
@@ -13,6 +13,7 @@ openshift_metrics_hawkular_requests_cpu: null
openshift_metrics_hawkular_cert: ""
openshift_metrics_hawkular_key: ""
openshift_metrics_hawkular_ca: ""
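+# When set, the *_nodeselector variables in this file are rendered into the pod's
+# nodeSelector; they are expected to be a mapping of node labels, for example
+# {'region': 'infra'} (an illustrative value, not a default).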
+openshift_metrics_hawkular_nodeselector: ""
openshift_metrics_cassandra_replicas: 1
openshift_metrics_cassandra_storage_type: emptydir
@@ -21,12 +22,14 @@ openshift_metrics_cassandra_limits_memory: 2G
openshift_metrics_cassandra_limits_cpu: null
openshift_metrics_cassandra_requests_memory: 1G
openshift_metrics_cassandra_requests_cpu: null
+openshift_metrics_cassandra_nodeselector: ""
openshift_metrics_heapster_standalone: False
openshift_metrics_heapster_limits_memory: 3.75G
openshift_metrics_heapster_limits_cpu: null
openshift_metrics_heapster_requests_memory: 0.9375G
openshift_metrics_heapster_requests_cpu: null
+openshift_metrics_heapster_nodeselector: ""
openshift_metrics_duration: 7
openshift_metrics_resolution: 15s
diff --git a/roles/openshift_metrics/tasks/install_cassandra.yaml b/roles/openshift_metrics/tasks/install_cassandra.yaml
index a9340acc3..9e25071af 100644
--- a/roles/openshift_metrics/tasks/install_cassandra.yaml
+++ b/roles/openshift_metrics/tasks/install_cassandra.yaml
@@ -18,6 +18,7 @@
node: "{{ item }}"
master: "{{ (item == '1')|string|lower }}"
replica_count: "{{cassandra_replica_count.results[item|int - 1].stdout}}"
+ node_selector: "{{openshift_metrics_cassandra_nodeselector | default('') }}"
with_sequence: count={{ openshift_metrics_cassandra_replicas }}
changed_when: false
diff --git a/roles/openshift_metrics/tasks/install_hawkular.yaml b/roles/openshift_metrics/tasks/install_hawkular.yaml
index 00f7b2554..1ba11efa8 100644
--- a/roles/openshift_metrics/tasks/install_hawkular.yaml
+++ b/roles/openshift_metrics/tasks/install_hawkular.yaml
@@ -13,6 +13,7 @@
dest: "{{ mktemp.stdout }}/templates/hawkular_metrics_rc.yaml"
vars:
replica_count: "{{hawkular_metrics_replica_count.stdout | default(0)}}"
+ node_selector: "{{openshift_metrics_hawkular_nodeselector | default('') }}"
changed_when: false
- name: read hawkular-metrics route destination ca certificate
diff --git a/roles/openshift_metrics/tasks/install_heapster.yaml b/roles/openshift_metrics/tasks/install_heapster.yaml
index 39df797ab..44bab8ace 100644
--- a/roles/openshift_metrics/tasks/install_heapster.yaml
+++ b/roles/openshift_metrics/tasks/install_heapster.yaml
@@ -11,4 +11,5 @@
template: src=heapster.j2 dest={{mktemp.stdout}}/templates/metrics-heapster-rc.yaml
vars:
replica_count: "{{heapster_replica_count.stdout | default(0)}}"
+ node_selector: "{{openshift_metrics_heapster_nodeselector | default('') }}"
changed_when: no
diff --git a/roles/openshift_metrics/templates/hawkular_cassandra_rc.j2 b/roles/openshift_metrics/templates/hawkular_cassandra_rc.j2
index abd4ff939..876cb1915 100644
--- a/roles/openshift_metrics/templates/hawkular_cassandra_rc.j2
+++ b/roles/openshift_metrics/templates/hawkular_cassandra_rc.j2
@@ -19,6 +19,12 @@ spec:
type: hawkular-cassandra
spec:
serviceAccount: cassandra
+{% if node_selector is iterable and node_selector | length > 0 %}
+ nodeSelector:
+{% for key, value in node_selector.items() %}
+ {{key}}: "{{value}}"
+{% endfor %}
+{% endif %}
containers:
- image: "{{ openshift_metrics_image_prefix }}metrics-cassandra:{{ openshift_metrics_image_version }}"
name: hawkular-cassandra-{{ node }}
diff --git a/roles/openshift_metrics/templates/hawkular_metrics_rc.j2 b/roles/openshift_metrics/templates/hawkular_metrics_rc.j2
index f78621674..d39f1b43a 100644
--- a/roles/openshift_metrics/templates/hawkular_metrics_rc.j2
+++ b/roles/openshift_metrics/templates/hawkular_metrics_rc.j2
@@ -17,6 +17,12 @@ spec:
name: hawkular-metrics
spec:
serviceAccount: hawkular
+{% if node_selector is iterable and node_selector | length > 0 %}
+ nodeSelector:
+{% for key, value in node_selector.items() %}
+ {{key}}: "{{value}}"
+{% endfor %}
+{% endif %}
containers:
- image: {{openshift_metrics_image_prefix}}metrics-hawkular-metrics:{{openshift_metrics_image_version}}
name: hawkular-metrics
diff --git a/roles/openshift_metrics/templates/heapster.j2 b/roles/openshift_metrics/templates/heapster.j2
index eeca03be0..7c837db4d 100644
--- a/roles/openshift_metrics/templates/heapster.j2
+++ b/roles/openshift_metrics/templates/heapster.j2
@@ -18,6 +18,12 @@ spec:
name: heapster
spec:
serviceAccountName: heapster
+{% if node_selector is iterable and node_selector | length > 0 %}
+ nodeSelector:
+{% for key, value in node_selector.items() %}
+ {{key}}: "{{value}}"
+{% endfor %}
+{% endif %}
containers:
- name: heapster
image: {{openshift_metrics_image_prefix}}metrics-heapster:{{openshift_metrics_image_version}}