Diffstat (limited to 'playbooks')
-rw-r--r--  playbooks/adhoc/uninstall.yml | 73
-rw-r--r--  playbooks/aws/openshift-cluster/hosted.yml | 22
-rw-r--r--  playbooks/aws/openshift-cluster/install.yml | 28
-rw-r--r--  playbooks/aws/openshift-cluster/provision_install.yml | 7
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/README.md | 3
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml | 7
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml | 16
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml | 9
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_4/README.md | 18
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml | 9
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_5/README.md | 18
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml | 7
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml | 16
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_7/README.md | 2
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_8/README.md (renamed from playbooks/byo/openshift-cluster/upgrades/v3_3/README.md) | 12
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade.yml (renamed from playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml) | 2
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml (renamed from playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml) | 2
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml (renamed from playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml) | 2
-rw-r--r--  playbooks/common/openshift-checks/install.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/openshift_hosted.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/openshift_logging.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/openshift_metrics.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/openshift_prometheus.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/service_catalog.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/std_include.yml | 16
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml | 36
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_3/master_config_upgrade.yml | 66
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_3/node_config_upgrade.yml | 20
l---------  playbooks/common/openshift-cluster/upgrades/v3_3/roles | 1
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_3/upgrade.yml | 118
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml | 118
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml | 113
l---------  playbooks/common/openshift-cluster/upgrades/v3_4/roles | 1
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_4/upgrade.yml | 116
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml | 118
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml | 111
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_5/master_config_upgrade.yml | 10
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_5/validator.yml | 67
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_7/validator.yml | 8
l---------  playbooks/common/openshift-cluster/upgrades/v3_8/filter_plugins (renamed from playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins) | 0
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_8/master_config_upgrade.yml (renamed from playbooks/common/openshift-cluster/upgrades/v3_4/master_config_upgrade.yml) | 10
l---------  playbooks/common/openshift-cluster/upgrades/v3_8/roles (renamed from playbooks/common/openshift-cluster/upgrades/v3_5/roles) | 0
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_8/upgrade.yml (renamed from playbooks/common/openshift-cluster/upgrades/v3_5/upgrade.yml) | 28
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml (renamed from playbooks/common/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml) | 28
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml (renamed from playbooks/common/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml) | 8
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/v3_8/validator.yml | 7
-rw-r--r--  playbooks/common/openshift-etcd/config.yml | 16
-rw-r--r--  playbooks/common/openshift-glusterfs/config.yml | 16
-rw-r--r--  playbooks/common/openshift-loadbalancer/config.yml | 16
-rw-r--r--  playbooks/common/openshift-management/config.yml | 16
-rw-r--r--  playbooks/common/openshift-master/additional_config.yml | 16
-rw-r--r--  playbooks/common/openshift-master/config.yml | 16
-rw-r--r--  playbooks/common/openshift-master/files/openshift-ansible-catalog-console.js | 2
-rw-r--r--  playbooks/common/openshift-master/tasks/wire_aggregator.yml | 18
-rw-r--r--  playbooks/common/openshift-master/templates/openshift-ansible-catalog-console.js | 1
-rw-r--r--  playbooks/common/openshift-nfs/config.yml | 16
-rw-r--r--  playbooks/common/openshift-node/config.yml | 16
57 files changed, 381 insertions(+), 1117 deletions(-)
diff --git a/playbooks/adhoc/uninstall.yml b/playbooks/adhoc/uninstall.yml
index 07f10d48c..5ed55a817 100644
--- a/playbooks/adhoc/uninstall.yml
+++ b/playbooks/adhoc/uninstall.yml
@@ -151,6 +151,14 @@
- lbr0
- vlinuxbr
- vovsbr
+
+ - name: Remove virtual devices
+ command: nmcli device delete "{{ item }}"
+ failed_when: False
+ with_items:
+ - tun0
+ - docker0
+
when: openshift_remove_all | default(true) | bool
- shell: atomic uninstall "{{ item }}"-master-api
@@ -264,12 +272,30 @@
- "{{ directories.results | default([]) }}"
- files
+ - shell: systemctl daemon-reload
+ changed_when: False
+
+ - name: stop and disable container-engine
+ service: name=container-engine state=stopped enabled=no
+ failed_when: false
+ register: container_engine
+
+ - name: stop and disable docker
+ service: name=docker state=stopped enabled=no
+ failed_when: false
+ when: not (container_engine | changed)
+ register: l_docker_restart_docker_in_pb_result
+ until: not l_docker_restart_docker_in_pb_result | failed
+ retries: 3
+ delay: 30
+
- name: Remove remaining files
file: path={{ item }} state=absent
with_items:
- /etc/ansible/facts.d/openshift.fact
- /etc/openshift
- /etc/openshift-sdn
+ - /etc/pki/ca-trust/source/anchors/openshift-ca.crt
- /etc/sysconfig/atomic-openshift-node
- /etc/sysconfig/atomic-openshift-node-dep
- /etc/sysconfig/openshift-node-dep
@@ -284,23 +310,38 @@
- /etc/systemd/system/origin-node-dep.service
- /etc/systemd/system/origin-node.service
- /etc/systemd/system/origin-node.service.wants
+ - /var/lib/docker
+
+ - name: Rebuild ca-trust
+ command: update-ca-trust
+
+ - name: Reset Docker proxy configuration
+ lineinfile:
+ state=absent
+ dest=/etc/sysconfig/docker
+ regexp='(NO_PROXY|HTTP_PROXY|HTTPS_PROXY)=.*'
+
+ - name: Reset Docker registry configuration
+ lineinfile:
+ state=absent
+ dest=/etc/sysconfig/docker
+ regexp='(ADD_REGISTRY|BLOCK_REGISTRY|INSECURE_REGISTRY)=.*'
+
+ - name: Detect Docker storage configuration
+ shell: vgs -o name | grep docker
+ register: docker_vg_name
+ failed_when: false
+ changed_when: false
- - shell: systemctl daemon-reload
- changed_when: False
+ - name: Wipe out Docker storage contents
+ command: vgremove -f {{ item }}
+ with_items: "{{ docker_vg_name.stdout_lines }}"
+ when: docker_vg_name.rc == 0
- - name: restart container-engine
- service: name=container-engine state=restarted
- failed_when: false
- register: container_engine
+ - name: Wipe out Docker storage configuration
+ file: path=/etc/sysconfig/docker-storage state=absent
+ when: docker_vg_name.rc == 0
- - name: restart docker
- service: name=docker state=restarted
- failed_when: false
- when: not (container_engine | changed)
- register: l_docker_restart_docker_in_pb_result
- until: not l_docker_restart_docker_in_pb_result | failed
- retries: 3
- delay: 30
- hosts: masters
become: yes
@@ -525,3 +566,7 @@
with_items:
- /etc/ansible/facts.d/openshift.fact
- /var/lib/haproxy/stats
+ # Here we remove only limits.conf rather than the whole directory, as users may have put their own files there.
+ # - /etc/systemd/system/haproxy.service.d
+ - /etc/systemd/system/haproxy.service.d/limits.conf
+ - /etc/systemd/system/haproxy.service
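
The reworked uninstall flow above stops the container runtimes instead of restarting them, tolerates hosts where they are already gone, and retries while systemd settles. A minimal standalone sketch of that stop-with-retries pattern; the "nodes" host group and the 3x30s retry budget are illustrative assumptions, not part of the committed playbook:

```yaml
---
# Illustrative play only; host group, service name, and retry budget are assumptions.
- hosts: nodes
  become: yes
  tasks:
    - name: Stop and disable docker, retrying while the unit settles
      service:
        name: docker
        state: stopped
        enabled: no
      failed_when: false            # do not abort the uninstall if docker is already absent
      register: l_docker_stop_result
      until: not (l_docker_stop_result | failed)
      retries: 3
      delay: 30
```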
diff --git a/playbooks/aws/openshift-cluster/hosted.yml b/playbooks/aws/openshift-cluster/hosted.yml
new file mode 100644
index 000000000..db6e3b8e1
--- /dev/null
+++ b/playbooks/aws/openshift-cluster/hosted.yml
@@ -0,0 +1,22 @@
+---
+- include: ../../common/openshift-cluster/openshift_hosted.yml
+
+- include: ../../common/openshift-cluster/openshift_metrics.yml
+ when: openshift_metrics_install_metrics | default(false) | bool
+
+- include: ../../common/openshift-cluster/openshift_logging.yml
+ when: openshift_logging_install_logging | default(false) | bool
+
+- include: ../../common/openshift-cluster/service_catalog.yml
+ when: openshift_enable_service_catalog | default(false) | bool
+
+- include: ../../common/openshift-management/config.yml
+ when: openshift_management_install_management | default(false) | bool
+
+- name: Print deprecated variable warning message if necessary
+ hosts: oo_first_master
+ gather_facts: no
+ tasks:
+ - debug: msg="{{__deprecation_message}}"
+ when:
+ - __deprecation_message | default ('') | length > 0
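
Each optional component in the new hosted.yml is gated behind an inventory boolean, so an unset variable defaults to skipping the include. A sketch of the same gate; the variable name and playbook path below are placeholders:

```yaml
# Hypothetical gated include; the variable and path are for illustration only.
- include: ../../common/openshift-cluster/openshift_example_component.yml
  when: openshift_example_install | default(false) | bool
```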
diff --git a/playbooks/aws/openshift-cluster/install.yml b/playbooks/aws/openshift-cluster/install.yml
index 4d0bf9531..1e8118490 100644
--- a/playbooks/aws/openshift-cluster/install.yml
+++ b/playbooks/aws/openshift-cluster/install.yml
@@ -21,5 +21,29 @@
- name: run the std_include
include: ../../common/openshift-cluster/std_include.yml
-- name: run the config
- include: ../../common/openshift-cluster/config.yml
+- name: perform the installer openshift-checks
+ include: ../../common/openshift-checks/install.yml
+
+- name: etcd install
+ include: ../../common/openshift-etcd/config.yml
+
+- name: include nfs
+ include: ../../common/openshift-nfs/config.yml
+ when: groups.oo_nfs_to_config | default([]) | count > 0
+
+- name: include loadbalancer
+ include: ../../common/openshift-loadbalancer/config.yml
+ when: groups.oo_lb_to_config | default([]) | count > 0
+
+- name: include openshift-master config
+ include: ../../common/openshift-master/config.yml
+
+- name: include master additional config
+ include: ../../common/openshift-master/additional_config.yml
+
+- name: include openshift-node config
+ include: ../../common/openshift-node/config.yml
+
+- name: include openshift-glusterfs
+ include: ../../common/openshift-glusterfs/config.yml
+ when: groups.oo_glusterfs_to_config | default([]) | count > 0
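
install.yml now pulls in the per-component config playbooks directly and skips any component whose inventory group is empty. A sketch of that group-count guard; the group name and path below are hypothetical:

```yaml
# Hypothetical optional component, skipped when its inventory group has no hosts.
- name: include example component config
  include: ../../common/openshift-example/config.yml
  when: groups.oo_example_to_config | default([]) | count > 0
```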
diff --git a/playbooks/aws/openshift-cluster/provision_install.yml b/playbooks/aws/openshift-cluster/provision_install.yml
index e787deced..78dd6a49b 100644
--- a/playbooks/aws/openshift-cluster/provision_install.yml
+++ b/playbooks/aws/openshift-cluster/provision_install.yml
@@ -6,11 +6,14 @@
- name: Include the provision.yml playbook to create cluster
include: provision.yml
-- name: Include the install.yml playbook to install cluster
+- name: Include the install.yml playbook to install the cluster on the masters
include: install.yml
-- name: Include the install.yml playbook to install cluster
+- name: Include the provision_nodes.yml playbook to provision infra/compute node resources
include: provision_nodes.yml
- name: Include the accept.yml playbook to accept nodes into the cluster
include: accept.yml
+
+- name: Include the hosted.yml playbook to finish the hosted configuration
+ include: hosted.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/README.md b/playbooks/byo/openshift-cluster/upgrades/README.md
index 0f64f40f3..d9b1fc2ca 100644
--- a/playbooks/byo/openshift-cluster/upgrades/README.md
+++ b/playbooks/byo/openshift-cluster/upgrades/README.md
@@ -4,6 +4,5 @@ cluster. Additional notes for the associated upgrade playbooks are
provided in their respective directories.
# Upgrades available
+- [OpenShift Container Platform 3.6 to 3.7](v3_7/README.md) (works also to upgrade OpenShift Origin from 3.6.x to 3.7.x)
- [OpenShift Container Platform 3.5 to 3.6](v3_6/README.md) (works also to upgrade OpenShift Origin from 1.5.x to 3.6.x)
-- [OpenShift Container Platform 3.4 to 3.5](v3_5/README.md) (works also to upgrade OpenShift Origin from 1.4.x to 1.5.x)
-- [OpenShift Container Platform 3.3 to 3.4](v3_4/README.md) (works also to upgrade OpenShift Origin from 1.3.x to 1.4.x)
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml
deleted file mode 100644
index 697a18c4d..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-#
-# Full Control Plane + Nodes Upgrade
-#
-- include: ../../initialize_groups.yml
-
-- include: ../../../../common/openshift-cluster/upgrades/v3_3/upgrade.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
deleted file mode 100644
index 4d284c279..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-#
-# Control Plane Upgrade Playbook
-#
-# Upgrades masters and Docker (only on standalone etcd hosts)
-#
-# This upgrade does not include:
-# - node service running on masters
-# - docker running on masters
-# - node service running on dedicated nodes
-#
-# You can run the upgrade_nodes.yml playbook after this to upgrade these components separately.
-#
-- include: ../../initialize_groups.yml
-
-- include: ../../../../common/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml b/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
deleted file mode 100644
index 180a2821f..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-#
-# Node Upgrade Playbook
-#
-# Upgrades nodes only, but requires the control plane to have already been upgraded.
-#
-- include: ../../initialize_groups.yml
-
-- include: ../../../../common/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/README.md b/playbooks/byo/openshift-cluster/upgrades/v3_4/README.md
deleted file mode 100644
index 85b807dc6..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# v3.4 Major and Minor Upgrade Playbook
-
-## Overview
-This playbook currently performs the
-following steps.
-
- * Upgrade and restart master services
- * Unschedule node.
- * Upgrade and restart docker
- * Upgrade and restart node services
- * Modifies the subset of the configuration necessary
- * Applies the latest cluster policies
- * Updates the default router if one exists
- * Updates the default registry if one exists
- * Updates image streams and quickstarts
-
-## Usage
-ansible-playbook -i ~/ansible-inventory openshift-ansible/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml b/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
deleted file mode 100644
index d5329b858..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-#
-# Node Upgrade Playbook
-#
-# Upgrades nodes only, but requires the control plane to have already been upgraded.
-#
-- include: ../../initialize_groups.yml
-
-- include: ../../../../common/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/README.md b/playbooks/byo/openshift-cluster/upgrades/v3_5/README.md
deleted file mode 100644
index 53eebe65e..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# v3.5 Major and Minor Upgrade Playbook
-
-## Overview
-This playbook currently performs the
-following steps.
-
- * Upgrade and restart master services
- * Unschedule node.
- * Upgrade and restart docker
- * Upgrade and restart node services
- * Modifies the subset of the configuration necessary
- * Applies the latest cluster policies
- * Updates the default router if one exists
- * Updates the default registry if one exists
- * Updates image streams and quickstarts
-
-## Usage
-ansible-playbook -i ~/ansible-inventory openshift-ansible/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
deleted file mode 100644
index f44d55ad2..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-#
-# Full Control Plane + Nodes Upgrade
-#
-- include: ../../initialize_groups.yml
-
-- include: ../../../../common/openshift-cluster/upgrades/v3_5/upgrade.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
deleted file mode 100644
index 2377713fa..000000000
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-#
-# Control Plane Upgrade Playbook
-#
-# Upgrades masters and Docker (only on standalone etcd hosts)
-#
-# This upgrade does not include:
-# - node service running on masters
-# - docker running on masters
-# - node service running on dedicated nodes
-#
-# You can run the upgrade_nodes.yml playbook after this to upgrade these components separately.
-#
-- include: ../../initialize_groups.yml
-
-- include: ../../../../common/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_7/README.md b/playbooks/byo/openshift-cluster/upgrades/v3_7/README.md
index 4bf53be81..914e0f5b2 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_7/README.md
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_7/README.md
@@ -1,4 +1,4 @@
-# v3.6 Major and Minor Upgrade Playbook
+# v3.7 Major and Minor Upgrade Playbook
## Overview
This playbook currently performs the following steps.
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_3/README.md b/playbooks/byo/openshift-cluster/upgrades/v3_8/README.md
index 6892f6324..d9be6ae3b 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_3/README.md
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_8/README.md
@@ -1,11 +1,10 @@
-# v3.3 Major and Minor Upgrade Playbook
+# v3.8 Major and Minor Upgrade Playbook
## Overview
-This playbook currently performs the
-following steps.
+This playbook currently performs the following steps.
* Upgrade and restart master services
- * Unschedule node.
+ * Unschedule node
* Upgrade and restart docker
* Upgrade and restart node services
* Modifies the subset of the configuration necessary
@@ -15,4 +14,7 @@ following steps.
* Updates image streams and quickstarts
## Usage
-ansible-playbook -i ~/ansible-inventory openshift-ansible/playbooks/byo/openshift-cluster/upgrades/v3_3/upgrade.yml
+
+```
+ansible-playbook -i ~/ansible-inventory openshift-ansible/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade.yml
+```
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade.yml
index 8cce91b3f..3d4e6a790 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade.yml
@@ -4,4 +4,4 @@
#
- include: ../../initialize_groups.yml
-- include: ../../../../common/openshift-cluster/upgrades/v3_4/upgrade.yml
+- include: ../../../../common/openshift-cluster/upgrades/v3_8/upgrade.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml
index 8e5d0f5f9..d83305119 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml
@@ -13,4 +13,4 @@
#
- include: ../../initialize_groups.yml
-- include: ../../../../common/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
+- include: ../../../../common/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml b/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml
index 5b3f6ab06..a972bb7a6 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml
@@ -6,4 +6,4 @@
#
- include: ../../initialize_groups.yml
-- include: ../../../../common/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml
+- include: ../../../../common/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml
diff --git a/playbooks/common/openshift-checks/install.yml b/playbooks/common/openshift-checks/install.yml
index 6701a2e15..93cf6c359 100644
--- a/playbooks/common/openshift-checks/install.yml
+++ b/playbooks/common/openshift-checks/install.yml
@@ -1,13 +1,15 @@
---
- name: Health Check Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Health Check 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_health: "In Progress"
- aggregate: false
+ installer_phase_health:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: OpenShift Health Checks
hosts: oo_all_hosts
@@ -37,11 +39,13 @@
- docker_image_availability
- name: Health Check Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Health Check 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_health: "Complete"
- aggregate: false
+ installer_phase_health:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/openshift_hosted.yml b/playbooks/common/openshift-cluster/openshift_hosted.yml
index 281ccce2e..15ee60dc0 100644
--- a/playbooks/common/openshift-cluster/openshift_hosted.yml
+++ b/playbooks/common/openshift-cluster/openshift_hosted.yml
@@ -1,13 +1,15 @@
---
- name: Hosted Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Hosted install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_hosted: "In Progress"
- aggregate: false
+ installer_phase_hosted:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- include: create_persistent_volumes.yml
@@ -30,11 +32,13 @@
- openshift_crio_enable_docker_gc | default(False) | bool
- name: Hosted Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Hosted install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_hosted: "Complete"
- aggregate: false
+ installer_phase_hosted:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/openshift_logging.yml b/playbooks/common/openshift-cluster/openshift_logging.yml
index 529a4c939..bc59bd95a 100644
--- a/playbooks/common/openshift-cluster/openshift_logging.yml
+++ b/playbooks/common/openshift-cluster/openshift_logging.yml
@@ -1,13 +1,15 @@
---
- name: Logging Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Logging install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_logging: "In Progress"
- aggregate: false
+ installer_phase_logging:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: OpenShift Aggregated Logging
hosts: oo_first_master
@@ -23,11 +25,13 @@
tasks_from: update_master_config
- name: Logging Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Logging install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_logging: "Complete"
- aggregate: false
+ installer_phase_logging:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/openshift_metrics.yml b/playbooks/common/openshift-cluster/openshift_metrics.yml
index 9c0bd489b..80cd93e5f 100644
--- a/playbooks/common/openshift-cluster/openshift_metrics.yml
+++ b/playbooks/common/openshift-cluster/openshift_metrics.yml
@@ -1,13 +1,15 @@
---
- name: Metrics Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Metrics install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_metrics: "In Progress"
- aggregate: false
+ installer_phase_metrics:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: OpenShift Metrics
hosts: oo_first_master
@@ -24,11 +26,13 @@
tasks_from: update_master_config.yaml
- name: Metrics Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Metrics install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_metrics: "Complete"
- aggregate: false
+ installer_phase_metrics:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/openshift_prometheus.yml b/playbooks/common/openshift-cluster/openshift_prometheus.yml
index a73b294a5..7aa9a16e6 100644
--- a/playbooks/common/openshift-cluster/openshift_prometheus.yml
+++ b/playbooks/common/openshift-cluster/openshift_prometheus.yml
@@ -1,13 +1,15 @@
---
- name: Prometheus Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Prometheus install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_prometheus: "In Progress"
- aggregate: false
+ installer_phase_prometheus:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Create Hosted Resources - openshift_prometheus
hosts: oo_first_master
@@ -15,11 +17,13 @@
- role: openshift_prometheus
- name: Prometheus Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Prometheus install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_prometheus: "Complete"
- aggregate: false
+ installer_phase_prometheus:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/service_catalog.yml b/playbooks/common/openshift-cluster/service_catalog.yml
index bd964b2ce..7bb8511f6 100644
--- a/playbooks/common/openshift-cluster/service_catalog.yml
+++ b/playbooks/common/openshift-cluster/service_catalog.yml
@@ -1,13 +1,15 @@
---
- name: Service Catalog Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Service Catalog install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_servicecatalog: "In Progress"
- aggregate: false
+ installer_phase_servicecatalog:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Service Catalog
hosts: oo_first_master
@@ -19,11 +21,13 @@
first_master: "{{ groups.oo_first_master[0] }}"
- name: Service Catalog Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Service Catalog install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_servicecatalog: "Complete"
- aggregate: false
+ installer_phase_servicecatalog:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/std_include.yml b/playbooks/common/openshift-cluster/std_include.yml
index 45b34c8bd..fe376fe31 100644
--- a/playbooks/common/openshift-cluster/std_include.yml
+++ b/playbooks/common/openshift-cluster/std_include.yml
@@ -1,15 +1,17 @@
---
- name: Initialization Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
roles:
- installer_checkpoint
tasks:
- name: Set install initialization 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_initialize: "In Progress"
- aggregate: false
+ installer_phase_initialize:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- include: evaluate_groups.yml
tags:
@@ -36,11 +38,13 @@
- always
- name: Initialization Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set install initialization 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_initialize: "Complete"
- aggregate: false
+ installer_phase_initialize:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml
index a5e2f7940..8783ade99 100644
--- a/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml
+++ b/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml
@@ -3,22 +3,6 @@
# Upgrade Masters
###############################################################################
-# oc adm migrate storage should be run prior to etcd v3 upgrade
-# See: https://github.com/openshift/origin/pull/14625#issuecomment-308467060
-- name: Pre master upgrade - Upgrade all storage
- hosts: oo_first_master
- tasks:
- - name: Upgrade all storage
- command: >
- {{ openshift.common.client_binary }} adm --config={{ openshift.common.config_base }}/master/admin.kubeconfig
- migrate storage --include=* --confirm
- register: l_pb_upgrade_control_plane_pre_upgrade_storage
- when: openshift_upgrade_pre_storage_migration_enabled | default(true) | bool
- failed_when:
- - openshift_upgrade_pre_storage_migration_enabled | default(true) | bool
- - l_pb_upgrade_control_plane_pre_upgrade_storage.rc != 0
- - openshift_upgrade_pre_storage_migration_fatal | default(true) | bool
-
# If facts cache were for some reason deleted, this fact may not be set, and if not set
# it will always default to true. This causes problems for the etcd data dir fact detection
# so we must first make sure this is set correctly before attempting the backup.
@@ -48,6 +32,22 @@
- include: create_service_signer_cert.yml
+# oc adm migrate storage should be run prior to etcd v3 upgrade
+# See: https://github.com/openshift/origin/pull/14625#issuecomment-308467060
+- name: Pre master upgrade - Upgrade all storage
+ hosts: oo_first_master
+ tasks:
+ - name: Upgrade all storage
+ command: >
+ {{ openshift.common.client_binary }} adm --config={{ openshift.common.config_base }}/master/admin.kubeconfig
+ migrate storage --include=* --confirm
+ register: l_pb_upgrade_control_plane_pre_upgrade_storage
+ when: openshift_upgrade_pre_storage_migration_enabled | default(true) | bool
+ failed_when:
+ - openshift_upgrade_pre_storage_migration_enabled | default(true) | bool
+ - l_pb_upgrade_control_plane_pre_upgrade_storage.rc != 0
+ - openshift_upgrade_pre_storage_migration_fatal | default(true) | bool
+
# Set openshift_master_facts separately. In order to reconcile
# admission_config's, we currently must run openshift_master_facts and
# then run openshift_facts.
@@ -153,7 +153,9 @@
{{ openshift.common.client_binary }} adm --config={{ openshift.common.config_base }}/master/admin.kubeconfig
migrate storage --include=clusterpolicies --confirm
register: l_pb_upgrade_control_plane_post_upgrade_storage
- when: openshift_upgrade_post_storage_migration_enabled | default(true) | bool
+ when:
+ - openshift_upgrade_post_storage_migration_enabled | default(true) | bool
+ - openshift_version | version_compare('3.7','<')
failed_when:
- openshift_upgrade_post_storage_migration_enabled | default(true) | bool
- l_pb_upgrade_control_plane_post_upgrade_storage.rc != 0
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_3/master_config_upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_3/master_config_upgrade.yml
deleted file mode 100644
index 5e7a66171..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_3/master_config_upgrade.yml
+++ /dev/null
@@ -1,66 +0,0 @@
----
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.externalKubernetesClientConnectionOverrides.acceptContentTypes'
- yaml_value: 'application/vnd.kubernetes.protobuf,application/json'
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.externalKubernetesClientConnectionOverrides.contentType'
- yaml_value: 'application/vnd.kubernetes.protobuf'
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.externalKubernetesClientConnectionOverrides.burst'
- yaml_value: 400
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.externalKubernetesClientConnectionOverrides.qps'
- yaml_value: 200
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.openshiftLoopbackClientConnectionOverrides.acceptContentTypes'
- yaml_value: 'application/vnd.kubernetes.protobuf,application/json'
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.openshiftLoopbackClientConnectionOverrides.contentType'
- yaml_value: 'application/vnd.kubernetes.protobuf'
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.openshiftLoopbackClientConnectionOverrides.burst'
- yaml_value: 600
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'masterClients.openshiftLoopbackClientConnectionOverrides.qps'
- yaml_value: 300
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'controllerConfig.serviceServingCert.signer.certFile'
- yaml_value: service-signer.crt
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'controllerConfig.serviceServingCert.signer.keyFile'
- yaml_value: service-signer.key
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'admissionConfig.pluginConfig'
- yaml_value: "{{ openshift.master.admission_plugin_config }}"
- when: "'admission_plugin_config' in openshift.master"
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'admissionConfig.pluginOrderOverride'
- yaml_value:
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'kubernetesMasterConfig.admissionConfig'
- yaml_value:
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_3/node_config_upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_3/node_config_upgrade.yml
deleted file mode 100644
index 89b524f14..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_3/node_config_upgrade.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/node/node-config.yaml"
- yaml_key: 'masterClientConnectionOverrides.acceptContentTypes'
- yaml_value: 'application/vnd.kubernetes.protobuf,application/json'
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/node/node-config.yaml"
- yaml_key: 'masterClientConnectionOverrides.contentType'
- yaml_value: 'application/vnd.kubernetes.protobuf'
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/node/node-config.yaml"
- yaml_key: 'masterClientConnectionOverrides.burst'
- yaml_value: 40
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/node/node-config.yaml"
- yaml_key: 'masterClientConnectionOverrides.qps'
- yaml_value: 20
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_3/roles b/playbooks/common/openshift-cluster/upgrades/v3_3/roles
deleted file mode 120000
index 6bc1a7aef..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_3/roles
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../roles
\ No newline at end of file
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade.yml
deleted file mode 100644
index a241ef039..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade.yml
+++ /dev/null
@@ -1,118 +0,0 @@
----
-#
-# Full Control Plane + Nodes Upgrade
-#
-- include: ../init.yml
- tags:
- - pre_upgrade
-
-- name: Configure the upgrade target for the common upgrade tasks
- hosts: oo_all_hosts
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_upgrade_target: "{{ '1.3' if deployment_type == 'origin' else '3.3' }}"
- openshift_upgrade_min: "{{ '1.2' if deployment_type == 'origin' else '3.2' }}"
-
-# Pre-upgrade
-
-- include: ../initialize_nodes_to_upgrade.yml
- tags:
- - pre_upgrade
-
-- name: Update repos and initialize facts on all hosts
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config:oo_lb_to_config
- tags:
- - pre_upgrade
- roles:
- - openshift_repos
-
-- name: Set openshift_no_proxy_internal_hostnames
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_no_proxy_internal_hostnames: "{{ hostvars | oo_select_keys(groups['oo_nodes_to_config']
- | union(groups['oo_masters_to_config'])
- | union(groups['oo_etcd_to_config'] | default([])))
- | oo_collect('openshift.common.hostname') | default([]) | join (',')
- }}"
- when:
- - openshift_http_proxy is defined or openshift_https_proxy is defined
- - openshift_generate_no_proxy_hosts | default(True) | bool
-
-- include: ../pre/verify_inventory_vars.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_master_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_node_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../../initialize_openshift_version.yml
- tags:
- - pre_upgrade
- vars:
- # Request specific openshift_release and let the openshift_version role handle converting this
- # to a more specific version, respecting openshift_image_tag and openshift_pkg_version if
- # defined, and overriding the normal behavior of protecting the installed version
- openshift_release: "{{ openshift_upgrade_target }}"
- openshift_protect_installed_version: False
-
- # We skip the docker role at this point in upgrade to prevent
- # unintended package, container, or config upgrades which trigger
- # docker restarts. At this early stage of upgrade we can assume
- # docker is configured and running.
- skip_docker_role: True
-
-- include: ../pre/verify_control_plane_running.yml
- tags:
- - pre_upgrade
-
-- include: ../../../openshift-master/validate_restart.yml
- tags:
- - pre_upgrade
-
-- name: Verify upgrade targets
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tasks:
- - include: ../pre/verify_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- name: Verify docker upgrade targets
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config
- tasks:
- - include: ../pre/tasks/verify_docker_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- include: ../pre/gate_checks.yml
- tags:
- - pre_upgrade
-
-# Pre-upgrade completed, nothing after this should be tagged pre_upgrade.
-
-# Separate step so we can execute in parallel and clear out anything unused
-# before we get into the serialized upgrade process which will then remove
-# remaining images if possible.
-- name: Cleanup unused Docker images
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config
- tasks:
- - include: ../cleanup_unused_images.yml
-
-- include: ../upgrade_control_plane.yml
- vars:
- master_config_hook: "v3_3/master_config_upgrade.yml"
-
-- include: ../upgrade_nodes.yml
- vars:
- node_config_hook: "v3_3/node_config_upgrade.yml"
-
-- include: ../post_control_plane.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
deleted file mode 100644
index 54c85f0fb..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_control_plane.yml
+++ /dev/null
@@ -1,118 +0,0 @@
----
-#
-# Control Plane Upgrade Playbook
-#
-# Upgrades masters and Docker (only on standalone etcd hosts)
-#
-# This upgrade does not include:
-# - node service running on masters
-# - docker running on masters
-# - node service running on dedicated nodes
-#
-# You can run the upgrade_nodes.yml playbook after this to upgrade these components separately.
-#
-- include: ../init.yml
- tags:
- - pre_upgrade
-
-- name: Configure the upgrade target for the common upgrade tasks
- hosts: oo_all_hosts
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_upgrade_target: "{{ '1.3' if deployment_type == 'origin' else '3.3' }}"
- openshift_upgrade_min: "{{ '1.2' if deployment_type == 'origin' else '3.2' }}"
-
-# Pre-upgrade
-- include: ../initialize_nodes_to_upgrade.yml
- tags:
- - pre_upgrade
-
-- name: Update repos on control plane hosts
- hosts: oo_masters_to_config:oo_etcd_to_config:oo_lb_to_config
- tags:
- - pre_upgrade
- roles:
- - openshift_repos
-
-- name: Set openshift_no_proxy_internal_hostnames
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_no_proxy_internal_hostnames: "{{ hostvars | oo_select_keys(groups['oo_nodes_to_config']
- | union(groups['oo_masters_to_config'])
- | union(groups['oo_etcd_to_config'] | default([])))
- | oo_collect('openshift.common.hostname') | default([]) | join (',')
- }}"
- when:
- - openshift_http_proxy is defined or openshift_https_proxy is defined
- - openshift_generate_no_proxy_hosts | default(True) | bool
-
-- include: ../pre/verify_inventory_vars.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_master_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../../initialize_openshift_version.yml
- tags:
- - pre_upgrade
- vars:
- # Request specific openshift_release and let the openshift_version role handle converting this
- # to a more specific version, respecting openshift_image_tag and openshift_pkg_version if
- # defined, and overriding the normal behavior of protecting the installed version
- openshift_release: "{{ openshift_upgrade_target }}"
- openshift_protect_installed_version: False
-
- # We skip the docker role at this point in upgrade to prevent
- # unintended package, container, or config upgrades which trigger
- # docker restarts. At this early stage of upgrade we can assume
- # docker is configured and running.
- skip_docker_role: True
-
-- include: ../pre/verify_control_plane_running.yml
- tags:
- - pre_upgrade
-
-- include: ../../../openshift-master/validate_restart.yml
- tags:
- - pre_upgrade
-
-- name: Verify upgrade targets
- hosts: oo_masters_to_config
- tasks:
- - include: ../pre/verify_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- name: Verify docker upgrade targets
- hosts: oo_masters_to_config:oo_etcd_to_config
- tasks:
- - include: ../pre/tasks/verify_docker_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- include: ../pre/gate_checks.yml
- tags:
- - pre_upgrade
-
-# Pre-upgrade completed, nothing after this should be tagged pre_upgrade.
-
-# Separate step so we can execute in parallel and clear out anything unused
-# before we get into the serialized upgrade process which will then remove
-# remaining images if possible.
-- name: Cleanup unused Docker images
- hosts: oo_masters_to_config:oo_etcd_to_config
- tasks:
- - include: ../cleanup_unused_images.yml
-
-- include: ../upgrade_control_plane.yml
- vars:
- master_config_hook: "v3_3/master_config_upgrade.yml"
-
-- include: ../post_control_plane.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
deleted file mode 100644
index cee4e9087..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_3/upgrade_nodes.yml
+++ /dev/null
@@ -1,113 +0,0 @@
----
-#
-# Node Upgrade Playbook
-#
-# Upgrades nodes only, but requires the control plane to have already been upgraded.
-#
-- include: ../init.yml
- tags:
- - pre_upgrade
-
-- name: Configure the upgrade target for the common upgrade tasks
- hosts: oo_all_hosts
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_upgrade_target: "{{ '1.3' if deployment_type == 'origin' else '3.3' }}"
- openshift_upgrade_min: "{{ '1.2' if deployment_type == 'origin' else '3.2' }}"
-
-# Pre-upgrade
-- include: ../initialize_nodes_to_upgrade.yml
- tags:
- - pre_upgrade
-
-- name: Update repos on nodes
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config:oo_lb_to_config
- roles:
- - openshift_repos
- tags:
- - pre_upgrade
-
-- name: Set openshift_no_proxy_internal_hostnames
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_no_proxy_internal_hostnames: "{{ hostvars | oo_select_keys(groups['oo_nodes_to_upgrade']
- | union(groups['oo_masters_to_config'])
- | union(groups['oo_etcd_to_config'] | default([])))
- | oo_collect('openshift.common.hostname') | default([]) | join (',')
- }}"
- when:
- - openshift_http_proxy is defined or openshift_https_proxy is defined
- - openshift_generate_no_proxy_hosts | default(True) | bool
-
-- include: ../pre/verify_inventory_vars.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_node_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../../initialize_openshift_version.yml
- tags:
- - pre_upgrade
- vars:
- # Request specific openshift_release and let the openshift_version role handle converting this
- # to a more specific version, respecting openshift_image_tag and openshift_pkg_version if
- # defined, and overriding the normal behavior of protecting the installed version
- openshift_release: "{{ openshift_upgrade_target }}"
- openshift_protect_installed_version: False
-
- # We skip the docker role at this point in upgrade to prevent
- # unintended package, container, or config upgrades which trigger
- # docker restarts. At this early stage of upgrade we can assume
- # docker is configured and running.
- skip_docker_role: True
-
-- name: Verify masters are already upgraded
- hosts: oo_masters_to_config
- tags:
- - pre_upgrade
- tasks:
- - fail: msg="Master running {{ openshift.common.version }} must be upgraded to {{ openshift_version }} before node upgrade can be run."
- when: openshift.common.version != openshift_version
-
-- include: ../pre/verify_control_plane_running.yml
- tags:
- - pre_upgrade
-
-- name: Verify upgrade targets
- hosts: oo_nodes_to_upgrade
- tasks:
- - include: ../pre/verify_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- name: Verify docker upgrade targets
- hosts: oo_nodes_to_upgrade
- tasks:
- - include: ../pre/tasks/verify_docker_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- include: ../pre/gate_checks.yml
- tags:
- - pre_upgrade
-
-# Pre-upgrade completed, nothing after this should be tagged pre_upgrade.
-
-# Separate step so we can execute in parallel and clear out anything unused
-# before we get into the serialized upgrade process which will then remove
-# remaining images if possible.
-- name: Cleanup unused Docker images
- hosts: oo_nodes_to_upgrade
- tasks:
- - include: ../cleanup_unused_images.yml
-
-- include: ../upgrade_nodes.yml
- vars:
- node_config_hook: "v3_3/node_config_upgrade.yml"
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_4/roles b/playbooks/common/openshift-cluster/upgrades/v3_4/roles
deleted file mode 120000
index 6bc1a7aef..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_4/roles
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../roles
\ No newline at end of file
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade.yml
deleted file mode 100644
index ae217ba2e..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade.yml
+++ /dev/null
@@ -1,116 +0,0 @@
----
-#
-# Full Control Plane + Nodes Upgrade
-#
-- include: ../init.yml
- tags:
- - pre_upgrade
-
-- name: Configure the upgrade target for the common upgrade tasks
- hosts: oo_all_hosts
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_upgrade_target: "{{ '1.4' if deployment_type == 'origin' else '3.4' }}"
- openshift_upgrade_min: "{{ '1.3' if deployment_type == 'origin' else '3.3' }}"
-
-# Pre-upgrade
-
-- include: ../initialize_nodes_to_upgrade.yml
- tags:
- - pre_upgrade
-
-- name: Update repos and initialize facts on all hosts
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config:oo_lb_to_config
- tags:
- - pre_upgrade
- roles:
- - openshift_repos
-
-- name: Set openshift_no_proxy_internal_hostnames
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_no_proxy_internal_hostnames: "{{ hostvars | oo_select_keys(groups['oo_nodes_to_config']
- | union(groups['oo_masters_to_config'])
- | union(groups['oo_etcd_to_config'] | default([])))
- | oo_collect('openshift.common.hostname') | default([]) | join (',')
- }}"
- when:
- - openshift_http_proxy is defined or openshift_https_proxy is defined
- - openshift_generate_no_proxy_hosts | default(True) | bool
-
-- include: ../pre/verify_inventory_vars.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_master_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_node_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../../initialize_openshift_version.yml
- tags:
- - pre_upgrade
- vars:
- # Request specific openshift_release and let the openshift_version role handle converting this
- # to a more specific version, respecting openshift_image_tag and openshift_pkg_version if
- # defined, and overriding the normal behavior of protecting the installed version
- openshift_release: "{{ openshift_upgrade_target }}"
- openshift_protect_installed_version: False
-
- # We skip the docker role at this point in upgrade to prevent
- # unintended package, container, or config upgrades which trigger
- # docker restarts. At this early stage of upgrade we can assume
- # docker is configured and running.
- skip_docker_role: True
-
-- include: ../pre/verify_control_plane_running.yml
- tags:
- - pre_upgrade
-
-- include: ../../../openshift-master/validate_restart.yml
- tags:
- - pre_upgrade
-
-- name: Verify upgrade targets
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tasks:
- - include: ../pre/verify_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- name: Verify docker upgrade targets
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config
- tasks:
- - include: ../pre/tasks/verify_docker_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- include: ../pre/gate_checks.yml
- tags:
- - pre_upgrade
-
-# Pre-upgrade completed, nothing after this should be tagged pre_upgrade.
-
-# Separate step so we can execute in parallel and clear out anything unused
-# before we get into the serialized upgrade process which will then remove
-# remaining images if possible.
-- name: Cleanup unused Docker images
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config
- tasks:
- - include: ../cleanup_unused_images.yml
-
-- include: ../upgrade_control_plane.yml
- vars:
- master_config_hook: "v3_4/master_config_upgrade.yml"
-
-- include: ../upgrade_nodes.yml
-
-- include: ../post_control_plane.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
deleted file mode 100644
index d7cb38d03..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_control_plane.yml
+++ /dev/null
@@ -1,118 +0,0 @@
----
-#
-# Control Plane Upgrade Playbook
-#
-# Upgrades masters and Docker (only on standalone etcd hosts)
-#
-# This upgrade does not include:
-# - node service running on masters
-# - docker running on masters
-# - node service running on dedicated nodes
-#
-# You can run the upgrade_nodes.yml playbook after this to upgrade these components separately.
-#
-- include: ../init.yml
- tags:
- - pre_upgrade
-
-- name: Configure the upgrade target for the common upgrade tasks
- hosts: oo_all_hosts
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_upgrade_target: "{{ '1.4' if deployment_type == 'origin' else '3.4' }}"
- openshift_upgrade_min: "{{ '1.3' if deployment_type == 'origin' else '3.3' }}"
-
-# Pre-upgrade
-- include: ../initialize_nodes_to_upgrade.yml
- tags:
- - pre_upgrade
-
-- name: Update repos on control plane hosts
- hosts: oo_masters_to_config:oo_etcd_to_config:oo_lb_to_config
- tags:
- - pre_upgrade
- roles:
- - openshift_repos
-
-- name: Set openshift_no_proxy_internal_hostnames
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_no_proxy_internal_hostnames: "{{ hostvars | oo_select_keys(groups['oo_nodes_to_config']
- | union(groups['oo_masters_to_config'])
- | union(groups['oo_etcd_to_config'] | default([])))
- | oo_collect('openshift.common.hostname') | default([]) | join (',')
- }}"
- when:
- - openshift_http_proxy is defined or openshift_https_proxy is defined
- - openshift_generate_no_proxy_hosts | default(True) | bool
-
-- include: ../pre/verify_inventory_vars.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_master_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../../initialize_openshift_version.yml
- tags:
- - pre_upgrade
- vars:
- # Request specific openshift_release and let the openshift_version role handle converting this
- # to a more specific version, respecting openshift_image_tag and openshift_pkg_version if
- # defined, and overriding the normal behavior of protecting the installed version
- openshift_release: "{{ openshift_upgrade_target }}"
- openshift_protect_installed_version: False
-
- # We skip the docker role at this point in upgrade to prevent
- # unintended package, container, or config upgrades which trigger
- # docker restarts. At this early stage of upgrade we can assume
- # docker is configured and running.
- skip_docker_role: True
-
-- include: ../pre/verify_control_plane_running.yml
- tags:
- - pre_upgrade
-
-- include: ../../../openshift-master/validate_restart.yml
- tags:
- - pre_upgrade
-
-- name: Verify upgrade targets
- hosts: oo_masters_to_config
- tasks:
- - include: ../pre/verify_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- name: Verify docker upgrade targets
- hosts: oo_masters_to_config:oo_etcd_to_config
- tasks:
- - include: ../pre/tasks/verify_docker_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- include: ../pre/gate_checks.yml
- tags:
- - pre_upgrade
-
-# Pre-upgrade completed, nothing after this should be tagged pre_upgrade.
-
-# Separate step so we can execute in parallel and clear out anything unused
-# before we get into the serialized upgrade process which will then remove
-# remaining images if possible.
-- name: Cleanup unused Docker images
- hosts: oo_masters_to_config:oo_etcd_to_config
- tasks:
- - include: ../cleanup_unused_images.yml
-
-- include: ../upgrade_control_plane.yml
- vars:
- master_config_hook: "v3_4/master_config_upgrade.yml"
-
-- include: ../post_control_plane.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
deleted file mode 100644
index 8531e6045..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_4/upgrade_nodes.yml
+++ /dev/null
@@ -1,111 +0,0 @@
----
-#
-# Node Upgrade Playbook
-#
-# Upgrades nodes only, but requires the control plane to have already been upgraded.
-#
-- include: ../init.yml
- tags:
- - pre_upgrade
-
-- name: Configure the upgrade target for the common upgrade tasks
- hosts: oo_all_hosts
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_upgrade_target: "{{ '1.4' if deployment_type == 'origin' else '3.4' }}"
- openshift_upgrade_min: "{{ '1.3' if deployment_type == 'origin' else '3.3' }}"
-
-# Pre-upgrade
-- include: ../initialize_nodes_to_upgrade.yml
- tags:
- - pre_upgrade
-
-- name: Update repos on nodes
- hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config:oo_lb_to_config
- roles:
- - openshift_repos
- tags:
- - pre_upgrade
-
-- name: Set openshift_no_proxy_internal_hostnames
- hosts: oo_masters_to_config:oo_nodes_to_upgrade
- tags:
- - pre_upgrade
- tasks:
- - set_fact:
- openshift_no_proxy_internal_hostnames: "{{ hostvars | oo_select_keys(groups['oo_nodes_to_upgrade']
- | union(groups['oo_masters_to_config'])
- | union(groups['oo_etcd_to_config'] | default([])))
- | oo_collect('openshift.common.hostname') | default([]) | join (',')
- }}"
- when:
- - openshift_http_proxy is defined or openshift_https_proxy is defined
- - openshift_generate_no_proxy_hosts | default(True) | bool
-
-- include: ../pre/verify_inventory_vars.yml
- tags:
- - pre_upgrade
-
-- include: ../disable_node_excluders.yml
- tags:
- - pre_upgrade
-
-- include: ../../initialize_openshift_version.yml
- tags:
- - pre_upgrade
- vars:
- # Request specific openshift_release and let the openshift_version role handle converting this
- # to a more specific version, respecting openshift_image_tag and openshift_pkg_version if
- # defined, and overriding the normal behavior of protecting the installed version
- openshift_release: "{{ openshift_upgrade_target }}"
- openshift_protect_installed_version: False
-
- # We skip the docker role at this point in upgrade to prevent
- # unintended package, container, or config upgrades which trigger
- # docker restarts. At this early stage of upgrade we can assume
- # docker is configured and running.
- skip_docker_role: True
-
-- name: Verify masters are already upgraded
- hosts: oo_masters_to_config
- tags:
- - pre_upgrade
- tasks:
- - fail: msg="Master running {{ openshift.common.version }} must be upgraded to {{ openshift_version }} before node upgrade can be run."
- when: openshift.common.version != openshift_version
-
-- include: ../pre/verify_control_plane_running.yml
- tags:
- - pre_upgrade
-
-- name: Verify upgrade targets
- hosts: oo_nodes_to_upgrade
- tasks:
- - include: ../pre/verify_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- name: Verify docker upgrade targets
- hosts: oo_nodes_to_upgrade
- tasks:
- - include: ../pre/tasks/verify_docker_upgrade_targets.yml
- tags:
- - pre_upgrade
-
-- include: ../pre/gate_checks.yml
- tags:
- - pre_upgrade
-
-# Pre-upgrade completed, nothing after this should be tagged pre_upgrade.
-
-# Separate step so we can execute in parallel and clear out anything unused
-# before we get into the serialized upgrade process which will then remove
-# remaining images if possible.
-- name: Cleanup unused Docker images
- hosts: oo_nodes_to_upgrade
- tasks:
- - include: ../cleanup_unused_images.yml
-
-- include: ../upgrade_nodes.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/master_config_upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_5/master_config_upgrade.yml
deleted file mode 100644
index 52458e03c..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/master_config_upgrade.yml
+++ /dev/null
@@ -1,10 +0,0 @@
----
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'controllerConfig.serviceServingCert.signer.certFile'
- yaml_value: service-signer.crt
-
-- modify_yaml:
- dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
- yaml_key: 'controllerConfig.serviceServingCert.signer.keyFile'
- yaml_value: service-signer.key
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/validator.yml b/playbooks/common/openshift-cluster/upgrades/v3_5/validator.yml
deleted file mode 100644
index ae63c9ca9..000000000
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/validator.yml
+++ /dev/null
@@ -1,67 +0,0 @@
----
-###############################################################################
-# Pre upgrade checks for known data problems, if this playbook fails you should
-# contact support. If you're not supported contact users@lists.openshift.com
-#
-# oc_objectvalidator provides these two checks
-# 1 - SDN Data issues, never seen in the wild but known possible due to code audits
-# https://github.com/openshift/origin/issues/12697
-# 2 - Namespace protections, https://bugzilla.redhat.com/show_bug.cgi?id=1428934
-#
-###############################################################################
-- name: Verify 3.5 specific upgrade checks
- hosts: oo_first_master
- roles:
- - { role: lib_openshift }
- tasks:
- - name: Check for invalid namespaces and SDN errors
- oc_objectvalidator:
-
- # What's all this PetSet business about?
- #
- # 'PetSets' were ALPHA resources in Kube <= 3.4. In >= 3.5 they are
- # no longer supported. The BETA resource 'StatefulSets' replaces
- # them. We can't migrate clients PetSets to
- # StatefulSets. Additionally, Red Hat has never officially supported
- # these resource types. Sorry users, but if you were using
- # unsupported resources from the Kube documentation then we can't
- # help you at this time.
- #
- # Reference: https://bugzilla.redhat.com/show_bug.cgi?id=1428229
- - name: Check if legacy PetSets exist
- oc_obj:
- state: list
- all_namespaces: true
- kind: petsets
- register: l_do_petsets_exist
-
- - name: Fail on unsupported resource migration 'PetSets'
- fail:
- msg: >
- PetSet objects were detected in your cluster. These are an
- Alpha feature in upstream Kubernetes 1.4 and are not supported
- by Red Hat. In Kubernetes 1.5, they are replaced by the Beta
- feature StatefulSets. Red Hat currently does not offer support
- for either PetSets or StatefulSets.
-
- Automatically migrating PetSets to StatefulSets in OpenShift
- Container Platform (OCP) 3.5 is not supported. See the
- Kubernetes "Upgrading from PetSets to StatefulSets"
- documentation for additional information:
-
- https://kubernetes.io/docs/tasks/manage-stateful-set/upgrade-pet-set-to-stateful-set/
-
- PetSets MUST be removed before upgrading to OCP 3.5. Red Hat
- strongly recommends reading the above referenced documentation
- in its entirety before taking any destructive actions.
-
- If you want to simply remove all PetSets without manually
- migrating to StatefulSets, run this command as a user with
- cluster-admin privileges:
-
- $ oc get petsets --all-namespaces -o yaml | oc delete -f - --cascade=false
- when:
- # Search did not fail, valid resource type found
- - l_do_petsets_exist.results.returncode == 0
- # Items do exist in the search results
- - l_do_petsets_exist.results.results.0['items'] | length > 0
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_7/validator.yml b/playbooks/common/openshift-cluster/upgrades/v3_7/validator.yml
index 7a28eeb27..74d0cd8ad 100644
--- a/playbooks/common/openshift-cluster/upgrades/v3_7/validator.yml
+++ b/playbooks/common/openshift-cluster/upgrades/v3_7/validator.yml
@@ -11,13 +11,15 @@
tasks:
- name: Check for invalid namespaces and SDN errors
oc_objectvalidator:
-
+ # DO NOT DISABLE THIS, YOUR UPGRADE WILL FAIL IF YOU DO SO
- name: Confirm OpenShift authorization objects are in sync
command: >
{{ openshift.common.client_binary }} adm migrate authorization
- when: openshift_currently_installed_version | version_compare('3.7','<')
+ when:
+ - openshift_currently_installed_version | version_compare('3.7','<')
+ - openshift_upgrade_pre_authorization_migration_enabled | default(true) | bool
changed_when: false
register: l_oc_result
until: l_oc_result.rc == 0
- retries: 4
+ retries: 2
delay: 15
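
The hunk above gates the authorization-migration pre-check behind a new inventory variable. A minimal sketch of how an operator might turn it off — the variable name is taken from the hunk, the placement in group_vars is only an assumption, and the in-diff comment warns the upgrade will fail if the objects really are out of sync:

    # group_vars/OSEv3.yml (or passed with -e) -- hedged sketch only
    # skips the "oc adm migrate authorization" pre-upgrade check
    openshift_upgrade_pre_authorization_migration_enabled: false
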
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins b/playbooks/common/openshift-cluster/upgrades/v3_8/filter_plugins
index 7de3c1dd7..7de3c1dd7 120000
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/filter_plugins
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_4/master_config_upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_8/master_config_upgrade.yml
index 52458e03c..1d4d1919c 100644
--- a/playbooks/common/openshift-cluster/upgrades/v3_4/master_config_upgrade.yml
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/master_config_upgrade.yml
@@ -1,6 +1,11 @@
---
- modify_yaml:
dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
+ yaml_key: 'controllerConfig.election.lockName'
+ yaml_value: 'openshift-master-controllers'
+
+- modify_yaml:
+ dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
yaml_key: 'controllerConfig.serviceServingCert.signer.certFile'
yaml_value: service-signer.crt
@@ -8,3 +13,8 @@
dest: "{{ openshift.common.config_base}}/master/master-config.yaml"
yaml_key: 'controllerConfig.serviceServingCert.signer.keyFile'
yaml_value: service-signer.key
+
+- modify_yaml:
+ dest: "{{ openshift.common.config_base }}/master/master-config.yaml"
+ yaml_key: servingInfo.clientCA
+ yaml_value: ca.crt
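
For reference, once this v3_8 hook has run, the touched portion of master-config.yaml should look roughly like the fragment below — a sketch assembled only from the keys and values set by the modify_yaml tasks above, with all surrounding keys omitted:

    controllerConfig:
      election:
        lockName: openshift-master-controllers
      serviceServingCert:
        signer:
          certFile: service-signer.crt
          keyFile: service-signer.key
    servingInfo:
      clientCA: ca.crt
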
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/roles b/playbooks/common/openshift-cluster/upgrades/v3_8/roles
index 415645be6..415645be6 120000
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/roles
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/roles
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/upgrade.yml b/playbooks/common/openshift-cluster/upgrades/v3_8/upgrade.yml
index bda245fe1..b3162bd5f 100644
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/upgrade.yml
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/upgrade.yml
@@ -12,8 +12,8 @@
- pre_upgrade
tasks:
- set_fact:
- openshift_upgrade_target: "{{ '1.5' if deployment_type == 'origin' else '3.5' }}"
- openshift_upgrade_min: "{{ '1.4' if deployment_type == 'origin' else '3.4' }}"
+ openshift_upgrade_target: '3.8'
+ openshift_upgrade_min: '3.7'
# Pre-upgrade
@@ -21,6 +21,10 @@
tags:
- pre_upgrade
+- include: ../pre/verify_etcd3_backend.yml
+ tags:
+ - pre_upgrade
+
- name: Update repos and initialize facts on all hosts
hosts: oo_masters_to_config:oo_nodes_to_upgrade:oo_etcd_to_config:oo_lb_to_config
tags:
@@ -47,6 +51,10 @@
tags:
- pre_upgrade
+- include: ../pre/verify_health_checks.yml
+ tags:
+ - pre_upgrade
+
- include: ../pre/verify_control_plane_running.yml
tags:
- pre_upgrade
@@ -113,7 +121,21 @@
- include: ../upgrade_control_plane.yml
vars:
- master_config_hook: "v3_5/master_config_upgrade.yml"
+ master_config_hook: "v3_7/master_config_upgrade.yml"
+
+# All controllers must be stopped at the same time then restarted
+- name: Cycle all controller services to force new leader election mode
+ hosts: oo_masters_to_config
+ gather_facts: no
+ tasks:
+ - name: Stop {{ openshift.common.service_type }}-master-controllers
+ systemd:
+ name: "{{ openshift.common.service_type }}-master-controllers"
+ state: stopped
+ - name: Start {{ openshift.common.service_type }}-master-controllers
+ systemd:
+ name: "{{ openshift.common.service_type }}-master-controllers"
+ state: started
- include: ../upgrade_nodes.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml
index 6cdea7b84..3df5b17b5 100644
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/upgrade_control_plane.yml
@@ -21,14 +21,18 @@
- pre_upgrade
tasks:
- set_fact:
- openshift_upgrade_target: "{{ '1.5' if deployment_type == 'origin' else '3.5' }}"
- openshift_upgrade_min: "{{ '1.4' if deployment_type == 'origin' else '3.4' }}"
+ openshift_upgrade_target: '3.8'
+ openshift_upgrade_min: '3.7'
# Pre-upgrade
- include: ../initialize_nodes_to_upgrade.yml
tags:
- pre_upgrade
+- include: ../pre/verify_etcd3_backend.yml
+ tags:
+ - pre_upgrade
+
- name: Update repos on control plane hosts
hosts: oo_masters_to_config:oo_etcd_to_config:oo_lb_to_config
tags:
@@ -55,6 +59,10 @@
tags:
- pre_upgrade
+- include: ../pre/verify_health_checks.yml
+ tags:
+ - pre_upgrade
+
- include: ../pre/verify_control_plane_running.yml
tags:
- pre_upgrade
@@ -117,6 +125,20 @@
- include: ../upgrade_control_plane.yml
vars:
- master_config_hook: "v3_5/master_config_upgrade.yml"
+ master_config_hook: "v3_7/master_config_upgrade.yml"
+
+# All controllers must be stopped at the same time then restarted
+- name: Cycle all controller services to force new leader election mode
+ hosts: oo_masters_to_config
+ gather_facts: no
+ tasks:
+ - name: Stop {{ openshift.common.service_type }}-master-controllers
+ systemd:
+ name: "{{ openshift.common.service_type }}-master-controllers"
+ state: stopped
+ - name: Start {{ openshift.common.service_type }}-master-controllers
+ systemd:
+ name: "{{ openshift.common.service_type }}-master-controllers"
+ state: started
- include: ../post_control_plane.yml
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml
index e29d0f8e6..f3d192ba7 100644
--- a/playbooks/common/openshift-cluster/upgrades/v3_5/upgrade_nodes.yml
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/upgrade_nodes.yml
@@ -14,8 +14,8 @@
- pre_upgrade
tasks:
- set_fact:
- openshift_upgrade_target: "{{ '1.5' if deployment_type == 'origin' else '3.5' }}"
- openshift_upgrade_min: "{{ '1.4' if deployment_type == 'origin' else '3.4' }}"
+ openshift_upgrade_target: '3.8'
+ openshift_upgrade_min: '3.7'
# Pre-upgrade
- include: ../initialize_nodes_to_upgrade.yml
@@ -48,6 +48,10 @@
tags:
- pre_upgrade
+- include: ../pre/verify_health_checks.yml
+ tags:
+ - pre_upgrade
+
- include: ../disable_node_excluders.yml
tags:
- pre_upgrade
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_8/validator.yml b/playbooks/common/openshift-cluster/upgrades/v3_8/validator.yml
new file mode 100644
index 000000000..d8540abfb
--- /dev/null
+++ b/playbooks/common/openshift-cluster/upgrades/v3_8/validator.yml
@@ -0,0 +1,7 @@
+---
+- name: Verify 3.8 specific upgrade checks
+ hosts: oo_first_master
+ roles:
+ - { role: lib_openshift }
+ tasks:
+ - debug: msg="noop"
diff --git a/playbooks/common/openshift-etcd/config.yml b/playbooks/common/openshift-etcd/config.yml
index 48d46bbb0..3fe483785 100644
--- a/playbooks/common/openshift-etcd/config.yml
+++ b/playbooks/common/openshift-etcd/config.yml
@@ -1,13 +1,15 @@
---
- name: etcd Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set etcd install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_etcd: "In Progress"
- aggregate: false
+ installer_phase_etcd:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- include: ca.yml
@@ -26,11 +28,13 @@
- role: nickhammond.logrotate
- name: etcd Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set etcd install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_etcd: "Complete"
- aggregate: false
+ installer_phase_etcd:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-glusterfs/config.yml b/playbooks/common/openshift-glusterfs/config.yml
index c2ae5f313..19e14ab3e 100644
--- a/playbooks/common/openshift-glusterfs/config.yml
+++ b/playbooks/common/openshift-glusterfs/config.yml
@@ -1,13 +1,15 @@
---
- name: GlusterFS Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set GlusterFS install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_glusterfs: "In Progress"
- aggregate: false
+ installer_phase_glusterfs:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Open firewall ports for GlusterFS nodes
hosts: glusterfs
@@ -46,11 +48,13 @@
when: groups.oo_glusterfs_to_config | default([]) | count > 0
- name: GlusterFS Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set GlusterFS install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_glusterfs: "Complete"
- aggregate: false
+ installer_phase_glusterfs:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-loadbalancer/config.yml b/playbooks/common/openshift-loadbalancer/config.yml
index 2a703cb61..d737b836b 100644
--- a/playbooks/common/openshift-loadbalancer/config.yml
+++ b/playbooks/common/openshift-loadbalancer/config.yml
@@ -1,13 +1,15 @@
---
- name: Load Balancer Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set load balancer install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_loadbalancer: "In Progress"
- aggregate: false
+ installer_phase_loadbalancer:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Configure firewall and docker for load balancers
hosts: oo_lb_to_config:!oo_masters_to_config:!oo_nodes_to_config
@@ -37,11 +39,13 @@
- role: tuned
- name: Load Balancer Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set load balancer install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_loadbalancer: "Complete"
- aggregate: false
+ installer_phase_loadbalancer:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-management/config.yml b/playbooks/common/openshift-management/config.yml
index 908679e81..3f1cdf713 100644
--- a/playbooks/common/openshift-management/config.yml
+++ b/playbooks/common/openshift-management/config.yml
@@ -1,13 +1,15 @@
---
- name: Management Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Management install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_management: "In Progress"
- aggregate: false
+ installer_phase_management:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Setup CFME
hosts: oo_first_master
@@ -25,11 +27,13 @@
template_dir: "{{ hostvars[groups.masters.0].r_openshift_management_mktemp.stdout }}"
- name: Management Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Management install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_management: "Complete"
- aggregate: false
+ installer_phase_management:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-master/additional_config.yml b/playbooks/common/openshift-master/additional_config.yml
index 350557f19..4fef5b923 100644
--- a/playbooks/common/openshift-master/additional_config.yml
+++ b/playbooks/common/openshift-master/additional_config.yml
@@ -1,13 +1,15 @@
---
- name: Master Additional Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Master Additional install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_master_additional: "In Progress"
- aggregate: false
+ installer_phase_master_additional:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Additional master configuration
hosts: oo_first_master
@@ -36,11 +38,13 @@
when: openshift_use_flannel | default(false) | bool
- name: Master Additional Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Master Additional install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_master_additional: "Complete"
- aggregate: false
+ installer_phase_master_additional:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-master/config.yml b/playbooks/common/openshift-master/config.yml
index 7ce0362ef..6b0fd6b7c 100644
--- a/playbooks/common/openshift-master/config.yml
+++ b/playbooks/common/openshift-master/config.yml
@@ -1,13 +1,15 @@
---
- name: Master Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Master install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_master: "In Progress"
- aggregate: false
+ installer_phase_master:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- include: certificates.yml
@@ -238,11 +240,13 @@
r_openshift_excluder_service_type: "{{ openshift.common.service_type }}"
- name: Master Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Master install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_master: "Complete"
- aggregate: false
+ installer_phase_master:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-master/files/openshift-ansible-catalog-console.js b/playbooks/common/openshift-master/files/openshift-ansible-catalog-console.js
deleted file mode 100644
index d0a9f11dc..000000000
--- a/playbooks/common/openshift-master/files/openshift-ansible-catalog-console.js
+++ /dev/null
@@ -1,2 +0,0 @@
-// empty file so that the master-config can still point to a file that exists
-// this file will be replaced by the template service broker role if enabled
diff --git a/playbooks/common/openshift-master/tasks/wire_aggregator.yml b/playbooks/common/openshift-master/tasks/wire_aggregator.yml
index df3ea27b4..0d23e9d61 100644
--- a/playbooks/common/openshift-master/tasks/wire_aggregator.yml
+++ b/playbooks/common/openshift-master/tasks/wire_aggregator.yml
@@ -136,9 +136,15 @@
when:
- not front_proxy_kubeconfig.stat.exists
-- name: copy tech preview extension file for service console UI
- copy:
- src: openshift-ansible-catalog-console.js
+- name: Delete temp directory
+ file:
+ name: "{{ certtemp.stdout }}"
+ state: absent
+ changed_when: False
+
+- name: Setup extension file for service console UI
+ template:
+ src: ../templates/openshift-ansible-catalog-console.js
dest: /etc/origin/master/openshift-ansible-catalog-console.js
- name: Update master config
@@ -212,9 +218,3 @@
changed_when: false
when:
- yedit_output.changed
-
-- name: Delete temp directory
- file:
- name: "{{ certtemp.stdout }}"
- state: absent
- changed_when: False
diff --git a/playbooks/common/openshift-master/templates/openshift-ansible-catalog-console.js b/playbooks/common/openshift-master/templates/openshift-ansible-catalog-console.js
new file mode 100644
index 000000000..fd02325ba
--- /dev/null
+++ b/playbooks/common/openshift-master/templates/openshift-ansible-catalog-console.js
@@ -0,0 +1 @@
+window.OPENSHIFT_CONSTANTS.TEMPLATE_SERVICE_BROKER_ENABLED = {{ 'true' if (template_service_broker_install | default(True)) else 'false' }};
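
Because the console extension is now a template rather than a static file, the rendered flag follows the inventory. A hedged sketch, assuming template_service_broker_install is set explicitly (the default of True comes from the template expression itself):

    # inventory / group_vars sketch
    template_service_broker_install: false
    # which renders /etc/origin/master/openshift-ansible-catalog-console.js as:
    #   window.OPENSHIFT_CONSTANTS.TEMPLATE_SERVICE_BROKER_ENABLED = false;
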
diff --git a/playbooks/common/openshift-nfs/config.yml b/playbooks/common/openshift-nfs/config.yml
index ce672daf5..6ea77e00b 100644
--- a/playbooks/common/openshift-nfs/config.yml
+++ b/playbooks/common/openshift-nfs/config.yml
@@ -1,13 +1,15 @@
---
- name: NFS Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set NFS install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_nfs: "In Progress"
- aggregate: false
+ installer_phase_nfs:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- name: Configure nfs
hosts: oo_nfs_to_config
@@ -16,11 +18,13 @@
- role: openshift_storage_nfs
- name: NFS Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set NFS install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_nfs: "Complete"
- aggregate: false
+ installer_phase_nfs:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
diff --git a/playbooks/common/openshift-node/config.yml b/playbooks/common/openshift-node/config.yml
index 4f8f98aef..28e3c1b1b 100644
--- a/playbooks/common/openshift-node/config.yml
+++ b/playbooks/common/openshift-node/config.yml
@@ -1,13 +1,15 @@
---
- name: Node Install Checkpoint Start
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Node install 'In Progress'
+ run_once: true
set_stats:
data:
- installer_phase_node: "In Progress"
- aggregate: false
+ installer_phase_node:
+ status: "In Progress"
+ start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
- include: certificates.yml
@@ -24,11 +26,13 @@
- include: enable_excluders.yml
- name: Node Install Checkpoint End
- hosts: oo_all_hosts
+ hosts: all
gather_facts: false
tasks:
- name: Set Node install 'Complete'
+ run_once: true
set_stats:
data:
- installer_phase_node: "Complete"
- aggregate: false
+ installer_phase_node:
+ status: "Complete"
+ end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"