-rw-r--r--  .tito/packages/openshift-ansible                        |  2
-rw-r--r--  openshift-ansible.spec                                  | 25
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/post.yml    | 14
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/upgrade.yml |  2
-rw-r--r--  roles/openshift_version/tasks/main.yml                  |  4
-rw-r--r--  utils/docs/config.md                                    | 57
-rw-r--r--  utils/src/ooinstall/cli_installer.py                    |  9
-rw-r--r--  utils/src/ooinstall/oo_config.py                        | 78
-rw-r--r--  utils/test/cli_installer_tests.py                       |  6
-rw-r--r--  utils/test/fixture.py                                   |  1
-rw-r--r--  utils/test/oo_config_tests.py                           | 22
11 files changed, 136 insertions, 84 deletions
diff --git a/.tito/packages/openshift-ansible b/.tito/packages/openshift-ansible
index e53f089d5..8cbc56bc0 100644
--- a/.tito/packages/openshift-ansible
+++ b/.tito/packages/openshift-ansible
@@ -1 +1 @@
-3.3.13-1 ./
+3.3.14-1 ./
diff --git a/openshift-ansible.spec b/openshift-ansible.spec
index 0413a73d4..0b8a644fc 100644
--- a/openshift-ansible.spec
+++ b/openshift-ansible.spec
@@ -5,7 +5,7 @@
}
Name: openshift-ansible
-Version: 3.3.13
+Version: 3.3.14
Release: 1%{?dist}
Summary: Openshift and Atomic Enterprise Ansible
License: ASL 2.0
@@ -221,6 +221,29 @@ Atomic OpenShift Utilities includes
%changelog
+* Tue Aug 23 2016 Scott Dodson <sdodson@redhat.com> 3.3.14-1
+- a-o-i: Fix ansible_ssh_user question (smunilla@redhat.com)
+- Don't run node config upgrade hook if host is not a node.
+ (dgoodwin@redhat.com)
+- Link ca to ca-bundle when ca-bundle does not exist. (abutcher@redhat.com)
+- Better error if no OpenShift RPMs are available. (dgoodwin@redhat.com)
+- Revert "Due to problems with with_fileglob lets avoid using it for now"
+ (sdodson@redhat.com)
+- Replace some virsh commands by native virt_XXX ansible module
+ (lhuard@amadeus.com)
+- Add warning at end of 3.3 upgrade if pluginOrderOverride is found.
+ (dgoodwin@redhat.com)
+- a-o-i: Remove Legacy Config Upgrade (smunilla@redhat.com)
+- Fix etcd uninstall (sdodson@redhat.com)
+- Bug 1358951 - Error loading config, no such key: 'deployment' when using
+ previously valid answers file (smunilla@redhat.com)
+- Fix standalone Docker upgrade missing symlink. (dgoodwin@redhat.com)
+- Open OpenStack security group for the service node port range
+ (lhuard@amadeus.com)
+- Fix the “node on master” feature (lhuard@amadeus.com)
+- Due to problems with with_fileglob lets avoid using it for now
+ (sdodson@redhat.com)
+
* Fri Aug 19 2016 Troy Dawson <tdawson@redhat.com> 3.3.13-1
- Fix warnings in OpenStack provider with ansible 2.1 (lhuard@amadeus.com)
- Mount /sys rw (sdodson@redhat.com)
diff --git a/playbooks/common/openshift-cluster/upgrades/post.yml b/playbooks/common/openshift-cluster/upgrades/post.yml
index bd97d0b34..e43954453 100644
--- a/playbooks/common/openshift-cluster/upgrades/post.yml
+++ b/playbooks/common/openshift-cluster/upgrades/post.yml
@@ -56,3 +56,17 @@
{{ oc_cmd }} patch dc/docker-registry -n default -p
'{"spec":{"template":{"spec":{"containers":[{"name":"registry","image":"{{ registry_image }}"}]}}}}'
--api-version=v1
+
+# Check for warnings to be printed at the end of the upgrade:
+- name: Check for warnings
+ hosts: oo_masters_to_config
+ tasks:
+ # Check if any masters are using pluginOrderOverride and warn if so, only for 1.3/3.3 and beyond:
+ - command: >
+ grep pluginOrderOverride {{ openshift.common.config_base }}/master/master-config.yaml
+ register: grep_plugin_order_override
+ when: openshift.common.version_gte_3_3_or_1_3 | bool
+ failed_when: false
+ - name: Warn if pluginOrderOverride is in use in master-config.yaml
+ debug: msg="WARNING pluginOrderOverride is being deprecated in master-config.yaml, please see https://docs.openshift.com/enterprise/latest/architecture/additional_concepts/admission_controllers.html for more information."
+ when: not grep_plugin_order_override | skipped and grep_plugin_order_override.rc == 0
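For context (not part of the patch): the grep task above matches on the bare string, so any occurrence of the key in master-config.yaml triggers the warning. A hypothetical excerpt that would trip it, assuming the usual admissionConfig placement:

```yaml
# Hypothetical master-config.yaml excerpt (illustration only; plugin
# names are examples) -- the deprecated key the upgrade now warns about:
admissionConfig:
  pluginOrderOverride:
  - NamespaceLifecycle
  - OriginPodNodeEnvironment
```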
diff --git a/playbooks/common/openshift-cluster/upgrades/upgrade.yml b/playbooks/common/openshift-cluster/upgrades/upgrade.yml
index f7ff16fb8..be4e02c4a 100644
--- a/playbooks/common/openshift-cluster/upgrades/upgrade.yml
+++ b/playbooks/common/openshift-cluster/upgrades/upgrade.yml
@@ -188,7 +188,7 @@
- include: docker/upgrade.yml
when: l_docker_upgrade is defined and l_docker_upgrade | bool and not openshift.common.is_atomic | bool
- include: "{{ node_config_hook }}"
- when: node_config_hook is defined
+ when: node_config_hook is defined and inventory_hostname in groups.oo_nodes_to_config
- include: rpm_upgrade.yml
vars:
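A minimal standalone sketch of the guard introduced above, assuming an inventory that defines an oo_nodes_to_config group: the hook include is now skipped on any host outside that group, which is what "Don't run node config upgrade hook if host is not a node" refers to in the changelog.

```yaml
# Sketch only: the same group-membership guard, reduced to a debug task.
- hosts: oo_masters_to_config
  tasks:
  - debug: msg="would run node_config_hook here"
    when: node_config_hook is defined and inventory_hostname in groups.oo_nodes_to_config
```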
diff --git a/roles/openshift_version/tasks/main.yml b/roles/openshift_version/tasks/main.yml
index 6e5d2b22c..a3a99d248 100644
--- a/roles/openshift_version/tasks/main.yml
+++ b/roles/openshift_version/tasks/main.yml
@@ -73,6 +73,10 @@
msg: openshift_version role was unable to set openshift_pkg_version
when: openshift_pkg_version is not defined
+- fail:
+ msg: "No OpenShift version available, please ensure your systems are fully registered and have access to appropriate yum repositories."
+ when: not is_containerized | bool and openshift_version == '0.0'
+
# We can't map an openshift_release to full rpm version like we can with containers, make sure
# the rpm version we looked up matches the release requested and error out if not.
- fail:
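The new fail task aborts early on non-containerized hosts when RPM detection came back with the '0.0' sentinel. A related pre-flight check one could add (hypothetical, not part of the patch) would confirm the repositories actually offer an OpenShift RPM before the role runs:

```yaml
# Hypothetical pre-flight task; 'atomic-openshift' is the OSE package
# name assumed here -- adjust for origin installs.
- command: yum -q list available atomic-openshift
  register: rpm_check
  changed_when: false
  failed_when: rpm_check.rc != 0
```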
diff --git a/utils/docs/config.md b/utils/docs/config.md
index 2729f8d37..3677ffe2e 100644
--- a/utils/docs/config.md
+++ b/utils/docs/config.md
@@ -7,31 +7,38 @@ The default location this config file will be written to ~/.config/openshift/ins
## Example
```
-version: v1
+version: v2
variant: openshift-enterprise
-variant_version: 3.0
-ansible_ssh_user: root
-hosts:
-- ip: 10.0.0.1
- hostname: master-private.example.com
- public_ip: 24.222.0.1
- public_hostname: master.example.com
- master: true
- node: true
- containerized: true
- connect_to: 24.222.0.1
-- ip: 10.0.0.2
- hostname: node1-private.example.com
- public_ip: 24.222.0.2
- public_hostname: node1.example.com
- node: true
- connect_to: 10.0.0.2
-- ip: 10.0.0.3
- hostname: node2-private.example.com
- public_ip: 24.222.0.3
- public_hostname: node2.example.com
- node: true
- connect_to: 10.0.0.3
+variant_version: 3.3
+deployment:
+ ansible_ssh_user: root
+ hosts:
+ - connect_to: 24.222.0.1
+ ip: 10.0.0.1
+ hostname: master-private.example.com
+ public_ip: 24.222.0.1
+ public_hostname: master.example.com
+ roles:
+ - master
+ - node
+ containerized: true
+ - connect_to: 10.0.0.2
+ ip: 10.0.0.2
+ hostname: node1-private.example.com
+ public_ip: 24.222.0.2
+ public_hostname: node1.example.com
+ roles:
+ - node
+ - connect_to: 10.0.0.3
+ ip: 10.0.0.3
+ hostname: node2-private.example.com
+ public_ip: 24.222.0.3
+ public_hostname: node2.example.com
+ roles:
+ - node
+ roles:
+ master:
+ node:
```
## Primary Settings
@@ -76,5 +83,3 @@ Defines the user ansible will use to ssh to remote systems for gathering facts a
### ansible_log_path
Default: /tmp/ansible.log
-
-
diff --git a/utils/src/ooinstall/cli_installer.py b/utils/src/ooinstall/cli_installer.py
index d677ea8c8..2b070a3d2 100644
--- a/utils/src/ooinstall/cli_installer.py
+++ b/utils/src/ooinstall/cli_installer.py
@@ -127,7 +127,7 @@ OpenShift master service to use as the datastore. This can be later replaced
with a separate etcd instance, if required. If multiple masters are specified,
then a separate etcd cluster is configured with each master serving as a member.
-Any masters configured as part of this installation process are also
+Any masters configured as part of this installation process are also
configured as nodes. This enables the master to proxy to pods
from the API. By default, this node is unschedulable, but this can be changed
after installation with the 'oadm manage-node' command.
@@ -604,9 +604,8 @@ https://docs.openshift.com/enterprise/latest/admin_guide/install/prerequisites.h
confirm_continue(message)
click.clear()
- if not oo_cfg.settings.get('ansible_ssh_user', ''):
- oo_cfg.deployment.variables['ansible_ssh_user'] = \
- get_ansible_ssh_user()
+ if not oo_cfg.deployment.variables.get('ansible_ssh_user', False):
+ oo_cfg.deployment.variables['ansible_ssh_user'] = get_ansible_ssh_user()
click.clear()
if not oo_cfg.settings.get('variant', ''):
@@ -1038,7 +1037,7 @@ installation process.
The installation was successful!
If this is your first time installing please take a look at the Administrator
-Guide for advanced options related to routing, storage, authentication, and
+Guide for advanced options related to routing, storage, authentication, and
more:
http://docs.openshift.com/enterprise/latest/admin_guide/overview.html
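The ansible_ssh_user fix above reads the value from the deployment's variables rather than the legacy top-level settings. In the on-disk v2 config this corresponds to the nested placement shown in the config.md change earlier in this patch:

```yaml
# v2 layout (per the config.md example above); v1 kept this key at the
# top level of the file:
deployment:
  ansible_ssh_user: root
```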
diff --git a/utils/src/ooinstall/oo_config.py b/utils/src/ooinstall/oo_config.py
index 69ad2b4c5..f2990662e 100644
--- a/utils/src/ooinstall/oo_config.py
+++ b/utils/src/ooinstall/oo_config.py
@@ -165,22 +165,34 @@ class OOConfig(object):
self._set_defaults()
+# pylint: disable=too-many-branches
def _read_config(self):
+ def _print_read_config_error(error, path='the configuration file'):
+ message = """
+Error loading config. {}.
+
+See https://docs.openshift.com/enterprise/latest/install_config/install/quick_install.html#defining-an-installation-configuration-file
+for information on creating a configuration file or delete {} and re-run the installer.
+"""
+ print message.format(error, path)
+
try:
if os.path.exists(self.config_path):
with open(self.config_path, 'r') as cfgfile:
loaded_config = yaml.safe_load(cfgfile.read())
- # Use the presence of a Description as an indicator this is
- # a legacy config file:
- if 'Description' in self.settings:
- self._upgrade_legacy_config()
+ if not 'version' in loaded_config:
+ _print_read_config_error('Legacy configuration file found', self.config_path)
+ sys.exit(0)
+
+ if loaded_config.get('version', '') == 'v1':
+ loaded_config = self._upgrade_v1_config(loaded_config)
try:
host_list = loaded_config['deployment']['hosts']
role_list = loaded_config['deployment']['roles']
except KeyError as e:
- print "Error loading config, no such key: {}".format(e)
+ _print_read_config_error("No such key: {}".format(e), self.config_path)
sys.exit(0)
for setting in CONFIG_PERSIST_SETTINGS:
@@ -213,32 +225,36 @@ class OOConfig(object):
raise OOConfigFileError(
'Config file "{}" is not a valid YAML document'.format(self.config_path))
- def _upgrade_legacy_config(self):
- new_hosts = []
- remove_settings = ['validated_facts', 'Description', 'Name',
- 'Subscription', 'Vendor', 'Version', 'masters', 'nodes']
-
- if 'validated_facts' in self.settings:
- for key, value in self.settings['validated_facts'].iteritems():
- value['connect_to'] = key
- if 'masters' in self.settings and key in self.settings['masters']:
- value['master'] = True
- if 'nodes' in self.settings and key in self.settings['nodes']:
- value['node'] = True
- new_hosts.append(value)
- self.settings['hosts'] = new_hosts
-
- for s in remove_settings:
- if s in self.settings:
- del self.settings[s]
-
- # A legacy config implies openshift-enterprise 3.0:
- self.settings['variant'] = 'openshift-enterprise'
- self.settings['variant_version'] = '3.0'
-
- def _upgrade_v1_config(self):
- #TODO write code to upgrade old config
- return
+ def _upgrade_v1_config(self, config):
+ new_config_data = {}
+ new_config_data['deployment'] = {}
+ new_config_data['deployment']['hosts'] = []
+ new_config_data['deployment']['roles'] = {}
+ new_config_data['deployment']['variables'] = {}
+
+ role_list = {}
+
+ if config.get('ansible_ssh_user', False):
+ new_config_data['deployment']['ansible_ssh_user'] = config['ansible_ssh_user']
+
+ for host in config['hosts']:
+ host_props = {}
+ host_props['roles'] = []
+ host_props['connect_to'] = host['connect_to']
+
+ for prop in ['ip', 'public_ip', 'hostname', 'public_hostname', 'containerized', 'preconfigured']:
+ host_props[prop] = host.get(prop, None)
+
+ for role in ['master', 'node', 'master_lb', 'storage', 'etcd']:
+ if host.get(role, False):
+ host_props['roles'].append(role)
+ role_list[role] = ''
+
+ new_config_data['deployment']['hosts'].append(host_props)
+
+ new_config_data['deployment']['roles'] = role_list
+
+ return new_config_data
def _set_defaults(self):
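To make the v1-to-v2 rewrite above concrete, a hand-worked before/after (values invented for illustration): boolean role flags on each host become a roles list, and every role seen is also registered under deployment.roles.

```yaml
# v1 input (boolean role flags on each host):
version: v1
ansible_ssh_user: root
hosts:
- connect_to: 10.0.0.1
  ip: 10.0.0.1
  master: true
  node: true

# v2 structure produced by _upgrade_v1_config (host properties that were
# unset in v1 come back as null, because of host.get(prop, None)):
deployment:
  ansible_ssh_user: root
  hosts:
  - connect_to: 10.0.0.1
    ip: 10.0.0.1
    roles:
    - master
    - node
  roles:
    master: ''
    node: ''
```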
diff --git a/utils/test/cli_installer_tests.py b/utils/test/cli_installer_tests.py
index 0556e52a1..6d9d443ff 100644
--- a/utils/test/cli_installer_tests.py
+++ b/utils/test/cli_installer_tests.py
@@ -101,6 +101,7 @@ MOCK_FACTS_QUICKHA = {
# Missing connect_to on some hosts:
BAD_CONFIG = """
variant: %s
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -132,6 +133,7 @@ deployment:
QUICKHA_CONFIG = """
variant: %s
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -189,6 +191,7 @@ deployment:
QUICKHA_2_MASTER_CONFIG = """
variant: %s
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -238,6 +241,7 @@ deployment:
QUICKHA_CONFIG_REUSED_LB = """
variant: %s
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -281,6 +285,7 @@ deployment:
QUICKHA_CONFIG_NO_LB = """
variant: %s
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -323,6 +328,7 @@ deployment:
QUICKHA_CONFIG_PRECONFIGURED_LB = """
variant: %s
+version: v2
deployment:
ansible_ssh_user: root
hosts:
diff --git a/utils/test/fixture.py b/utils/test/fixture.py
index b2a0a7134..a883e5c56 100644
--- a/utils/test/fixture.py
+++ b/utils/test/fixture.py
@@ -12,6 +12,7 @@ SAMPLE_CONFIG = """
variant: %s
variant_version: 3.3
master_routingconfig_subdomain: example.com
+version: v2
deployment:
ansible_ssh_user: root
hosts:
diff --git a/utils/test/oo_config_tests.py b/utils/test/oo_config_tests.py
index f82d55b05..b5068cc14 100644
--- a/utils/test/oo_config_tests.py
+++ b/utils/test/oo_config_tests.py
@@ -13,6 +13,7 @@ from ooinstall.oo_config import OOConfig, Host, OOConfigInvalidHostError
SAMPLE_CONFIG = """
variant: openshift-enterprise
variant_version: 3.3
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -43,27 +44,9 @@ deployment:
node:
"""
-# Used to test automatic upgrading of config:
-LEGACY_CONFIG = """
-Description: This is the configuration file for the OpenShift Ansible-Based Installer.
-Name: OpenShift Ansible-Based Installer Configuration
-Subscription: {type: none}
-Vendor: OpenShift Community
-Version: 0.0.1
-ansible_config: /tmp/notreal/ansible.cfg
-ansible_inventory_directory: /tmp/notreal/.config/openshift/.ansible
-ansible_log_path: /tmp/ansible.log
-ansible_plugins_directory: /tmp/notreal/.python-eggs/ooinstall-3.0.0-py2.7.egg-tmp/ooinstall/ansible_plugins
-masters: [10.0.0.1]
-nodes: [10.0.0.2, 10.0.0.3]
-validated_facts:
- 10.0.0.1: {hostname: master-private.example.com, ip: 10.0.0.1, public_hostname: master.example.com, public_ip: 24.222.0.1}
- 10.0.0.2: {hostname: node1-private.example.com, ip: 10.0.0.2, public_hostname: node1.example.com, public_ip: 24.222.0.2}
- 10.0.0.3: {hostname: node2-private.example.com, ip: 10.0.0.3, public_hostname: node2.example.com, public_ip: 24.222.0.3}
-"""
-
CONFIG_INCOMPLETE_FACTS = """
+version: v2
deployment:
ansible_ssh_user: root
hosts:
@@ -91,6 +74,7 @@ deployment:
CONFIG_BAD = """
variant: openshift-enterprise
+version: v2
deployment:
ansible_ssh_user: root
hosts: