author     Joel Diaz <jdiaz@redhat.com>  2016-04-19 11:44:46 -0400
committer  Joel Diaz <jdiaz@redhat.com>  2016-04-19 11:44:46 -0400
commit     30c41f3bc24204bad68f30927ea59ec78821a43a (patch)
tree       42b3cbefb3e915385b17ecd5e9b81721b98a4772 /roles
parent     1bb3de7855096557a820845cef9efe4702093675 (diff)
parent     3681ab5fb2a39ccb06024e6ad514ad50df21f9d2 (diff)
Merge pull request #1734 from joelddiaz/tools-roles-move
cleanup roles after roles move to openshift-tools
Diffstat (limited to 'roles')
-rw-r--r--  roles/ansible/tasks/config.yml | 8
-rw-r--r--  roles/ansible/tasks/main.yml | 10
-rw-r--r--  roles/ansible_tower/tasks/main.yaml | 41
-rw-r--r--  roles/ansible_tower_cli/README.md | 50
-rw-r--r--  roles/ansible_tower_cli/defaults/main.yml | 2
-rw-r--r--  roles/ansible_tower_cli/handlers/main.yml | 2
-rw-r--r--  roles/ansible_tower_cli/meta/main.yml | 9
-rw-r--r--  roles/ansible_tower_cli/tasks/main.yml | 18
-rw-r--r--  roles/ansible_tower_cli/templates/tower_cli.cfg.j2 | 5
-rw-r--r--  roles/ansible_tower_cli/vars/main.yml | 2
-rw-r--r--  roles/chrony/README.md | 31
-rw-r--r--  roles/chrony/defaults/main.yml | 2
-rw-r--r--  roles/chrony/handlers/main.yml | 5
-rw-r--r--  roles/chrony/meta/main.yml | 18
-rw-r--r--  roles/chrony/tasks/main.yml | 30
-rw-r--r--  roles/chrony/templates/chrony.conf.j2 | 45
-rw-r--r--  roles/chrony/vars/main.yml | 2
-rw-r--r--  roles/copr_cli/README.md | 38
-rw-r--r--  roles/copr_cli/defaults/main.yml | 2
-rw-r--r--  roles/copr_cli/handlers/main.yml | 2
-rw-r--r--  roles/copr_cli/meta/main.yml | 14
-rw-r--r--  roles/copr_cli/tasks/main.yml | 3
-rw-r--r--  roles/copr_cli/vars/main.yml | 2
-rw-r--r--  roles/docker_storage_setup/README.md | 42
-rw-r--r--  roles/docker_storage_setup/defaults/main.yml | 2
-rwxr-xr-x  roles/docker_storage_setup/tasks/main.yml | 95
-rw-r--r--  roles/fluentd_master/meta/main.yml | 15
-rw-r--r--  roles/lib_dyn/README.md | 27
-rw-r--r--  roles/lib_dyn/library/dyn_record.py | 351
-rw-r--r--  roles/lib_dyn/meta/main.yml | 33
-rw-r--r--  roles/lib_dyn/tasks/main.yml | 7
-rw-r--r--  roles/lib_openshift_api/build/ansible/edit.py | 84
-rw-r--r--  roles/lib_openshift_api/build/ansible/obj.py | 139
-rw-r--r--  roles/lib_openshift_api/build/ansible/router.py | 142
-rw-r--r--  roles/lib_openshift_api/build/ansible/secret.py | 121
-rwxr-xr-x  roles/lib_openshift_api/build/generate.py | 64
-rw-r--r--  roles/lib_openshift_api/build/src/base.py | 300
-rw-r--r--  roles/lib_openshift_api/build/src/edit.py | 49
-rw-r--r--  roles/lib_openshift_api/build/src/obj.py | 78
-rw-r--r--  roles/lib_openshift_api/build/src/router.py | 152
-rw-r--r--  roles/lib_openshift_api/build/src/secret.py | 68
-rw-r--r--  roles/lib_openshift_api/build/test/README | 5
-rwxr-xr-x  roles/lib_openshift_api/build/test/deploymentconfig.yml | 120
-rwxr-xr-x  roles/lib_openshift_api/build/test/edit.yml | 53
-rw-r--r--  roles/lib_openshift_api/build/test/files/config.yml | 1
-rw-r--r--  roles/lib_openshift_api/build/test/files/dc-mod.yml | 124
-rw-r--r--  roles/lib_openshift_api/build/test/files/dc.yml | 120
-rw-r--r--  roles/lib_openshift_api/build/test/files/passwords.yml | 4
-rw-r--r--  roles/lib_openshift_api/build/test/files/router-mod.json | 30
-rw-r--r--  roles/lib_openshift_api/build/test/files/router.json | 29
l---------  roles/lib_openshift_api/build/test/roles | 1
-rwxr-xr-x  roles/lib_openshift_api/build/test/router.yml | 79
-rwxr-xr-x  roles/lib_openshift_api/build/test/secrets.yml | 81
-rwxr-xr-x  roles/lib_openshift_api/build/test/services.yml | 133
-rw-r--r--  roles/lib_openshift_api/library/oadm_router.py | 807
-rw-r--r--  roles/lib_openshift_api/library/oc_edit.py | 646
-rw-r--r--  roles/lib_openshift_api/library/oc_obj.py | 730
-rw-r--r--  roles/lib_openshift_api/library/oc_secret.py | 702
-rw-r--r--  roles/lib_timedatectl/library/timedatectl.py | 74
-rw-r--r--  roles/lib_yaml_editor/build/ansible/yedit.py | 69
-rwxr-xr-x  roles/lib_yaml_editor/build/generate.py | 43
-rw-r--r--  roles/lib_yaml_editor/build/src/base.py | 17
-rw-r--r--  roles/lib_yaml_editor/build/src/yedit.py | 209
-rw-r--r--  roles/lib_yaml_editor/build/test/foo.yml | 1
-rwxr-xr-x  roles/lib_yaml_editor/build/test/test.yaml | 15
-rw-r--r--  roles/lib_yaml_editor/library/yedit.py | 300
-rw-r--r--  roles/lib_zabbix/README.md | 38
-rw-r--r--  roles/lib_zabbix/library/__init__.py | 3
-rw-r--r--  roles/lib_zabbix/library/zbx_action.py | 690
-rw-r--r--  roles/lib_zabbix/library/zbx_application.py | 142
-rw-r--r--  roles/lib_zabbix/library/zbx_discoveryrule.py | 205
-rw-r--r--  roles/lib_zabbix/library/zbx_graph.py | 331
-rw-r--r--  roles/lib_zabbix/library/zbx_graphprototype.py | 331
-rw-r--r--  roles/lib_zabbix/library/zbx_host.py | 182
-rw-r--r--  roles/lib_zabbix/library/zbx_hostgroup.py | 116
-rw-r--r--  roles/lib_zabbix/library/zbx_httptest.py | 290
-rw-r--r--  roles/lib_zabbix/library/zbx_item.py | 303
-rw-r--r--  roles/lib_zabbix/library/zbx_itemprototype.py | 327
-rw-r--r--  roles/lib_zabbix/library/zbx_itservice.py | 263
-rw-r--r--  roles/lib_zabbix/library/zbx_mediatype.py | 168
-rw-r--r--  roles/lib_zabbix/library/zbx_template.py | 132
-rw-r--r--  roles/lib_zabbix/library/zbx_trigger.py | 234
-rw-r--r--  roles/lib_zabbix/library/zbx_triggerprototype.py | 177
-rw-r--r--  roles/lib_zabbix/library/zbx_user.py | 192
-rw-r--r--  roles/lib_zabbix/library/zbx_user_media.py | 283
-rw-r--r--  roles/lib_zabbix/library/zbx_usergroup.py | 228
-rw-r--r--  roles/lib_zabbix/tasks/create_template.yml | 146
-rw-r--r--  roles/lib_zabbix/tasks/create_user.yml | 11
-rw-r--r--  roles/os_ipv6_disable/tasks/main.yaml | 11
-rw-r--r--  roles/os_reboot_server/tasks/main.yaml | 16
-rw-r--r--  roles/os_utils/tasks/main.yaml | 17
-rw-r--r--  roles/os_zabbix/README.md | 40
-rw-r--r--  roles/os_zabbix/defaults/main.yml | 1
-rw-r--r--  roles/os_zabbix/handlers/main.yml | 1
-rw-r--r--  roles/os_zabbix/meta/main.yml | 9
-rw-r--r--  roles/os_zabbix/tasks/main.yml | 166
-rw-r--r--  roles/os_zabbix/vars/main.yml | 1
-rw-r--r--  roles/os_zabbix/vars/template_app_zabbix_agent.yml | 23
-rw-r--r--  roles/os_zabbix/vars/template_app_zabbix_server.yml | 412
-rw-r--r--  roles/os_zabbix/vars/template_aws.yml | 25
-rw-r--r--  roles/os_zabbix/vars/template_config_loop.yml | 14
-rw-r--r--  roles/os_zabbix/vars/template_docker.yml | 116
-rw-r--r--  roles/os_zabbix/vars/template_heartbeat.yml | 18
-rw-r--r--  roles/os_zabbix/vars/template_openshift_master.yml | 458
-rw-r--r--  roles/os_zabbix/vars/template_openshift_node.yml | 70
-rw-r--r--  roles/os_zabbix/vars/template_ops_tools.yml | 54
-rw-r--r--  roles/os_zabbix/vars/template_os_linux.yml | 314
-rw-r--r--  roles/os_zabbix/vars/template_performance_copilot.yml | 14
-rw-r--r--  roles/os_zabbix/vars/template_zagg_server.yml | 46
-rw-r--r--  roles/oso_host_monitoring/README.md | 50
-rw-r--r--  roles/oso_host_monitoring/defaults/main.yml | 1
-rw-r--r--  roles/oso_host_monitoring/handlers/main.yml | 6
-rw-r--r--  roles/oso_host_monitoring/meta/main.yml | 8
-rw-r--r--  roles/oso_host_monitoring/tasks/main.yml | 47
-rw-r--r--  roles/oso_host_monitoring/templates/docker-registry.ops.cfg.j2 | 1
-rw-r--r--  roles/oso_host_monitoring/templates/oso-rhel7-host-monitoring.service.j2 | 78
-rw-r--r--  roles/oso_host_monitoring/vars/main.yml | 1
-rw-r--r--  roles/oso_monitoring_tools/README.md | 54
-rw-r--r--  roles/oso_monitoring_tools/defaults/main.yml | 2
-rw-r--r--  roles/oso_monitoring_tools/handlers/main.yml | 2
-rw-r--r--  roles/oso_monitoring_tools/meta/main.yml | 8
-rw-r--r--  roles/oso_monitoring_tools/tasks/main.yml | 18
-rw-r--r--  roles/oso_monitoring_tools/vars/main.yml | 12
-rw-r--r--  roles/tito/README.md | 38
-rw-r--r--  roles/tito/defaults/main.yml | 2
-rw-r--r--  roles/tito/handlers/main.yml | 2
-rw-r--r--  roles/tito/meta/main.yml | 14
-rw-r--r--  roles/tito/tasks/main.yml | 2
-rw-r--r--  roles/tito/vars/main.yml | 2
-rw-r--r--  roles/yum_repos/README.md | 113
-rw-r--r--  roles/yum_repos/defaults/main.yml | 3
-rw-r--r--  roles/yum_repos/meta/main.yml | 8
-rw-r--r--  roles/yum_repos/tasks/main.yml | 48
-rw-r--r--  roles/yum_repos/templates/yumrepo.j2 | 18
134 files changed, 0 insertions, 13659 deletions
diff --git a/roles/ansible/tasks/config.yml b/roles/ansible/tasks/config.yml
deleted file mode 100644
index 5e361429b..000000000
--- a/roles/ansible/tasks/config.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- name: modify ansible.cfg
- lineinfile:
- dest: /etc/ansible/ansible.cfg
- backrefs: yes
- regexp: "^#?({{ item.option }})( *)="
- line: '\1\2= {{ item.value }}'
- with_items: cfg_options
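For context, this deleted task expected `cfg_options` to be a list of option/value pairs (main.yml below passes it in as `ans_config`); a minimal, purely illustrative invocation of the role would have looked something like this (host group and option values are assumptions, not taken from this repository):

    - hosts: tower
      roles:
      - role: ansible
        ans_config:
        - option: forks
          value: 20
        - option: host_key_checking
          value: False
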
diff --git a/roles/ansible/tasks/main.yml b/roles/ansible/tasks/main.yml
deleted file mode 100644
index ea14fb39a..000000000
--- a/roles/ansible/tasks/main.yml
+++ /dev/null
@@ -1,10 +0,0 @@
----
-# Install ansible client
-
-- name: Install Ansible
- action: "{{ ansible_pkg_mgr }} name=ansible state=present"
- when: not openshift.common.is_containerized | bool
-
-- include: config.yml
- vars:
- cfg_options: "{{ ans_config }}"
diff --git a/roles/ansible_tower/tasks/main.yaml b/roles/ansible_tower/tasks/main.yaml
deleted file mode 100644
index 36fc9b282..000000000
--- a/roles/ansible_tower/tasks/main.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-- name: install some useful packages
- action: "{{ ansible_pkg_mgr }} name={{ item }} state=present"
- with_items:
- - git
- - python-pip
- - unzip
- - python-psphere
- - ansible
- - telnet
- - ack
- - pylint
-
-- name: download Tower setup
- get_url: url=http://releases.ansible.com/ansible-tower/setup/ansible-tower-setup-2.1.1.tar.gz dest=/opt/ force=no
-
-- name: extract Tower
- unarchive: src=/opt/ansible-tower-setup-2.1.1.tar.gz dest=/opt copy=no creates=ansible-tower-setup-2.1.1
-
-- name: Open firewalld port for http
- firewalld: port=80/tcp permanent=true state=enabled
-
-- name: Open firewalld port for https
- firewalld: port=443/tcp permanent=true state=enabled
-
-- name: Open firewalld port for https
- firewalld: port=8080/tcp permanent=true state=enabled
-
-- name: Set (httpd_can_network_connect) flag on and keep it persistent across reboots
- seboolean: name=httpd_can_network_connect state=yes persistent=yes
-
-- name: Set (httpd_can_network_connect_db) flag on and keep it persistent across reboots
- seboolean: name=httpd_can_network_connect_db state=yes persistent=yes
-
-- name: Setup proot to allow access to /etc/tower/
- lineinfile:
- dest: /etc/tower/settings.py
- backrefs: yes
- regexp: "^({{ item.option }})( *)="
- line: '\1\2= {{ item.value }}'
- with_items: config_changes | default([], true)
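The final task above patches /etc/tower/settings.py the same way the ansible role's config.yml patches ansible.cfg, so `config_changes` would presumably be supplied as a list of option/value pairs, for example (the setting name and value are illustrative and version-dependent, not taken from this repository):

    vars:
      config_changes:
      - option: AWX_PROOT_SHOW_PATHS
        value: "['/etc/tower/']"
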
diff --git a/roles/ansible_tower_cli/README.md b/roles/ansible_tower_cli/README.md
deleted file mode 100644
index d2d68146f..000000000
--- a/roles/ansible_tower_cli/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-ansible_tower_cli
-==============
-
-Install ansible-tower-cli rpm.
-
-Requirements
-------------
-
-None
-
-Role Variables
---------------
-
-None
-
-Dependencies
-------------
-
-None
-
-Example Playbook
-----------------
-
-Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
-
- - hosts: servers
- roles:
- - roles/ansible_tower_cli
-
-License
--------
-
-Copyright 2012-2014 Red Hat, Inc., All rights reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author Information
-------------------
-
-openshift operations
diff --git a/roles/ansible_tower_cli/defaults/main.yml b/roles/ansible_tower_cli/defaults/main.yml
deleted file mode 100644
index bef66bbfd..000000000
--- a/roles/ansible_tower_cli/defaults/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# defaults file for ansible_tower_cli
diff --git a/roles/ansible_tower_cli/handlers/main.yml b/roles/ansible_tower_cli/handlers/main.yml
deleted file mode 100644
index 0ce873648..000000000
--- a/roles/ansible_tower_cli/handlers/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# handlers file for ansible_tower_cli
diff --git a/roles/ansible_tower_cli/meta/main.yml b/roles/ansible_tower_cli/meta/main.yml
deleted file mode 100644
index 41dd23720..000000000
--- a/roles/ansible_tower_cli/meta/main.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-galaxy_info:
- author: openshift operations name
- description: install ansible-tower-cli
- company: Red Hat, inc
- license: ASL 2.0
- min_ansible_version: 1.2
-dependencies: []
-
diff --git a/roles/ansible_tower_cli/tasks/main.yml b/roles/ansible_tower_cli/tasks/main.yml
deleted file mode 100644
index 0c5163b50..000000000
--- a/roles/ansible_tower_cli/tasks/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-- name: Install python-ansible-tower-cli
- action: "{{ ansible_pkg_mgr }} name=python-ansible-tower-cli state=present"
-
-- template:
- src: tower_cli.cfg.j2
- dest: /etc/tower/tower_cli.cfg
- owner: awx
- group: awx
- mode: 0640
-
-- file:
- state: link
- src: /etc/tower
- dest: /etc/awx
- owner: awx
- group: awx
-
diff --git a/roles/ansible_tower_cli/templates/tower_cli.cfg.j2 b/roles/ansible_tower_cli/templates/tower_cli.cfg.j2
deleted file mode 100644
index 5a0a275b0..000000000
--- a/roles/ansible_tower_cli/templates/tower_cli.cfg.j2
+++ /dev/null
@@ -1,5 +0,0 @@
-[general]
-host = {{ oo_towercli_host }}
-username = {{ oo_towercli_username }}
-password = {{ oo_towercli_password }}
-verify_ssl = true
diff --git a/roles/ansible_tower_cli/vars/main.yml b/roles/ansible_tower_cli/vars/main.yml
deleted file mode 100644
index a4c9abfb4..000000000
--- a/roles/ansible_tower_cli/vars/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# vars file for ansible_tower_cli
diff --git a/roles/chrony/README.md b/roles/chrony/README.md
deleted file mode 100644
index bf15d9669..000000000
--- a/roles/chrony/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-Role Name
-=========
-
-A role to configure chrony as the ntp client
-
-Requirements
-------------
-
-
-Role Variables
---------------
-
-chrony_ntp_servers: a list of ntp servers to use the chrony.conf file
-
-Dependencies
-------------
-
-roles/lib_timedatectl
-
-Example Playbook
-----------------
-
-License
--------
-
-Apache 2.0
-
-Author Information
-------------------
-
-Openshift Operations
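The README's Example Playbook section above was left empty; given the documented `chrony_ntp_servers` variable, a minimal invocation would presumably look like this (server names are illustrative):

    - hosts: servers
      roles:
      - role: chrony
        chrony_ntp_servers:
        - 0.rhel.pool.ntp.org
        - 1.rhel.pool.ntp.org
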
diff --git a/roles/chrony/defaults/main.yml b/roles/chrony/defaults/main.yml
deleted file mode 100644
index 95576e666..000000000
--- a/roles/chrony/defaults/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# defaults file for chrony
diff --git a/roles/chrony/handlers/main.yml b/roles/chrony/handlers/main.yml
deleted file mode 100644
index 1973c79e2..000000000
--- a/roles/chrony/handlers/main.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-- name: Restart chronyd
- service:
- name: chronyd
- state: restarted
diff --git a/roles/chrony/meta/main.yml b/roles/chrony/meta/main.yml
deleted file mode 100644
index 85595d7c3..000000000
--- a/roles/chrony/meta/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-galaxy_info:
- author: Openshift Operations
- description: Configure chrony as an ntp server
- company: Red Hat
- license: Apache 2.0
- min_ansible_version: 1.9.2
- platforms:
- - name: EL
- versions:
- - 7
- - name: Fedora
- versions:
- - all
- categories:
- - system
-dependencies:
-- roles/lib_timedatectl
diff --git a/roles/chrony/tasks/main.yml b/roles/chrony/tasks/main.yml
deleted file mode 100644
index fae6d8e4c..000000000
--- a/roles/chrony/tasks/main.yml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-- name: remove ntp package
- yum:
- name: ntp
- state: absent
-
-- name: ensure chrony package is installed
- yum:
- name: chrony
- state: installed
-
-- name: Install /etc/chrony.conf
- template:
- src: chrony.conf.j2
- dest: /etc/chrony.conf
- owner: root
- group: root
- mode: 0644
- notify:
- - Restart chronyd
-
-- name: enabled timedatectl set-ntp yes
- timedatectl:
- ntp: True
-
-- name:
- service:
- name: chronyd
- state: started
- enabled: yes
diff --git a/roles/chrony/templates/chrony.conf.j2 b/roles/chrony/templates/chrony.conf.j2
deleted file mode 100644
index de43b6364..000000000
--- a/roles/chrony/templates/chrony.conf.j2
+++ /dev/null
@@ -1,45 +0,0 @@
-# Use public servers from the pool.ntp.org project.
-# Please consider joining the pool (http://www.pool.ntp.org/join.html).
-{% for server in chrony_ntp_servers %}
-server {{ server }} iburst
-{% endfor %}
-
-# Ignore stratum in source selection.
-stratumweight 0
-
-# Record the rate at which the system clock gains/losses time.
-driftfile /var/lib/chrony/drift
-
-# Enable kernel RTC synchronization.
-rtcsync
-
-# In first three updates step the system clock instead of slew
-# if the adjustment is larger than 10 seconds.
-makestep 10 3
-
-# Allow NTP client access from local network.
-#allow 192.168/16
-
-# Listen for commands only on localhost.
-bindcmdaddress 127.0.0.1
-bindcmdaddress ::1
-
-# Serve time even if not synchronized to any NTP server.
-#local stratum 10
-
-keyfile /etc/chrony.keys
-
-# Specify the key used as password for chronyc.
-commandkey 1
-
-# Generate command key if missing.
-generatecommandkey
-
-# Disable logging of client accesses.
-noclientlog
-
-# Send a message to syslog if a clock adjustment is larger than 0.5 seconds.
-logchange 0.5
-
-logdir /var/log/chrony
-#log measurements statistics tracking
diff --git a/roles/chrony/vars/main.yml b/roles/chrony/vars/main.yml
deleted file mode 100644
index 061a21547..000000000
--- a/roles/chrony/vars/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# vars file for chrony
diff --git a/roles/copr_cli/README.md b/roles/copr_cli/README.md
deleted file mode 100644
index edc68454e..000000000
--- a/roles/copr_cli/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-Role Name
-=========
-
-This role manages Copr CLI.
-
-https://apps.fedoraproject.org/packages/copr-cli/
-
-Requirements
-------------
-
-None
-
-Role Variables
---------------
-
-None
-
-Dependencies
-------------
-
-None
-
-Example Playbook
-----------------
-
- - hosts: servers
- roles:
- - role: copr_cli
-
-License
--------
-
-Apache License, Version 2.0
-
-Author Information
-------------------
-
-Thomas Wiest
diff --git a/roles/copr_cli/defaults/main.yml b/roles/copr_cli/defaults/main.yml
deleted file mode 100644
index 3b8adf910..000000000
--- a/roles/copr_cli/defaults/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# defaults file for copr_cli
diff --git a/roles/copr_cli/handlers/main.yml b/roles/copr_cli/handlers/main.yml
deleted file mode 100644
index c3dec5a4c..000000000
--- a/roles/copr_cli/handlers/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# handlers file for copr_cli
diff --git a/roles/copr_cli/meta/main.yml b/roles/copr_cli/meta/main.yml
deleted file mode 100644
index f050281fd..000000000
--- a/roles/copr_cli/meta/main.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-galaxy_info:
- author: Thomas Wiest
- description: Manages Copr CLI
- company: Red Hat
- license: Apache License, Version 2.0
- min_ansible_version: 1.2
- platforms:
- - name: EL
- versions:
- - 7
- categories:
- - packaging
-dependencies: []
diff --git a/roles/copr_cli/tasks/main.yml b/roles/copr_cli/tasks/main.yml
deleted file mode 100644
index b732fb7a4..000000000
--- a/roles/copr_cli/tasks/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-- action: "{{ ansible_pkg_mgr }} name=copr-cli state=present"
- when: not openshift.common.is_containerized | bool
diff --git a/roles/copr_cli/vars/main.yml b/roles/copr_cli/vars/main.yml
deleted file mode 100644
index 1522c94d9..000000000
--- a/roles/copr_cli/vars/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# vars file for copr_cli
diff --git a/roles/docker_storage_setup/README.md b/roles/docker_storage_setup/README.md
deleted file mode 100644
index 6039b349a..000000000
--- a/roles/docker_storage_setup/README.md
+++ /dev/null
@@ -1,42 +0,0 @@
-docker_storage_setup
-=========
-This role converts docker from loopback to direct-lvm (the Red Hat recommended way to run docker).
-
-It requires the block device to be already provisioned and attached to the host.
-
- Notes:
- * This is NOT idempotent. Conversion needs to be done for it to be idempotent
- * This will remove /var/lib/docker!
- * You will need to re-deploy docker images
-
-Configure docker_storage_setup
-------------
-
-None
-
-Role Variables
---------------
-
-dss_docker_device: defaults to /dev/xvdb
-
-Dependencies
-------------
-
-None
-
-Example Playbook
-----------------
-
- - hosts: servers
- roles:
-    - { role: docker_storage_setup, dss_docker_device: '/dev/xvdb' }
-
-License
--------
-
-ASL 2.0
-
-Author Information
-------------------
-
-OpenShift operations, Red Hat, Inc
diff --git a/roles/docker_storage_setup/defaults/main.yml b/roles/docker_storage_setup/defaults/main.yml
deleted file mode 100644
index 5013aba97..000000000
--- a/roles/docker_storage_setup/defaults/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-dss_docker_device: /dev/xvdb
diff --git a/roles/docker_storage_setup/tasks/main.yml b/roles/docker_storage_setup/tasks/main.yml
deleted file mode 100755
index 21c80e36e..000000000
--- a/roles/docker_storage_setup/tasks/main.yml
+++ /dev/null
@@ -1,95 +0,0 @@
----
-- name: Ensure docker is installed
- action: "{{ ansible_pkg_mgr }} name={{ item }} state=present"
- with_items:
- - docker-1.8.2
-
-# Docker doesn't seem to start cleanly the first time run
-# when loopback (not directlvm) is configured. Putting in an
-# ignore errors, and then sleeping till it can come up cleanly
-- name: Try to start docker. This might fail (loopback startup issue)
- service:
- name: docker
- state: started
- ignore_errors: yes
-
-- name: Pause for 1 minute
- pause:
- seconds: 30
-
-- name: Ensure docker is started
- service:
- name: docker
- state: started
-
-- name: Determine if loopback
- shell: docker info | grep 'Data file:.*loop'
- register: loop_device_check
- ignore_errors: yes
-
-- debug:
- var: loop_device_check
-
-- name: fail if we don't detect loopback
- fail:
- msg: loopback not detected! Please investigate manually.
- when: loop_device_check.rc == 1
-
-- name: "check to see if {{ dss_docker_device }} exists"
- command: "test -e {{ dss_docker_device }}"
- register: docker_dev_check
- ignore_errors: yes
-
-- debug: var=docker_dev_check
-
-- name: "fail if {{ dss_docker_device }} doesn't exist"
- fail:
- msg: "{{ dss_docker_device }} doesn't exist. Please investigate"
- when: docker_dev_check.rc != 0
-
-- name: stop docker
- service:
- name: docker
- state: stopped
-
-- name: delete /var/lib/docker
- command: rm -rf /var/lib/docker
-
-- name: remove /var/lib/docker
- command: rm -rf /var/lib/docker
-
-- name: copy the docker-storage-setup config file
- copy:
- content: >
- DEVS={{ dss_docker_device }}\n
- VG=docker_vg
- dest: /etc/sysconfig/docker-storage-setup
- owner: root
- group: root
- mode: 0664
-
-- name: docker storage setup
- command: docker-storage-setup
- register: docker_storage_setup_output
-
-- debug:
- msg: "{{ docker_storage_setup_output }}"
-
-- name: extend the vg
- command: lvextend -l 90%VG /dev/docker_vg/docker-pool
- register: lvextend_output
-
-- debug:
- msg: "{{ lvextend_output }}"
-
-- name: start docker
- service:
- name: docker
- state: restarted
-
-- name: docker info
- command: docker info
- register: dockerinfo
-
-- debug:
- msg: "{{ dockerinfo }}"
diff --git a/roles/fluentd_master/meta/main.yml b/roles/fluentd_master/meta/main.yml
deleted file mode 100644
index 148bc377e..000000000
--- a/roles/fluentd_master/meta/main.yml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-galaxy_info:
- author: OpenShift Red Hat
- description: Fluentd Master
- company: Red Hat, Inc.
- license: Apache License, Version 2.0
- min_ansible_version: 1.9
- platforms:
- - name: EL
- versions:
- - 7
- categories:
- - monitoring
- dependencies:
- - openshift_facts
diff --git a/roles/lib_dyn/README.md b/roles/lib_dyn/README.md
deleted file mode 100644
index 1eec9f81c..000000000
--- a/roles/lib_dyn/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-lib_dyn
-=========
-
-A role containing the dyn_record module for managing DNS records through Dyn's
-API
-
-Requirements
-------------
-
-The module requires the `dyn` python module for interacting with the Dyn API.
-https://github.com/dyninc/dyn-python
-
-Example Playbook
-----------------
-
-To make sure the `dyn_record` module is available for use include the role
-before it is used.
-
- - hosts: servers
- roles:
- - lib_dyn
-
-License
--------
-
-Apache
-
diff --git a/roles/lib_dyn/library/dyn_record.py b/roles/lib_dyn/library/dyn_record.py
deleted file mode 100644
index 42d970060..000000000
--- a/roles/lib_dyn/library/dyn_record.py
+++ /dev/null
@@ -1,351 +0,0 @@
-#!/usr/bin/python
-#
-# (c) 2015, Russell Harrison <rharriso@redhat.com>
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=too-many-branches
-'''Ansible module to manage records in the Dyn Managed DNS service'''
-DOCUMENTATION = '''
----
-module: dyn_record
-version_added: "1.9"
-short_description: Manage records in the Dyn Managed DNS service.
-description:
- - "Manages DNS records via the REST API of the Dyn Managed DNS service. It
- - "handles records only; there is no manipulation of zones or account support"
- - "yet. See: U(https://help.dyn.com/dns-api-knowledge-base/)"
-options:
- state:
- description:
- -"Whether the record should be c(present) or c(absent). Optionally the"
- - "state c(list) can be used to return the current value of a record."
- required: true
- choices: [ 'present', 'absent', 'list' ]
- default: present
-
- customer_name:
- description:
- - "The Dyn customer name for your account. If not set the value of the"
- - "c(DYNECT_CUSTOMER_NAME) environment variable is used."
- required: false
- default: nil
-
- user_name:
- description:
- - "The Dyn user name to log in with. If not set the value of the"
- - "c(DYNECT_USER_NAME) environment variable is used."
- required: false
- default: null
-
- user_password:
- description:
- - "The Dyn user's password to log in with. If not set the value of the"
- - "c(DYNECT_PASSWORD) environment variable is used."
- required: false
- default: null
-
- zone:
- description:
- - "The DNS zone in which your record is located."
- required: true
- default: null
-
- record_fqdn:
- description:
- - "Fully qualified domain name of the record name to get, create, delete,"
- - "or update."
- required: true
- default: null
-
- record_type:
- description:
- - "Record type."
- required: true
- choices: [ 'A', 'AAAA', 'CNAME', 'PTR', 'TXT' ]
- default: null
-
- record_value:
- description:
- - "Record value. If record_value is not specified; no changes will be"
- - "made and the module will fail"
- required: false
- default: null
-
- record_ttl:
- description:
- - 'Record's "Time to live". Number of seconds the record remains cached'
- - 'in DNS servers or c(0) to use the default TTL for the zone.'
- - 'This option is mutually exclusive with use_zone_ttl'
- required: false
- default: 0
-
- use_zone_ttl:
- description:
- - 'Use the DYN Zone's Default TTL'
- - 'This option is mutually exclusive with record_ttl'
- required: false
- default: false
- mutually exclusive with: record_ttl
-
-notes:
- - The module makes a broad assumption that there will be only one record per "node" (FQDN).
- - This module returns record(s) in the "result" element when 'state' is set to 'present'. This value can be be registered and used in your playbooks.
-
-requirements: [ dyn ]
-author: "Russell Harrison"
-'''
-
-EXAMPLES = '''
-# Attempting to cname www.example.com to web1.example.com
-- name: Update CNAME record
- dyn_record:
- state: present
- record_fqdn: www.example.com
- zone: example.com
- record_type: CNAME
- record_value: web1.example.com
- record_ttl: 7200
-
-# Use the zones default TTL
-- name: Update CNAME record
- dyn_record:
- state: present
- record_fqdn: www.example.com
- zone: example.com
- record_type: CNAME
- record_value: web1.example.com
- use_zone_ttl: true
-
-- name: Update A record
- dyn_record:
- state: present
- record_fqdn: web1.example.com
- zone: example.com
- record_value: 10.0.0.10
- record_type: A
-'''
-
-try:
- IMPORT_ERROR = False
- from dyn.tm.session import DynectSession
- from dyn.tm.zones import Zone
- import dyn.tm.errors
- import os
-
-except ImportError as error:
- IMPORT_ERROR = str(error)
-
-# Each of the record types use a different method for the value.
-RECORD_PARAMS = {
- 'A' : {'value_param': 'address'},
- 'AAAA' : {'value_param': 'address'},
- 'CNAME' : {'value_param': 'cname'},
- 'PTR' : {'value_param': 'ptrdname'},
- 'TXT' : {'value_param': 'txtdata'}
-}
-
-# You'll notice that the value_param doesn't match the key (records_key)
-# in the dict returned from Dyn when doing a dyn_node.get_all_records()
-# This is a frustrating lookup dict to allow mapping to the RECORD_PARAMS
-# dict so we can lookup other values in it efficiently
-
-def get_record_type(record_key):
- '''Get the record type represented by the keys returned from get_any_records.'''
- return record_key.replace('_records', '').upper()
-
-def get_record_key(record_type):
- '''Get the key to look up records in the dictionary returned from get_any_records.
- example:
- 'cname_records'
- '''
- return record_type.lower() + '_records'
-
-def get_any_records(module, node):
- '''Get any records for a given node'''
- # Lets get a list of the A records for the node
- try:
- records = node.get_any_records()
- except dyn.tm.errors.DynectGetError as error:
- if 'Not in zone' in str(error):
- # The node isn't in the zone so we'll return an empty dictionary
- return {}
- else:
- # An unknown error happened so we'll need to return it.
- module.fail_json(msg='Unable to get records',
- error=str(error))
-
- # Return a dictionary of the record objects
- return records
-
-def get_record_values(records):
- '''Get the record values for each record returned by get_any_records.'''
- # This simply returns the values from a record
- ret_dict = {}
- for key in records.keys():
- record_type = get_record_type(key)
- params = [RECORD_PARAMS[record_type]['value_param'], 'ttl', 'zone', 'fqdn']
- ret_dict[key] = []
- properties = {}
- for elem in records[key]:
- for param in params:
- properties[param] = getattr(elem, param)
- ret_dict[key].append(properties)
-
- return ret_dict
-
-def compare_record_values(record_type_key, user_record_value, dyn_values):
- ''' Verify the user record_value exists in dyn'''
- rtype = get_record_type(record_type_key)
- for record in dyn_values[record_type_key]:
- if user_record_value in record[RECORD_PARAMS[rtype]['value_param']]:
- return True
-
- return False
-
-def compare_record_ttl(record_type_key, user_record_value, dyn_values, user_param_ttl):
- ''' Verify the ttls match for the record'''
- rtype = get_record_type(record_type_key)
- for record in dyn_values[record_type_key]:
- # find the right record
- if user_record_value in record[RECORD_PARAMS[rtype]['value_param']]:
- # Compare ttls from the records
- if int(record['ttl']) == user_param_ttl:
- return True
-
- return False
-
-def main():
- '''Ansible module for managing Dyn DNS records.'''
- module = AnsibleModule(
- argument_spec=dict(
- state=dict(default='present', choices=['present', 'absent', 'list']),
- customer_name=dict(default=os.environ.get('DYNECT_CUSTOMER_NAME', None), type='str'),
- user_name=dict(default=os.environ.get('DYNECT_USER_NAME', None), type='str', no_log=True),
- user_password=dict(default=os.environ.get('DYNECT_PASSWORD', None), type='str', no_log=True),
- zone=dict(required=True, type='str'),
- record_fqdn=dict(required=False, type='str'),
- record_type=dict(required=False, type='str', choices=[
- 'A', 'AAAA', 'CNAME', 'PTR', 'TXT']),
- record_value=dict(required=False, type='str'),
- record_ttl=dict(required=False, default=None, type='int'),
- use_zone_ttl=dict(required=False, default=False),
- ),
- required_together=(
- ['record_fqdn', 'record_value', 'record_ttl', 'record_type']
- ),
- mutually_exclusive=[('record_ttl', 'use_zone_ttl')]
- )
-
- if IMPORT_ERROR:
- module.fail_json(msg="Unable to import dyn module: https://pypi.python.org/pypi/dyn", error=IMPORT_ERROR)
-
- if module.params['record_ttl'] != None and int(module.params['record_ttl']) <= 0:
- module.fail_json(msg="Invalid Value for record TTL")
-
- # Start the Dyn session
- try:
- _ = DynectSession(module.params['customer_name'],
- module.params['user_name'],
- module.params['user_password'])
- except dyn.tm.errors.DynectAuthError as error:
- module.fail_json(msg='Unable to authenticate with Dyn', error=str(error))
-
- # Retrieve zone object
- try:
- dyn_zone = Zone(module.params['zone'])
- except dyn.tm.errors.DynectGetError as error:
- if 'No such zone' in str(error):
- module.fail_json(msg="Not a valid zone for this account", zone=module.params['zone'])
- else:
- module.fail_json(msg="Unable to retrieve zone", error=str(error))
-
- # To retrieve the node object we need to remove the zone name from the FQDN
- dyn_node_name = module.params['record_fqdn'].replace('.' + module.params['zone'], '')
-
- # Retrieve the zone object from dyn
- dyn_zone = Zone(module.params['zone'])
-
- # Retrieve the node object from dyn
- dyn_node = dyn_zone.get_node(node=dyn_node_name)
-
- # All states will need a list of the exiting records for the zone.
- dyn_node_records = get_any_records(module, dyn_node)
-
- dyn_values = get_record_values(dyn_node_records)
-
- if module.params['state'] == 'list':
- module.exit_json(changed=False, dyn_records=dyn_values)
-
- elif module.params['state'] == 'absent':
- # If there are any records present we'll want to delete the node.
- if dyn_node_records:
- dyn_node.delete()
-
- # Publish the zone since we've modified it.
- dyn_zone.publish()
-
- module.exit_json(changed=True, msg="Removed node %s from zone %s" % (dyn_node_name, module.params['zone']))
-
- module.exit_json(changed=False)
-
- elif module.params['state'] == 'present':
-
- # configure the TTL variable:
- # if use_zone_ttl, use the default TTL of the account.
- # if TTL == None, don't check it, set it as 0 (api default)
- # if TTL > 0, ensure this TTL is set
- if module.params['use_zone_ttl']:
- user_param_ttl = dyn_zone.ttl
- elif not module.params['record_ttl']:
- user_param_ttl = 0
- else:
- user_param_ttl = module.params['record_ttl']
-
- # First get a list of existing records for the node
- record_type_key = get_record_key(module.params['record_type'])
- user_record_value = module.params['record_value']
-
- # Check to see if the record is already in place before doing anything.
- if dyn_node_records and compare_record_values(record_type_key, user_record_value, dyn_values):
-
- if user_param_ttl == 0 or \
- compare_record_ttl(record_type_key, user_record_value, dyn_values, user_param_ttl):
- module.exit_json(changed=False, dyn_record=dyn_values)
-
- # Working on the assumption that there is only one record per
- # node we will first delete the node if there are any records before
- # creating the correct record
- if dyn_node_records:
- dyn_node.delete()
-
- # Now lets create the correct node entry.
- record = dyn_zone.add_record(dyn_node_name,
- module.params['record_type'],
- module.params['record_value'],
- user_param_ttl
- )
-
- # Now publish the zone since we've updated it.
- dyn_zone.publish()
-
- rmsg = "Created node [%s] " % dyn_node_name
- rmsg += "in zone: [%s]" % module.params['zone']
- module.exit_json(changed=True, msg=rmsg, dyn_record=get_record_values({record_type_key: [record]}))
-
- module.fail_json(msg="Unknown state: [%s]" % module.params['state'])
-
-# Ansible tends to need a wild card import so we'll use it here
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-from ansible.module_utils.basic import *
-if __name__ == '__main__':
- main()
diff --git a/roles/lib_dyn/meta/main.yml b/roles/lib_dyn/meta/main.yml
deleted file mode 100644
index 5475c6971..000000000
--- a/roles/lib_dyn/meta/main.yml
+++ /dev/null
@@ -1,33 +0,0 @@
----
-galaxy_info:
- author: Russell Harrison
- description: A role to provide the dyn_record module
- company: Red Hat, Inc.
- # If the issue tracker for your role is not on github, uncomment the
- # next line and provide a value
- # issue_tracker_url: http://example.com/issue/tracker
- license: Apache
- min_ansible_version: 1.9
- platforms:
- - name: EL
- versions:
- - 7
- #- name: Fedora
- # versions:
- # - 19
- # - 20
- # - 21
- # - 22
- # Below are all categories currently available. Just as with
- # the platforms above, uncomment those that apply to your role.
- categories:
- - networking
-dependencies: []
- # List your role dependencies here, one per line.
- # Be sure to remove the '[]' above if you add dependencies
- # to this list.
- #
- # No role dependencies at this time. The module contained in this role does
- # require the dyn python module.
- # https://pypi.python.org/pypi/dyn
-
diff --git a/roles/lib_dyn/tasks/main.yml b/roles/lib_dyn/tasks/main.yml
deleted file mode 100644
index 965962928..000000000
--- a/roles/lib_dyn/tasks/main.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-# tasks file for lib_dyn
-
-- name: Make sure python-dyn is installed
- yum: name=python-dyn state=present
- tags:
- - lib_dyn
diff --git a/roles/lib_openshift_api/build/ansible/edit.py b/roles/lib_openshift_api/build/ansible/edit.py
deleted file mode 100644
index 943fa47a6..000000000
--- a/roles/lib_openshift_api/build/ansible/edit.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# pylint: skip-file
-
-def main():
- '''
- ansible oc module for services
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- state=dict(default='present', type='str',
- choices=['present']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default=None, required=True, type='str'),
- kind=dict(required=True,
- type='str',
- choices=['dc', 'deploymentconfig',
- 'svc', 'service',
- 'scc', 'securitycontextconstraints',
- 'ns', 'namespace', 'project', 'projects',
- 'is', 'imagestream',
- 'istag', 'imagestreamtag',
- 'bc', 'buildconfig',
- 'routes',
- 'node',
- 'secret',
- ]),
- file_name=dict(default=None, type='str'),
- file_format=dict(default='yaml', type='str'),
- content=dict(default=None, required=True, type='dict'),
- force=dict(default=False, type='bool'),
- ),
- supports_check_mode=True,
- )
- ocedit = Edit(module.params['kind'],
- module.params['namespace'],
- module.params['name'],
- kubeconfig=module.params['kubeconfig'],
- verbose=module.params['debug'])
-
- state = module.params['state']
-
- api_rval = ocedit.get()
-
- ########
- # Create
- ########
- if not Utils.exists(api_rval['results'], module.params['name']):
- module.fail_json(msg=api_rval)
-
- ########
- # Update
- ########
- api_rval = ocedit.update(module.params['file_name'],
- module.params['content'],
- module.params['force'],
- module.params['file_format'])
-
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- if api_rval.has_key('updated') and not api_rval['updated']:
- module.exit_json(changed=False, results=api_rval, state="present")
-
- # return the created object
- api_rval = ocedit.get()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. This are required
-from ansible.module_utils.basic import *
-
-main()
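Going by the argument_spec above, a playbook task using the generated oc_edit module would presumably look like the following sketch (the resource and the edited key/value are illustrative assumptions):

    - oc_edit:
        kind: dc
        namespace: default
        name: router
        content:
          spec.replicas: 2
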
diff --git a/roles/lib_openshift_api/build/ansible/obj.py b/roles/lib_openshift_api/build/ansible/obj.py
deleted file mode 100644
index a14ac0e43..000000000
--- a/roles/lib_openshift_api/build/ansible/obj.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# pylint: skip-file
-
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible oc module for services
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- state=dict(default='present', type='str',
- choices=['present', 'absent', 'list']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default=None, type='str'),
- files=dict(default=None, type='list'),
- kind=dict(required=True,
- type='str',
- choices=['dc', 'deploymentconfig',
- 'svc', 'service',
- 'scc', 'securitycontextconstraints',
- 'ns', 'namespace', 'project', 'projects',
- 'is', 'imagestream',
- 'istag', 'imagestreamtag',
- 'bc', 'buildconfig',
- 'routes',
- 'node',
- 'secret',
- ]),
- delete_after=dict(default=False, type='bool'),
- content=dict(default=None, type='dict'),
- force=dict(default=False, type='bool'),
- ),
- mutually_exclusive=[["content", "files"]],
-
- supports_check_mode=True,
- )
- ocobj = OCObject(module.params['kind'],
- module.params['namespace'],
- module.params['name'],
- kubeconfig=module.params['kubeconfig'],
- verbose=module.params['debug'])
-
- state = module.params['state']
-
- api_rval = ocobj.get()
-
- #####
- # Get
- #####
- if state == 'list':
- module.exit_json(changed=False, results=api_rval['results'], state="list")
-
- if not module.params['name']:
- module.fail_json(msg='Please specify a name when state is absent|present.')
- ########
- # Delete
- ########
- if state == 'absent':
- if not Utils.exists(api_rval['results'], module.params['name']):
- module.exit_json(changed=False, state="absent")
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed a delete.')
-
- api_rval = ocobj.delete()
- module.exit_json(changed=True, results=api_rval, state="absent")
-
- if state == 'present':
- ########
- # Create
- ########
- if not Utils.exists(api_rval['results'], module.params['name']):
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed a create.')
-
- # Create it here
- api_rval = ocobj.create(module.params['files'], module.params['content'])
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- # return the created object
- api_rval = ocobj.get()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- # Remove files
- if module.params['files'] and module.params['delete_after']:
- Utils.cleanup(module.params['files'])
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- ########
- # Update
- ########
- # if a file path is passed, use it.
- update = ocobj.needs_update(module.params['files'], module.params['content'])
- if not isinstance(update, bool):
- module.fail_json(msg=update)
-
- # No changes
- if not update:
- if module.params['files'] and module.params['delete_after']:
- Utils.cleanup(module.params['files'])
-
- module.exit_json(changed=False, results=api_rval['results'][0], state="present")
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed an update.')
-
- api_rval = ocobj.update(module.params['files'],
- module.params['content'],
- module.params['force'])
-
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- # return the created object
- api_rval = ocobj.get()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. This are required
-from ansible.module_utils.basic import *
-
-main()
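Similarly, based on the argument_spec above, the generated oc_obj module would presumably be driven from a playbook like this (names and file paths are illustrative assumptions):

    - oc_obj:
        state: present
        kind: svc
        namespace: default
        name: my-service
        files:
        - /tmp/my-service.yml
        delete_after: True
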
diff --git a/roles/lib_openshift_api/build/ansible/router.py b/roles/lib_openshift_api/build/ansible/router.py
deleted file mode 100644
index 3b24c7b5e..000000000
--- a/roles/lib_openshift_api/build/ansible/router.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# pylint: skip-file
-
-def main():
- '''
- ansible oc module for secrets
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- state=dict(default='present', type='str',
- choices=['present', 'absent']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default='router', type='str'),
-
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- credentials=dict(default='/etc/origin/master/openshift-router.kubeconfig', type='str'),
- cert_file=dict(default=None, type='str'),
- key_file=dict(default=None, type='str'),
- image=dict(default=None, type='str'), #'openshift3/ose-${component}:${version}'
- latest_image=dict(default=False, type='bool'),
- labels=dict(default=None, type='list'),
- ports=dict(default=['80:80', '443:443'], type='list'),
- replicas=dict(default=1, type='int'),
- selector=dict(default=None, type='str'),
- service_account=dict(default='router', type='str'),
- router_type=dict(default='haproxy-router', type='str'),
- host_network=dict(default=True, type='bool'),
- # external host options
- external_host=dict(default=None, type='str'),
- external_host_vserver=dict(default=None, type='str'),
- external_host_insecure=dict(default=False, type='bool'),
- external_host_partition_path=dict(default=None, type='str'),
- external_host_username=dict(default=None, type='str'),
- external_host_password=dict(default=None, type='str'),
- external_host_private_key=dict(default=None, type='str'),
- # Metrics
- expose_metrics=dict(default=False, type='bool'),
- metrics_image=dict(default=None, type='str'),
- # Stats
- stats_user=dict(default=None, type='str'),
- stats_password=dict(default=None, type='str'),
- stats_port=dict(default=1936, type='int'),
-
- ),
- mutually_exclusive=[["router_type", "images"]],
-
- supports_check_mode=True,
- )
-
- rconfig = RouterConfig(module.params['name'],
- module.params['kubeconfig'],
- {'credentials': {'value': module.params['credentials'], 'include': True},
- 'default_cert': {'value': None, 'include': True},
- 'cert_file': {'value': module.params['cert_file'], 'include': False},
- 'key_file': {'value': module.params['key_file'], 'include': False},
- 'image': {'value': module.params['image'], 'include': True},
- 'latest_image': {'value': module.params['latest_image'], 'include': True},
- 'labels': {'value': module.params['labels'], 'include': True},
- 'ports': {'value': ','.join(module.params['ports']), 'include': True},
- 'replicas': {'value': module.params['replicas'], 'include': True},
- 'selector': {'value': module.params['selector'], 'include': True},
- 'service_account': {'value': module.params['service_account'], 'include': True},
- 'router_type': {'value': module.params['router_type'], 'include': False},
- 'host_network': {'value': module.params['host_network'], 'include': True},
- 'external_host': {'value': module.params['external_host'], 'include': True},
- 'external_host_vserver': {'value': module.params['external_host_vserver'],
- 'include': True},
- 'external_host_insecure': {'value': module.params['external_host_insecure'],
- 'include': True},
- 'external_host_partition_path': {'value': module.params['external_host_partition_path'],
- 'include': True},
- 'external_host_username': {'value': module.params['external_host_username'],
- 'include': True},
- 'external_host_password': {'value': module.params['external_host_password'],
- 'include': True},
- 'external_host_private_key': {'value': module.params['external_host_private_key'],
- 'include': True},
- 'expose_metrics': {'value': module.params['expose_metrics'], 'include': True},
- 'metrics_image': {'value': module.params['metrics_image'], 'include': True},
- 'stats_user': {'value': module.params['stats_user'], 'include': True},
- 'stats_password': {'value': module.params['stats_password'], 'include': True},
- 'stats_port': {'value': module.params['stats_port'], 'include': True},
- })
-
-
- ocrouter = Router(rconfig)
-
- state = module.params['state']
-
- ########
- # Delete
- ########
- if state == 'absent':
- if not ocrouter.exists():
- module.exit_json(changed=False, state="absent")
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed a delete.')
-
- api_rval = ocrouter.delete()
- module.exit_json(changed=True, results=api_rval, state="absent")
-
-
- if state == 'present':
- ########
- # Create
- ########
- if not ocrouter.exists():
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed a create.')
-
- api_rval = ocrouter.create()
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- ########
- # Update
- ########
- if not ocrouter.needs_update():
- module.exit_json(changed=False, state="present")
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed an update.')
-
- api_rval = ocrouter.update()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. This are required
-from ansible.module_utils.basic import *
-main()
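A hypothetical invocation of the generated oadm_router module, using only parameters declared in the argument_spec above (the replica count and selector value are illustrative):

    - oadm_router:
        state: present
        name: router
        namespace: default
        credentials: /etc/origin/master/openshift-router.kubeconfig
        replicas: 2
        selector: type=infra
        service_account: router
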
diff --git a/roles/lib_openshift_api/build/ansible/secret.py b/roles/lib_openshift_api/build/ansible/secret.py
deleted file mode 100644
index 8df7bbc64..000000000
--- a/roles/lib_openshift_api/build/ansible/secret.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# pylint: skip-file
-
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible oc module for secrets
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- state=dict(default='present', type='str',
- choices=['present', 'absent', 'list']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default=None, type='str'),
- files=dict(default=None, type='list'),
- delete_after=dict(default=False, type='bool'),
- contents=dict(default=None, type='list'),
- force=dict(default=False, type='bool'),
- ),
- mutually_exclusive=[["contents", "files"]],
-
- supports_check_mode=True,
- )
- occmd = Secret(module.params['namespace'],
- module.params['name'],
- kubeconfig=module.params['kubeconfig'],
- verbose=module.params['debug'])
-
- state = module.params['state']
-
- api_rval = occmd.get()
-
- #####
- # Get
- #####
- if state == 'list':
- module.exit_json(changed=False, results=api_rval['results'], state="list")
-
- if not module.params['name']:
- module.fail_json(msg='Please specify a name when state is absent|present.')
- ########
- # Delete
- ########
- if state == 'absent':
- if not Utils.exists(api_rval['results'], module.params['name']):
- module.exit_json(changed=False, state="absent")
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed a delete.')
-
- api_rval = occmd.delete()
- module.exit_json(changed=True, results=api_rval, state="absent")
-
-
- if state == 'present':
- if module.params['files']:
- files = module.params['files']
- elif module.params['contents']:
- files = Utils.create_files_from_contents(module.params['contents'])
- else:
- module.fail_json(msg='Either specify files or contents.')
-
- ########
- # Create
- ########
- if not Utils.exists(api_rval['results'], module.params['name']):
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed a create.')
-
- api_rval = occmd.create(module.params['files'], module.params['contents'])
-
- # Remove files
- if files and module.params['delete_after']:
- Utils.cleanup(files)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- ########
- # Update
- ########
- secret = occmd.prep_secret(module.params['files'], module.params['contents'])
-
- if secret['returncode'] != 0:
- module.fail_json(msg=secret)
-
- if Utils.check_def_equal(secret['results'], api_rval['results'][0]):
-
- # Remove files
- if files and module.params['delete_after']:
- Utils.cleanup(files)
-
- module.exit_json(changed=False, results=secret['results'], state="present")
-
- if module.check_mode:
- module.exit_json(change=False, msg='Would have performed an update.')
-
- api_rval = occmd.update(files, force=module.params['force'])
-
- # Remove files
- if secret and module.params['delete_after']:
- Utils.cleanup(files)
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. This are required
-from ansible.module_utils.basic import *
-
-main()
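And a hypothetical invocation of the generated oc_secret module; `contents` is a list of path/content pairs, matching what Utils.create_files_from_contents (in src/base.py below) expects (the secret name and values are illustrative):

    - oc_secret:
        state: present
        namespace: default
        name: example-secret
        contents:
        - path: username
          content: admin
        - path: password
          content: "{{ example_password }}"
        delete_after: True
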
diff --git a/roles/lib_openshift_api/build/generate.py b/roles/lib_openshift_api/build/generate.py
deleted file mode 100755
index 9fc1986f1..000000000
--- a/roles/lib_openshift_api/build/generate.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-'''
- Generate the openshift-ansible/roles/lib_openshift_cli/library/ modules.
-'''
-
-import os
-
-# pylint: disable=anomalous-backslash-in-string
-GEN_STR = "#!/usr/bin/env python\n" + \
- "# ___ ___ _ _ ___ ___ _ _____ ___ ___\n" + \
- "# / __| __| \| | __| _ \ /_\_ _| __| \\\n" + \
- "# | (_ | _|| .` | _|| / / _ \| | | _|| |) |\n" + \
- "# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____\n" + \
- "# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|\n" + \
- "# | |) | (_) | | .` | (_) || | | _|| |) | | | |\n" + \
- "# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|\n"
-
-OPENSHIFT_ANSIBLE_PATH = os.path.dirname(os.path.realpath(__file__))
-
-
-FILES = {'oc_obj.py': ['src/base.py',
- '../../lib_yaml_editor/build/src/yedit.py',
- 'src/obj.py',
- 'ansible/obj.py',
- ],
- 'oc_secret.py': ['src/base.py',
- '../../lib_yaml_editor/build/src/yedit.py',
- 'src/secret.py',
- 'ansible/secret.py',
- ],
- 'oc_edit.py': ['src/base.py',
- '../../lib_yaml_editor/build/src/yedit.py',
- 'src/edit.py',
- 'ansible/edit.py',
- ],
- 'oadm_router.py': ['src/base.py',
- '../../lib_yaml_editor/build/src/yedit.py',
- 'src/router.py',
- 'ansible/router.py',
- ],
- }
-
-
-def main():
- ''' combine the necessary files to create the ansible module '''
- library = os.path.join(OPENSHIFT_ANSIBLE_PATH, '..', 'library/')
- for fname, parts in FILES.items():
- with open(os.path.join(library, fname), 'w') as afd:
- afd.seek(0)
- afd.write(GEN_STR)
- for fpart in parts:
- with open(os.path.join(OPENSHIFT_ANSIBLE_PATH, fpart)) as pfd:
- # first line is pylint disable so skip it
- for idx, line in enumerate(pfd):
- if idx == 0 and 'skip-file' in line:
- continue
-
- afd.write(line)
-
-
-if __name__ == '__main__':
- main()
-
-
diff --git a/roles/lib_openshift_api/build/src/base.py b/roles/lib_openshift_api/build/src/base.py
deleted file mode 100644
index 257379d92..000000000
--- a/roles/lib_openshift_api/build/src/base.py
+++ /dev/null
@@ -1,300 +0,0 @@
-# pylint: skip-file
-'''
- OpenShiftCLI class that wraps the oc commands in a subprocess
-'''
-
-import atexit
-import json
-import os
-import shutil
-import subprocess
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- '''return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
-# pylint: disable=too-few-public-methods
-class OpenShiftCLI(object):
- ''' Class to wrap the command line tools '''
- def __init__(self,
- namespace,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftCLI '''
- self.namespace = namespace
- self.verbose = verbose
- self.kubeconfig = kubeconfig
-
- # Pylint allows only 5 arguments to be passed.
- # pylint: disable=too-many-arguments
- def _replace_content(self, resource, rname, content, force=False):
- ''' replace the current object with the content '''
- res = self._get(resource, rname)
- if not res['results']:
- return res
-
- fname = '/tmp/%s' % rname
- yed = Yedit(fname, res['results'][0])
- changes = []
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [fname])
-
- return self._replace(fname, force)
-
- def _replace(self, fname, force=False):
- '''return all pods '''
- cmd = ['-n', self.namespace, 'replace', '-f', fname]
- if force:
- cmd.append('--force')
- return self.openshift_cmd(cmd)
-
- def _create(self, fname):
- '''create a resource from a file '''
- return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
-
- def _delete(self, resource, rname):
- '''delete a resource by name '''
- return self.openshift_cmd(['delete', resource, rname, '-n', self.namespace])
-
- def _get(self, resource, rname=None):
- '''return a resource by name '''
- cmd = ['get', resource, '-o', 'json', '-n', self.namespace]
- if rname:
- cmd.append(rname)
-
- rval = self.openshift_cmd(cmd, output=True)
-
- # Ensure results are returned in an array
- if rval.has_key('items'):
- rval['results'] = rval['items']
- elif not isinstance(rval['results'], list):
- rval['results'] = [rval['results']]
-
- return rval
-
- def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json'):
- '''Base command for oc '''
- #cmds = ['/usr/bin/oc', '--config', self.kubeconfig]
- cmds = []
- if oadm:
- cmds = ['/usr/bin/oadm']
- else:
- cmds = ['/usr/bin/oc']
-
- cmds.extend(cmd)
-
- rval = {}
- results = ''
- err = None
-
- if self.verbose:
- print ' '.join(cmds)
-
- proc = subprocess.Popen(cmds,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env={'KUBECONFIG': self.kubeconfig})
-
- proc.wait()
- stdout = proc.stdout.read()
- stderr = proc.stderr.read()
- rval = {"returncode": proc.returncode,
- "results": results,
- "cmd": ' '.join(cmds),
- }
-
- if proc.returncode == 0:
- if output:
- if output_type == 'json':
- try:
- rval['results'] = json.loads(stdout)
- except ValueError as err:
- if "No JSON object could be decoded" in err.message:
- err = err.message
- elif output_type == 'raw':
- rval['results'] = stdout
-
- if self.verbose:
- print stdout
- print stderr
- print
-
- if err:
- rval.update({"err": err,
- "stderr": stderr,
- "stdout": stdout,
- "cmd": cmds
- })
-
- else:
- rval.update({"stderr": stderr,
- "stdout": stdout,
- "results": {},
- })
-
- return rval
-
-class Utils(object):
- ''' utilities for openshiftcli modules '''
- @staticmethod
- def create_file(rname, data, ftype=None):
- ''' create a file in tmp with name and contents'''
- path = os.path.join('/tmp', rname)
- with open(path, 'w') as fds:
- if ftype == 'yaml':
- fds.write(yaml.safe_dump(data, default_flow_style=False))
-
- elif ftype == 'json':
- fds.write(json.dumps(data))
- else:
- fds.write(data)
-
- # Register cleanup when module is done
- atexit.register(Utils.cleanup, [path])
- return path
-
- @staticmethod
- def create_files_from_contents(data):
- '''Turn an array of dict: filename, content into a files array'''
- files = []
-
- for sfile in data:
- path = Utils.create_file(sfile['path'], sfile['content'])
- files.append(path)
-
- return files
-
- @staticmethod
- def cleanup(files):
- '''Clean up on exit '''
- for sfile in files:
- if os.path.exists(sfile):
- if os.path.isdir(sfile):
- shutil.rmtree(sfile)
- elif os.path.isfile(sfile):
- os.remove(sfile)
-
-
- @staticmethod
- def exists(results, _name):
- ''' Check to see if the results include the name '''
- if not results:
- return False
-
-
- if Utils.find_result(results, _name):
- return True
-
- return False
-
- @staticmethod
- def find_result(results, _name):
- ''' Find the specified result by name'''
- rval = None
- for result in results:
- if result.has_key('metadata') and result['metadata']['name'] == _name:
- rval = result
- break
-
- return rval
-
- @staticmethod
- def get_resource_file(sfile, sfile_type='yaml'):
- ''' return the resource file contents '''
- contents = None
- with open(sfile) as sfd:
- contents = sfd.read()
-
- if sfile_type == 'yaml':
- contents = yaml.safe_load(contents)
- elif sfile_type == 'json':
- contents = json.loads(contents)
-
- return contents
-
- # Disabling too-many-branches. This is a yaml dictionary comparison function
- # pylint: disable=too-many-branches,too-many-return-statements
- @staticmethod
- def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
- ''' Given a user defined definition, compare it with the results given back by our query. '''
-
- # Currently these values are autogenerated and we do not need to check them
- skip = ['metadata', 'status']
- if skip_keys:
- skip.extend(skip_keys)
-
- for key, value in result_def.items():
- if key in skip:
- continue
-
- # Both are lists
- if isinstance(value, list):
- if not isinstance(user_def[key], list):
- if debug:
- print 'user_def[key] is not a list'
- return False
-
- for values in zip(user_def[key], value):
- if isinstance(values[0], dict) and isinstance(values[1], dict):
- if debug:
- print 'sending list - list'
- print type(values[0])
- print type(values[1])
- result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
- if not result:
- print 'list compare returned false'
- return False
-
- elif value != user_def[key]:
- if debug:
- print 'value should be identical'
- print value
- print user_def[key]
- return False
-
- # recurse on a dictionary
- elif isinstance(value, dict):
- if not isinstance(user_def[key], dict):
- if debug:
- print "dict returned false not instance of dict"
- return False
-
- # before passing ensure keys match
- api_values = set(value.keys()) - set(skip)
- user_values = set(user_def[key].keys()) - set(skip)
- if api_values != user_values:
- if debug:
- print api_values
- print user_values
- print "keys are not equal in dict"
- return False
-
- result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
- if not result:
- if debug:
- print "dict returned false"
- print result
- return False
-
- # Verify each key, value pair is the same
- else:
- if not user_def.has_key(key) or value != user_def[key]:
- if debug:
- print "value not equal; user_def does not have key"
- print value
- print user_def[key]
- return False
-
- return True
diff --git a/roles/lib_openshift_api/build/src/edit.py b/roles/lib_openshift_api/build/src/edit.py
deleted file mode 100644
index 7020ace47..000000000
--- a/roles/lib_openshift_api/build/src/edit.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# pylint: skip-file
-
-class Edit(OpenShiftCLI):
- ''' Class to wrap the oc command line tools
- '''
- # pylint: disable=too-many-arguments
- def __init__(self,
- kind,
- namespace,
- resource_name=None,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftOC '''
- super(Edit, self).__init__(namespace, kubeconfig)
- self.namespace = namespace
- self.kind = kind
- self.name = resource_name
- self.kubeconfig = kubeconfig
- self.verbose = verbose
-
- def get(self):
- '''return the resource by name '''
- return self._get(self.kind, self.name)
-
- def update(self, file_name, content, force=False, content_type='yaml'):
- '''run update '''
- if file_name:
- if content_type == 'yaml':
- data = yaml.load(open(file_name))
- elif content_type == 'json':
- data = json.loads(open(file_name).read())
-
- changes = []
- yed = Yedit(file_name, data)
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [file_name])
-
- return self._replace(file_name, force=force)
-
- return self._replace_content(self.kind, self.name, content, force=force)
-
-
diff --git a/roles/lib_openshift_api/build/src/obj.py b/roles/lib_openshift_api/build/src/obj.py
deleted file mode 100644
index 13aeba8e1..000000000
--- a/roles/lib_openshift_api/build/src/obj.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# pylint: skip-file
-
-class OCObject(OpenShiftCLI):
- ''' Class to wrap the oc command line tools '''
-
- # pylint allows 5. we need 6
- # pylint: disable=too-many-arguments
- def __init__(self,
- kind,
- namespace,
- rname=None,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftOC '''
- super(OCObject, self).__init__(namespace, kubeconfig)
- self.kind = kind
- self.namespace = namespace
- self.name = rname
- self.kubeconfig = kubeconfig
- self.verbose = verbose
-
- def get(self):
- '''return the object by name '''
- return self._get(self.kind, rname=self.name)
-
- def delete(self):
- '''delete the object by name '''
- return self._delete(self.kind, self.name)
-
- def create(self, files=None, content=None):
- '''create the object from a file or content '''
- if files:
- return self._create(files[0])
-
- return self._create(Utils.create_files_from_contents(content))
-
-
- # pylint: disable=too-many-function-args
- def update(self, files=None, content=None, force=False):
- '''run update dc
-
- This receives a list of file names and takes the first filename and calls replace.
- '''
- if files:
- return self._replace(files[0], force)
-
- return self.update_content(content, force)
-
- def update_content(self, content, force=False):
- '''update the dc with the content'''
- return self._replace_content(self.kind, self.name, content, force=force)
-
- def needs_update(self, files=None, content=None, content_type='yaml'):
- ''' check to see if we need to update '''
- objects = self.get()
- if objects['returncode'] != 0:
- return objects
-
- # pylint: disable=no-member
- data = None
- if files:
- data = Utils.get_resource_file(files[0], content_type)
-
- # if equal then no need. So not equal is True
- return not Utils.check_def_equal(data, objects['results'][0], skip_keys=None, debug=False)
- else:
- data = content
-
- for key, value in data.items():
- if key == 'metadata':
- continue
- if not objects['results'][0].has_key(key):
- return True
- if value != objects['results'][0][key]:
- return True
-
- return False
-
diff --git a/roles/lib_openshift_api/build/src/router.py b/roles/lib_openshift_api/build/src/router.py
deleted file mode 100644
index 69454d594..000000000
--- a/roles/lib_openshift_api/build/src/router.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# pylint: skip-file
-
-import time
-
-class RouterConfig(object):
- ''' RouterConfig is a DTO for the router. '''
- def __init__(self, rname, kubeconfig, router_options):
- self.name = rname
- self.kubeconfig = kubeconfig
- self._router_options = router_options
-
- @property
- def router_options(self):
- ''' return router options '''
- return self._router_options
-
- def to_option_list(self):
- ''' return all options as a string'''
- return RouterConfig.stringify(self.router_options)
-
- @staticmethod
- def stringify(options):
- ''' return hash as list of key value pairs '''
- rval = []
- for key, data in options.items():
- if data['include'] and data['value']:
- rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
-
- return rval
-
-class Router(OpenShiftCLI):
- ''' Class to wrap the oc command line tools '''
- def __init__(self,
- router_config,
- verbose=False):
- ''' Constructor for OpenshiftOC
-
- a router consists of 3 or more parts
- - dc/router
- - svc/router
- - endpoint/router
- '''
- super(Router, self).__init__('default', router_config.kubeconfig, verbose)
- self.rconfig = router_config
- self.verbose = verbose
- self.router_parts = [{'kind': 'dc', 'name': self.rconfig.name},
- {'kind': 'svc', 'name': self.rconfig.name},
- #{'kind': 'endpoints', 'name': self.rconfig.name},
- ]
- def get(self, filter_kind=None):
- ''' return the self.router_parts '''
- rparts = self.router_parts
- parts = []
- if filter_kind:
- rparts = [part for part in self.router_parts if filter_kind == part['kind']]
-
- for part in rparts:
- parts.append(self._get(part['kind'], rname=part['name']))
-
- return parts
-
- def exists(self):
- '''return True if all the router parts exist '''
- parts = self.get()
- for part in parts:
- if part['returncode'] != 0:
- return False
-
- return True
-
- def delete(self):
- '''delete the router parts '''
- parts = []
- for part in self.router_parts:
- parts.append(self._delete(part['kind'], part['name']))
-
- return parts
-
- def create(self, dryrun=False, output=False, output_type='json'):
- '''create the router '''
- # We need to create the pem file
- router_pem = '/tmp/router.pem'
- with open(router_pem, 'w') as rfd:
- rfd.write(open(self.rconfig.router_options['cert_file']['value']).read())
- rfd.write(open(self.rconfig.router_options['key_file']['value']).read())
-
- atexit.register(Utils.cleanup, [router_pem])
- self.rconfig.router_options['default_cert']['value'] = router_pem
-
- options = self.rconfig.to_option_list()
-
- cmd = ['router']
- cmd.extend(options)
- if dryrun:
- cmd.extend(['--dry-run=True', '-o', 'json'])
-
- results = self.openshift_cmd(cmd, oadm=True, output=output, output_type=output_type)
-
- return results
-
- def update(self):
- '''run update for the router. This performs a delete and then create '''
- parts = self.delete()
- if any([part['returncode'] != 0 for part in parts]):
- return parts
-
- # Ugly built in sleep here.
- time.sleep(15)
-
- return self.create()
-
- def needs_update(self, verbose=False):
- ''' check to see if we need to update '''
- dc_inmem = self.get(filter_kind='dc')[0]
- if dc_inmem['returncode'] != 0:
- return dc_inmem
-
- user_dc = self.create(dryrun=True, output=True, output_type='raw')
- if user_dc['returncode'] != 0:
- return user_dc
-
- # Since the output from oadm_router is returned as raw
- # we need to parse it. The first line is the stats_password
- user_dc_results = user_dc['results'].split('\n')
- # stats_password = user_dc_results[0]
-
- # Load the string back into json and get the newly created dc
- user_dc = json.loads('\n'.join(user_dc_results[1:]))['items'][0]
-
- # Router needs some exceptions.
- # We do not want to check the autogenerated password for stats admin
- if not self.rconfig.router_options['stats_password']['value']:
- for idx, env_var in enumerate(user_dc['spec']['template']['spec']['containers'][0]['env']):
- if env_var['name'] == 'STATS_PASSWORD':
- env_var['value'] = \
- dc_inmem['results'][0]['spec']['template']['spec']['containers'][0]['env'][idx]['value']
-
- # dry-run doesn't add the protocol to the ports section. We will manually do that.
- for idx, port in enumerate(user_dc['spec']['template']['spec']['containers'][0]['ports']):
- if not port.has_key('protocol'):
- port['protocol'] = 'TCP'
-
- # These are different when generating
- skip = ['dnsPolicy',
- 'terminationGracePeriodSeconds',
- 'restartPolicy', 'timeoutSeconds',
- 'livenessProbe', 'readinessProbe',
- 'terminationMessagePath',
- 'rollingParams',
- ]
-
- return not Utils.check_def_equal(user_dc, dc_inmem['results'][0], skip_keys=skip, debug=verbose)
diff --git a/roles/lib_openshift_api/build/src/secret.py b/roles/lib_openshift_api/build/src/secret.py
deleted file mode 100644
index 154716828..000000000
--- a/roles/lib_openshift_api/build/src/secret.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# pylint: skip-file
-
-class Secret(OpenShiftCLI):
- ''' Class to wrap the oc command line tools
- '''
- def __init__(self,
- namespace,
- secret_name=None,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftOC '''
- super(Secret, self).__init__(namespace, kubeconfig)
- self.namespace = namespace
- self.name = secret_name
- self.kubeconfig = kubeconfig
- self.verbose = verbose
-
- def get(self):
- '''return a secret by name '''
- return self._get('secrets', self.name)
-
- def delete(self):
- '''delete a secret by name'''
- return self._delete('secrets', self.name)
-
- def create(self, files=None, contents=None):
- '''Create a secret '''
- if not files:
- files = Utils.create_files_from_contents(contents)
-
- secrets = ["%s=%s" % (os.path.basename(sfile), sfile) for sfile in files]
- cmd = ['-n%s' % self.namespace, 'secrets', 'new', self.name]
- cmd.extend(secrets)
-
- return self.openshift_cmd(cmd)
-
- def update(self, files, force=False):
- '''run update secret
-
- This receives a list of file names and converts it into a secret.
- The secret is then written to disk and passed into the `oc replace` command.
- '''
- secret = self.prep_secret(files)
- if secret['returncode'] != 0:
- return secret
-
- sfile_path = '/tmp/%s' % self.name
- with open(sfile_path, 'w') as sfd:
- sfd.write(json.dumps(secret['results']))
-
- atexit.register(Utils.cleanup, [sfile_path])
-
- return self._replace(sfile_path, force=force)
-
- def prep_secret(self, files=None, contents=None):
- ''' return what the secret would look like if created
- This is accomplished by passing -ojson. This will most likely change in the future
- '''
- if not files:
- files = Utils.create_files_from_contents(contents)
-
- secrets = ["%s=%s" % (os.path.basename(sfile), sfile) for sfile in files]
- cmd = ['-ojson', '-n%s' % self.namespace, 'secrets', 'new', self.name]
- cmd.extend(secrets)
-
- return self.openshift_cmd(cmd, output=True)
-
-
diff --git a/roles/lib_openshift_api/build/test/README b/roles/lib_openshift_api/build/test/README
deleted file mode 100644
index af9f05b3d..000000000
--- a/roles/lib_openshift_api/build/test/README
+++ /dev/null
@@ -1,5 +0,0 @@
-After generate.py has run, the ansible modules will be placed under ../../../openshift-ansible/roles/lib_openshift_api/library.
-
-
-To run the tests, invoke each playbook like this:
-./services.yml -M ../../library
diff --git a/roles/lib_openshift_api/build/test/deploymentconfig.yml b/roles/lib_openshift_api/build/test/deploymentconfig.yml
deleted file mode 100755
index d041ab22a..000000000
--- a/roles/lib_openshift_api/build/test/deploymentconfig.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/ansible-playbook
----
-- hosts: "oo_clusterid_mwoodson:&oo_version_3:&oo_master_primary"
- gather_facts: no
- user: root
-
- post_tasks:
- - copy:
- dest: "/tmp/{{ item }}"
- src: "files/{{ item }}"
- with_items:
- - dc.yml
-
- - name: list dc
- oc_obj:
- kind: dc
- state: list
- namespace: default
- name: router
- register: dcout
-
- - debug:
- var: dcout
-
- - name: absent dc
- oc_obj:
- kind: dc
- state: absent
- namespace: default
- name: router
- register: dcout
-
- - debug:
- var: dcout
-
- - name: present dc
- oc_obj:
- kind: dc
- state: present
- namespace: default
- name: router
- files:
- - /tmp/dc.yml
- register: dcout
-
- - debug:
- var: dcout
-
- - name: dump router
- oc_obj:
- kind: dc
- state: list
- name: router
- register: routerout
-
- - name: write router file
- copy:
- dest: /tmp/dc-mod.json
- content: "{{ routerout.results[0] }}"
-
- - command: cat /tmp/dc-mod.json
- register: catout
-
- - debug:
- msg: "{{ catout }}"
-
- - command: "sed -i 's/: 80/: 81/g' /tmp/dc-mod.json"
- register: catout
-
- - name: present dc update
- oc_obj:
- kind: dc
- state: present
- namespace: default
- name: router
- files:
- - /tmp/dc-mod.json
- delete_after: True
- register: dcout
-
- - debug:
- var: dcout
-
- - include_vars: "files/dc-mod.yml"
-
- - name: absent dc
- oc_obj:
- kind: dc
- state: absent
- namespace: default
- name: router
- register: dcout
-
- - debug:
- var: dcout
-
- - name: present dc
- oc_obj:
- kind: dc
- state: present
- namespace: default
- name: router
- files:
- - /tmp/dc.yml
- delete_after: True
- register: dcout
-
- - name: present dc
- oc_obj:
- kind: dc
- state: present
- namespace: default
- name: router
- content: "{{ dc }}"
- delete_after: True
- register: dcout
-
- - debug:
- var: dcout
-
diff --git a/roles/lib_openshift_api/build/test/edit.yml b/roles/lib_openshift_api/build/test/edit.yml
deleted file mode 100755
index 9aa01303a..000000000
--- a/roles/lib_openshift_api/build/test/edit.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/ansible-playbook
----
-- hosts: "oo_clusterid_mwoodson:&oo_version_3:&oo_master_primary"
- gather_facts: no
- user: root
-
- post_tasks:
- - copy:
- dest: "/tmp/{{ item }}"
- src: "files/{{ item }}"
- with_items:
- - dc.yml
-
- - name: present dc
- oc_edit:
- kind: dc
- namespace: default
- name: router
- content:
- spec.template.spec.containers[0].ports[0].containerPort: 80
- spec.template.spec.containers[0].ports[0].hostPort: 80
- register: dcout
-
- - debug:
- var: dcout
-
- - name: present dc
- oc_edit:
- kind: dc
- namespace: default
- name: router
- content:
- spec.template.spec.containers[0].ports[0].containerPort: 81
- spec.template.spec.containers[0].ports[0].hostPort: 81
- file_format: yaml
- register: dcout
-
- - debug:
- var: dcout
-
- - name: present dc
- oc_edit:
- kind: dc
- namespace: default
- name: router
- content:
- spec.template.spec.containers[0].ports[0].containerPort: 80
- spec.template.spec.containers[0].ports[0].hostPort: 80
- file_format: yaml
- register: dcout
-
- - debug:
- var: dcout
diff --git a/roles/lib_openshift_api/build/test/files/config.yml b/roles/lib_openshift_api/build/test/files/config.yml
deleted file mode 100644
index c544c6fd4..000000000
--- a/roles/lib_openshift_api/build/test/files/config.yml
+++ /dev/null
@@ -1 +0,0 @@
-value: True
diff --git a/roles/lib_openshift_api/build/test/files/dc-mod.yml b/roles/lib_openshift_api/build/test/files/dc-mod.yml
deleted file mode 100644
index 6c700d6c7..000000000
--- a/roles/lib_openshift_api/build/test/files/dc-mod.yml
+++ /dev/null
@@ -1,124 +0,0 @@
-dc:
- path:
- dc-mod.yml
- content:
- apiVersion: v1
- kind: DeploymentConfig
- metadata:
- labels:
- router: router
- name: router
- namespace: default
- resourceVersion: "84016"
- selfLink: /oapi/v1/namespaces/default/deploymentconfigs/router
- uid: 48f8b9d9-ed42-11e5-9903-0a9a9d4e7f2b
- spec:
- replicas: 2
- selector:
- router: router
- strategy:
- resources: {}
- rollingParams:
- intervalSeconds: 1
- maxSurge: 0
- maxUnavailable: 25%
- timeoutSeconds: 600
- updatePercent: -25
- updatePeriodSeconds: 1
- type: Rolling
- template:
- metadata:
- creationTimestamp: null
- labels:
- router: router
- spec:
- containers:
- - env:
- - name: DEFAULT_CERTIFICATE
- - name: OPENSHIFT_CA_DATA
- value: |
- -----BEGIN CERTIFICATE-----
- MIIC5jCCAdCgAwIBAgIBATALBgkqhkiG9w0BAQswJjEkMCIGA1UEAwwbb3BlbnNo
- -----END CERTIFICATE-----
- - name: OPENSHIFT_CERT_DATA
- value: |
- -----BEGIN CERTIFICATE-----
- MIIDDTCCAfegAwIBAgIBCDALBgkqhkiG9w0BAQswJjEkMCIGA1UEAwwbb3BlbnNo
- -----END CERTIFICATE-----
- - name: OPENSHIFT_INSECURE
- value: "false"
- - name: OPENSHIFT_KEY_DATA
- value: |
- -----BEGIN RSA PRIVATE KEY-----
- MIIEogIBAAKCAQEA2lf49DrPHfCdCORcnIbmDVrx8yos7trjWdBvuledijyslRVR
- -----END RSA PRIVATE KEY-----
- - name: OPENSHIFT_MASTER
- value: https://internal.api.mwoodson.openshift.com
- - name: ROUTER_EXTERNAL_HOST_HOSTNAME
- - name: ROUTER_EXTERNAL_HOST_HTTPS_VSERVER
- - name: ROUTER_EXTERNAL_HOST_HTTP_VSERVER
- - name: ROUTER_EXTERNAL_HOST_INSECURE
- value: "false"
- - name: ROUTER_EXTERNAL_HOST_PARTITION_PATH
- - name: ROUTER_EXTERNAL_HOST_PASSWORD
- - name: ROUTER_EXTERNAL_HOST_PRIVKEY
- value: /etc/secret-volume/router.pem
- - name: ROUTER_EXTERNAL_HOST_USERNAME
- - name: ROUTER_SERVICE_NAME
- value: router
- - name: ROUTER_SERVICE_NAMESPACE
- value: default
- - name: STATS_PASSWORD
- value: ugCk6YBm4q
- - name: STATS_PORT
- value: "1936"
- - name: STATS_USERNAME
- value: admin
- image: openshift3/ose-haproxy-router:v3.1.1.6
- imagePullPolicy: IfNotPresent
- livenessProbe:
- httpGet:
- host: localhost
- path: /healthz
- port: 1936
- scheme: HTTP
- initialDelaySeconds: 10
- timeoutSeconds: 1
- name: router
- ports:
- - containerPort: 81
- hostPort: 81
- protocol: TCP
- - containerPort: 443
- hostPort: 443
- protocol: TCP
- - containerPort: 1936
- hostPort: 1936
- name: stats
- protocol: TCP
- readinessProbe:
- httpGet:
- host: localhost
- path: /healthz
- port: 1937
- scheme: HTTP
- timeoutSeconds: 1
- resources: {}
- terminationMessagePath: /dev/termination-log
- dnsPolicy: ClusterFirst
- hostNetwork: true
- nodeSelector:
- type: infra
- restartPolicy: Always
- securityContext: {}
- serviceAccount: router
- serviceAccountName: router
- terminationGracePeriodSeconds: 30
- triggers:
- - type: ConfigChange
- status:
- details:
- causes:
- - type: ConfigChange
- latestVersion: 1
-
diff --git a/roles/lib_openshift_api/build/test/files/dc.yml b/roles/lib_openshift_api/build/test/files/dc.yml
deleted file mode 100644
index 24f690ef4..000000000
--- a/roles/lib_openshift_api/build/test/files/dc.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-apiVersion: v1
-kind: DeploymentConfig
-metadata:
- creationTimestamp: 2016-04-01T15:23:29Z
- labels:
- router: router
- name: router
- namespace: default
- resourceVersion: "1338477"
- selfLink: /oapi/v1/namespaces/default/deploymentconfigs/router
- uid: b00c7eba-f81d-11e5-809b-0a581f893e3f
-spec:
- replicas: 2
- selector:
- router: router
- strategy:
- resources: {}
- rollingParams:
- intervalSeconds: 1
- maxSurge: 0
- maxUnavailable: 25%
- timeoutSeconds: 600
- updatePercent: -25
- updatePeriodSeconds: 1
- type: Rolling
- template:
- metadata:
- creationTimestamp: null
- labels:
- router: router
- spec:
- containers:
- - env:
- - name: DEFAULT_CERTIFICATE
- - name: OPENSHIFT_CA_DATA
- value: |
- -----BEGIN CERTIFICATE-----
- MIIC5jCCAdCgAwIBAgIBATALBgkqhkiG9w0BAQswJjEkMCIGA1UEAwwbb3BlbnNo
- -----END CERTIFICATE-----
- - name: OPENSHIFT_CERT_DATA
- value: |
- -----BEGIN CERTIFICATE-----
- MIIDDTCCAfegAwIBAgIBCDALBgkqhkiG9w0BAQswJjEkMCIGA1UEAwwbb3BlbnNo
- -----END CERTIFICATE-----
- - name: OPENSHIFT_INSECURE
- value: "false"
- - name: OPENSHIFT_KEY_DATA
- value: |
- -----BEGIN RSA PRIVATE KEY-----
- MIIEogIBAAKCAQEA2lf49DrPHfCdCORcnIbmDVrx8yos7trjWdBvuledijyslRVR
- -----END RSA PRIVATE KEY-----
- - name: OPENSHIFT_MASTER
- value: https://internal.api.mwoodson.openshift.com
- - name: ROUTER_EXTERNAL_HOST_HOSTNAME
- - name: ROUTER_EXTERNAL_HOST_HTTPS_VSERVER
- - name: ROUTER_EXTERNAL_HOST_HTTP_VSERVER
- - name: ROUTER_EXTERNAL_HOST_INSECURE
- value: "false"
- - name: ROUTER_EXTERNAL_HOST_PARTITION_PATH
- - name: ROUTER_EXTERNAL_HOST_PASSWORD
- - name: ROUTER_EXTERNAL_HOST_PRIVKEY
- value: /etc/secret-volume/router.pem
- - name: ROUTER_EXTERNAL_HOST_USERNAME
- - name: ROUTER_SERVICE_NAME
- value: router
- - name: ROUTER_SERVICE_NAMESPACE
- value: default
- - name: STATS_PASSWORD
- value: ugCk6YBm4q
- - name: STATS_PORT
- value: "1936"
- - name: STATS_USERNAME
- value: admin
- image: openshift3/ose-haproxy-router:v3.1.1.6
- imagePullPolicy: IfNotPresent
- livenessProbe:
- httpGet:
- host: localhost
- path: /healthz
- port: 1936
- scheme: HTTP
- initialDelaySeconds: 10
- timeoutSeconds: 1
- name: router
- ports:
- - containerPort: 80
- hostPort: 80
- protocol: TCP
- - containerPort: 443
- hostPort: 443
- protocol: TCP
- - containerPort: 1936
- hostPort: 1936
- name: stats
- protocol: TCP
- readinessProbe:
- httpGet:
- host: localhost
- path: /healthz
- port: 1936
- scheme: HTTP
- timeoutSeconds: 1
- resources: {}
- terminationMessagePath: /dev/termination-log
- dnsPolicy: ClusterFirst
- hostNetwork: true
- nodeSelector:
- type: infra
- restartPolicy: Always
- securityContext: {}
- serviceAccount: router
- serviceAccountName: router
- terminationGracePeriodSeconds: 30
- triggers:
- - type: ConfigChange
-status:
- details:
- causes:
- - type: ConfigChange
- latestVersion: 12
diff --git a/roles/lib_openshift_api/build/test/files/passwords.yml b/roles/lib_openshift_api/build/test/files/passwords.yml
deleted file mode 100644
index fadbf1d85..000000000
--- a/roles/lib_openshift_api/build/test/files/passwords.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-test1
-test2
-test3
-test4
diff --git a/roles/lib_openshift_api/build/test/files/router-mod.json b/roles/lib_openshift_api/build/test/files/router-mod.json
deleted file mode 100644
index 45e2e7c8d..000000000
--- a/roles/lib_openshift_api/build/test/files/router-mod.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
- "kind": "Service",
- "apiVersion": "v1",
- "metadata": {
- "name": "router",
- "namespace": "default",
- "labels": {
- "router": "router"
- }
- },
- "spec": {
- "ports": [
- {
- "name": "81-tcp",
- "protocol": "TCP",
- "port": 81,
- "targetPort": 81
- }
- ],
- "selector": {
- "router": "router"
- },
- "type": "ClusterIP",
- "sessionAffinity": "None"
- },
- "status": {
- "loadBalancer": {}
- }
-}
-
diff --git a/roles/lib_openshift_api/build/test/files/router.json b/roles/lib_openshift_api/build/test/files/router.json
deleted file mode 100644
index cad3c6f53..000000000
--- a/roles/lib_openshift_api/build/test/files/router.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "apiVersion": "v1",
- "kind": "Service",
- "metadata": {
- "labels": {
- "router": "router"
- },
- "name": "router",
- "namespace": "default"
- },
- "spec": {
- "ports": [
- {
- "name": "80-tcp",
- "port": 80,
- "protocol": "TCP",
- "targetPort": 80
- }
- ],
- "selector": {
- "router": "router"
- },
- "sessionAffinity": "None",
- "type": "ClusterIP"
- },
- "status": {
- "loadBalancer": {}
- }
-}
diff --git a/roles/lib_openshift_api/build/test/roles b/roles/lib_openshift_api/build/test/roles
deleted file mode 120000
index ae82aa9bb..000000000
--- a/roles/lib_openshift_api/build/test/roles
+++ /dev/null
@@ -1 +0,0 @@
-../../../../roles/ \ No newline at end of file
diff --git a/roles/lib_openshift_api/build/test/router.yml b/roles/lib_openshift_api/build/test/router.yml
deleted file mode 100755
index 7ab192b97..000000000
--- a/roles/lib_openshift_api/build/test/router.yml
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/ansible-playbook
----
-- hosts: "oo_clusterid_mwoodson:&oo_master_primary"
- gather_facts: no
- user: root
-
- tasks:
- - oadm_router:
- state: absent
- credentials: /etc/origin/master/openshift-router.kubeconfig
- service_account: router
- replicas: 2
- namespace: default
- selector: type=infra
- cert_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.crt
- key_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.key
- register: routerout
-
- - debug: var=routerout
-
- - pause:
- seconds: 10
-
- - oadm_router:
- credentials: /etc/origin/master/openshift-router.kubeconfig
- service_account: router
- replicas: 2
- namespace: default
- selector: type=infra
- cert_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.crt
- key_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.key
- register: routerout
-
- - debug: var=routerout
-
- - pause:
- seconds: 10
-
- - oadm_router:
- credentials: /etc/origin/master/openshift-router.kubeconfig
- service_account: router
- replicas: 2
- namespace: default
- selector: type=infra
- cert_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.crt
- key_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.key
- register: routerout
-
- - debug: var=routerout
-
- - pause:
- seconds: 10
-
- - oadm_router:
- credentials: /etc/origin/master/openshift-router.kubeconfig
- service_account: router
- replicas: 3
- namespace: default
- selector: type=test
- cert_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.crt
- key_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.key
- register: routerout
-
- - debug: var=routerout
-
- - pause:
- seconds: 10
-
- - oadm_router:
- credentials: /etc/origin/master/openshift-router.kubeconfig
- service_account: router
- replicas: 2
- namespace: default
- selector: type=infra
- cert_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.crt
- key_file: /etc/origin/master/named_certificates/12ab.mwoodson.openshiftapps.com.key
- register: routerout
-
- - debug: var=routerout
diff --git a/roles/lib_openshift_api/build/test/secrets.yml b/roles/lib_openshift_api/build/test/secrets.yml
deleted file mode 100755
index dddc05c4d..000000000
--- a/roles/lib_openshift_api/build/test/secrets.yml
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/ansible-playbook
----
-- hosts: "oo_clusterid_mwoodson:&oo_version_3:&oo_master_primary"
- gather_facts: no
- user: root
-
- post_tasks:
- - copy:
- dest: "/tmp/{{ item }}"
- src: "files/{{ item }}"
- with_items:
- - config.yml
- - passwords.yml
-
- - name: list secrets
- oc_secret:
- state: list
- namespace: default
- name: kenny
- register: secret_out
-
- - debug:
- var: secret_out
-
- - name: absent secrets
- oc_secret:
- state: absent
- namespace: default
- name: kenny
- register: secret_out
-
- - debug:
- var: secret_out
-
- - name: present secrets
- oc_secret:
- state: present
- namespace: default
- name: kenny
- files:
- - /tmp/config.yml
- - /tmp/passwords.yml
- delete_after: True
- register: secret_out
-
- - debug:
- var: secret_out
-
- - name: present secrets
- oc_secret:
- state: present
- namespace: default
- name: kenny
- contents:
- - path: config.yml
- content: "value: True\n"
- - path: passwords.yml
- content: "test1\ntest2\ntest3\ntest4\n"
- delete_after: True
- register: secret_out
-
- - debug:
- var: secret_out
-
- - name: present secrets update
- oc_secret:
- state: present
- namespace: default
- name: kenny
- contents:
- - path: config.yml
- content: "value: True\n"
- - path: passwords.yml
- content: "test1\ntest2\ntest3\ntest4\ntest5\n"
- delete_after: True
- force: True
- register: secret_out
-
- - debug:
- var: secret_out
-
diff --git a/roles/lib_openshift_api/build/test/services.yml b/roles/lib_openshift_api/build/test/services.yml
deleted file mode 100755
index a32e8d012..000000000
--- a/roles/lib_openshift_api/build/test/services.yml
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/ansible-playbook
----
-- hosts: "oo_clusterid_mwoodson:&oo_master_primary"
- gather_facts: no
- user: root
-
- roles:
- - roles/lib_yaml_editor
-
- tasks:
- - copy:
- dest: "/tmp/{{ item }}"
- src: "files/{{ item }}"
- with_items:
- - router.json
- - router-mod.json
-
- - name: list services
- oc_obj:
- kind: service
- state: list
- namespace: default
- name: router
- register: service_out
-
- - debug:
- var: service_out.results
-
- - name: absent service
- oc_obj:
- kind: service
- state: absent
- namespace: default
- name: router
- register: service_out
-
- - debug:
- var: service_out
-
- - name: present service create
- oc_obj:
- kind: service
- state: present
- namespace: default
- name: router
- files:
- - /tmp/router.json
- delete_after: True
- register: service_out
-
- - debug:
- var: service_out
-
- - name: dump router
- oc_obj:
- kind: service
- state: list
- name: router
- namespace: default
- register: routerout
-
- - name: write router file
- copy:
- dest: /tmp/router-mod.json
- content: "{{ routerout.results[0] }}"
-
- - command: cat /tmp/router-mod.json
- register: catout
-
- - debug:
- msg: "{{ catout }}"
-
- - command: "sed -i 's/80-tcp/81-tcp/g' /tmp/router-mod.json"
- register: catout
-
- - name: present service replace
- oc_obj:
- kind: service
- state: present
- namespace: default
- name: router
- files:
- - /tmp/router-mod.json
- #delete_after: True
- register: service_out
-
- - debug:
- var: service_out
-
- - name: list services
- oc_obj:
- kind: service
- state: list
- namespace: default
- name: router
- register: service_out
-
- - debug:
- var: service_out.results
-
- - set_fact:
- new_service: "{{ service_out.results[0] }}"
-
- - yedit:
- src: /tmp/routeryedit
- content: "{{ new_service }}"
- key: spec.ports
- value:
- - name: 80-tcp
- port: 80
- protocol: TCP
- targetPort: 80
-
- - yedit:
- src: /tmp/routeryedit
- state: list
- register: yeditout
-
- - debug:
- var: yeditout
-
- - name: present service replace
- oc_obj:
- kind: service
- state: present
- namespace: default
- name: router
- content: "{{ yeditout.results }}"
- delete_after: True
- register: service_out
-
- - debug:
- var: service_out
diff --git a/roles/lib_openshift_api/library/oadm_router.py b/roles/lib_openshift_api/library/oadm_router.py
deleted file mode 100644
index c6b45c14e..000000000
--- a/roles/lib_openshift_api/library/oadm_router.py
+++ /dev/null
@@ -1,807 +0,0 @@
-#!/usr/bin/env python
-# ___ ___ _ _ ___ ___ _ _____ ___ ___
-# / __| __| \| | __| _ \ /_\_ _| __| \
-# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
-# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
-# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
-# | |) | (_) | | .` | (_) || | | _|| |) | | | |
-# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
-'''
- OpenShiftCLI class that wraps the oc commands in a subprocess
-'''
-
-import atexit
-import json
-import os
-import shutil
-import subprocess
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- '''return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
-# pylint: disable=too-few-public-methods
-class OpenShiftCLI(object):
- ''' Class to wrap the command line tools '''
- def __init__(self,
- namespace,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftCLI '''
- self.namespace = namespace
- self.verbose = verbose
- self.kubeconfig = kubeconfig
-
- # Pylint allows only 5 arguments to be passed.
- # pylint: disable=too-many-arguments
- def _replace_content(self, resource, rname, content, force=False):
- ''' replace the current object with the content '''
- res = self._get(resource, rname)
- if not res['results']:
- return res
-
- fname = '/tmp/%s' % rname
- yed = Yedit(fname, res['results'][0])
- changes = []
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [fname])
-
- return self._replace(fname, force)
-
- def _replace(self, fname, force=False):
- '''replace a resource from a file '''
- cmd = ['-n', self.namespace, 'replace', '-f', fname]
- if force:
- cmd.append('--force')
- return self.openshift_cmd(cmd)
-
- def _create(self, fname):
- '''create a resource from a file '''
- return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
-
- def _delete(self, resource, rname):
- '''delete a resource by name '''
- return self.openshift_cmd(['delete', resource, rname, '-n', self.namespace])
-
- def _get(self, resource, rname=None):
- '''return a resource by name '''
- cmd = ['get', resource, '-o', 'json', '-n', self.namespace]
- if rname:
- cmd.append(rname)
-
- rval = self.openshift_cmd(cmd, output=True)
-
- # Ensure results are returned in an array
- if rval.has_key('items'):
- rval['results'] = rval['items']
- elif not isinstance(rval['results'], list):
- rval['results'] = [rval['results']]
-
- return rval
-
- def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json'):
- '''Base command for oc '''
- #cmds = ['/usr/bin/oc', '--config', self.kubeconfig]
- cmds = []
- if oadm:
- cmds = ['/usr/bin/oadm']
- else:
- cmds = ['/usr/bin/oc']
-
- cmds.extend(cmd)
-
- rval = {}
- results = ''
- err = None
-
- if self.verbose:
- print ' '.join(cmds)
-
- proc = subprocess.Popen(cmds,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env={'KUBECONFIG': self.kubeconfig})
-
- proc.wait()
- stdout = proc.stdout.read()
- stderr = proc.stderr.read()
- rval = {"returncode": proc.returncode,
- "results": results,
- "cmd": ' '.join(cmds),
- }
-
- if proc.returncode == 0:
- if output:
- if output_type == 'json':
- try:
- rval['results'] = json.loads(stdout)
- except ValueError as err:
- if "No JSON object could be decoded" in err.message:
- err = err.message
- elif output_type == 'raw':
- rval['results'] = stdout
-
- if self.verbose:
- print stdout
- print stderr
- print
-
- if err:
- rval.update({"err": err,
- "stderr": stderr,
- "stdout": stdout,
- "cmd": cmds
- })
-
- else:
- rval.update({"stderr": stderr,
- "stdout": stdout,
- "results": {},
- })
-
- return rval
-
-class Utils(object):
- ''' utilities for openshiftcli modules '''
- @staticmethod
- def create_file(rname, data, ftype=None):
- ''' create a file in tmp with name and contents'''
- path = os.path.join('/tmp', rname)
- with open(path, 'w') as fds:
- if ftype == 'yaml':
- fds.write(yaml.safe_dump(data, default_flow_style=False))
-
- elif ftype == 'json':
- fds.write(json.dumps(data))
- else:
- fds.write(data)
-
- # Register cleanup when module is done
- atexit.register(Utils.cleanup, [path])
- return path
-
- @staticmethod
- def create_files_from_contents(data):
- '''Turn an array of dict: filename, content into a files array'''
- files = []
-
- for sfile in data:
- path = Utils.create_file(sfile['path'], sfile['content'])
- files.append(path)
-
- return files
-
- @staticmethod
- def cleanup(files):
- '''Clean up on exit '''
- for sfile in files:
- if os.path.exists(sfile):
- if os.path.isdir(sfile):
- shutil.rmtree(sfile)
- elif os.path.isfile(sfile):
- os.remove(sfile)
-
-
- @staticmethod
- def exists(results, _name):
- ''' Check to see if the results include the name '''
- if not results:
- return False
-
-
- if Utils.find_result(results, _name):
- return True
-
- return False
-
- @staticmethod
- def find_result(results, _name):
- ''' Find the specified result by name'''
- rval = None
- for result in results:
- if result.has_key('metadata') and result['metadata']['name'] == _name:
- rval = result
- break
-
- return rval
-
- @staticmethod
- def get_resource_file(sfile, sfile_type='yaml'):
- ''' return the resource file contents '''
- contents = None
- with open(sfile) as sfd:
- contents = sfd.read()
-
- if sfile_type == 'yaml':
- contents = yaml.safe_load(contents)
- elif sfile_type == 'json':
- contents = json.loads(contents)
-
- return contents
-
- # Disabling too-many-branches. This is a yaml dictionary comparison function
- # pylint: disable=too-many-branches,too-many-return-statements
- @staticmethod
- def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
- ''' Given a user defined definition, compare it with the results given back by our query. '''
-
- # Currently these values are autogenerated and we do not need to check them
- skip = ['metadata', 'status']
- if skip_keys:
- skip.extend(skip_keys)
-
- for key, value in result_def.items():
- if key in skip:
- continue
-
- # Both are lists
- if isinstance(value, list):
- if not isinstance(user_def[key], list):
- if debug:
- print 'user_def[key] is not a list'
- return False
-
- for values in zip(user_def[key], value):
- if isinstance(values[0], dict) and isinstance(values[1], dict):
- if debug:
- print 'sending list - list'
- print type(values[0])
- print type(values[1])
- result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
- if not result:
- print 'list compare returned false'
- return False
-
- elif value != user_def[key]:
- if debug:
- print 'value should be identical'
- print value
- print user_def[key]
- return False
-
- # recurse on a dictionary
- elif isinstance(value, dict):
- if not isinstance(user_def[key], dict):
- if debug:
- print "dict returned false not instance of dict"
- return False
-
- # before passing ensure keys match
- api_values = set(value.keys()) - set(skip)
- user_values = set(user_def[key].keys()) - set(skip)
- if api_values != user_values:
- if debug:
- print api_values
- print user_values
- print "keys are not equal in dict"
- return False
-
- result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
- if not result:
- if debug:
- print "dict returned false"
- print result
- return False
-
- # Verify each key, value pair is the same
- else:
- if not user_def.has_key(key) or value != user_def[key]:
- if debug:
- print "value not equal; user_def does not have key"
- print value
- print user_def[key]
- return False
-
- return True
-
-class YeditException(Exception):
- ''' Exception class for Yedit '''
- pass
-
-class Yedit(object):
- ''' Class to modify yaml files '''
- re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
- re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
-
- def __init__(self, filename=None, content=None, content_type='yaml'):
- self.content = content
- self.filename = filename
- self.__yaml_dict = content
- self.content_type = content_type
- if self.filename and not self.content:
- self.load(content_type=self.content_type)
-
- @property
- def yaml_dict(self):
- ''' getter method for yaml_dict '''
- return self.__yaml_dict
-
- @yaml_dict.setter
- def yaml_dict(self, value):
- ''' setter method for yaml_dict '''
- self.__yaml_dict = value
-
- @staticmethod
- def remove_entry(data, key):
- ''' remove data at location key '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for remove
- # expected list entry
- if key_indexes[-1][0]:
- if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- del data[int(key_indexes[-1][0])]
- return True
-
- # expected dict entry
- elif key_indexes[-1][1]:
- if isinstance(data, dict):
- del data[key_indexes[-1][1]]
- return True
-
- @staticmethod
- def add_entry(data, key, item=None):
- ''' Add an item to a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}
- key = a.b
- item = 'd' results in {'a': {'b': 'd'}}
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- curr_data = data
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key:
- if isinstance(data, dict) and data.has_key(dict_key):
- data = data[dict_key]
- continue
-
- data[dict_key] = {}
- data = data[dict_key]
-
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for add
- # expected list entry
- if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- data[int(key_indexes[-1][0])] = item
-
- # expected dict entry
- elif key_indexes[-1][1] and isinstance(data, dict):
- data[key_indexes[-1][1]] = item
-
- return curr_data
-
- @staticmethod
- def get_entry(data, key):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- return data
-
- def write(self):
- ''' write to file '''
- if not self.filename:
- raise YeditException('Please specify a filename.')
-
- with open(self.filename, 'w') as yfd:
- yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
-
- def read(self):
- ''' read the file contents '''
- # check if it exists
- if not self.exists():
- return None
-
- contents = None
- with open(self.filename) as yfd:
- contents = yfd.read()
-
- return contents
-
- def exists(self):
- ''' return whether file exists '''
- if os.path.exists(self.filename):
- return True
-
- return False
-
- def load(self, content_type='yaml'):
- ''' return yaml file '''
- contents = self.read()
-
- if not contents:
- return None
-
- # check if it is yaml
- try:
- if content_type == 'yaml':
- self.yaml_dict = yaml.load(contents)
- elif content_type == 'json':
- self.yaml_dict = json.loads(contents)
- except yaml.YAMLError as _:
- # Error loading yaml or json
- return None
-
- return self.yaml_dict
-
- def get(self, key):
- ''' get a specified key'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- return entry
-
- def delete(self, key):
- ''' remove key from a dict'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
- if not entry:
- return (False, self.yaml_dict)
-
- result = Yedit.remove_entry(self.yaml_dict, key)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def put(self, key, value):
- ''' put key, value into a dict '''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- if entry == value:
- return (False, self.yaml_dict)
-
- result = Yedit.add_entry(self.yaml_dict, key, value)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def create(self, key, value):
- ''' create a yaml file '''
- if not self.exists():
- self.yaml_dict = {key: value}
- return (True, self.yaml_dict)
-
- return (False, self.yaml_dict)
-
-import time
-
-class RouterConfig(object):
- ''' RouterConfig is a DTO for the router. '''
- def __init__(self, rname, kubeconfig, router_options):
- self.name = rname
- self.kubeconfig = kubeconfig
- self._router_options = router_options
-
- @property
- def router_options(self):
- ''' return router options '''
- return self._router_options
-
- def to_option_list(self):
- ''' return all options as a string'''
- return RouterConfig.stringify(self.router_options)
-
- @staticmethod
- def stringify(options):
- ''' return hash as list of key value pairs '''
- rval = []
- for key, data in options.items():
- if data['include'] and data['value']:
- rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
-
- return rval
-
-class Router(OpenShiftCLI):
- ''' Class to wrap the oc command line tools '''
- def __init__(self,
- router_config,
- verbose=False):
- ''' Constructor for OpenshiftOC
-
- a router consists of 3 or more parts
- - dc/router
- - svc/router
- - endpoint/router
- '''
- super(Router, self).__init__('default', router_config.kubeconfig, verbose)
- self.rconfig = router_config
- self.verbose = verbose
- self.router_parts = [{'kind': 'dc', 'name': self.rconfig.name},
- {'kind': 'svc', 'name': self.rconfig.name},
- #{'kind': 'endpoints', 'name': self.rconfig.name},
- ]
- def get(self, filter_kind=None):
- ''' return the self.router_parts '''
- rparts = self.router_parts
- parts = []
- if filter_kind:
- rparts = [part for part in self.router_parts if filter_kind == part['kind']]
-
- for part in rparts:
- parts.append(self._get(part['kind'], rname=part['name']))
-
- return parts
-
- def exists(self):
- '''return True if all the router parts exist '''
- parts = self.get()
- for part in parts:
- if part['returncode'] != 0:
- return False
-
- return True
-
- def delete(self):
- '''delete the router parts '''
- parts = []
- for part in self.router_parts:
- parts.append(self._delete(part['kind'], part['name']))
-
- return parts
-
- def create(self, dryrun=False, output=False, output_type='json'):
- '''create the router '''
- # We need to create the pem file
- router_pem = '/tmp/router.pem'
- with open(router_pem, 'w') as rfd:
- rfd.write(open(self.rconfig.router_options['cert_file']['value']).read())
- rfd.write(open(self.rconfig.router_options['key_file']['value']).read())
-
- atexit.register(Utils.cleanup, [router_pem])
- self.rconfig.router_options['default_cert']['value'] = router_pem
-
- options = self.rconfig.to_option_list()
-
- cmd = ['router']
- cmd.extend(options)
- if dryrun:
- cmd.extend(['--dry-run=True', '-o', 'json'])
-
- results = self.openshift_cmd(cmd, oadm=True, output=output, output_type=output_type)
-
- return results
-
- def update(self):
- '''run update for the router. This performs a delete and then create '''
- parts = self.delete()
- if any([part['returncode'] != 0 for part in parts]):
- return parts
-
- # Ugly built in sleep here.
- time.sleep(15)
-
- return self.create()
-
- def needs_update(self, verbose=False):
- ''' check to see if we need to update '''
- dc_inmem = self.get(filter_kind='dc')[0]
- if dc_inmem['returncode'] != 0:
- return dc_inmem
-
- user_dc = self.create(dryrun=True, output=True, output_type='raw')
- if user_dc['returncode'] != 0:
- return user_dc
-
- # Since the output from oadm_router is returned as raw
- # we need to parse it. The first line is the stats_password
- user_dc_results = user_dc['results'].split('\n')
- # stats_password = user_dc_results[0]
-
- # Load the string back into json and get the newly created dc
- user_dc = json.loads('\n'.join(user_dc_results[1:]))['items'][0]
-
- # Router needs some exceptions.
- # We do not want to check the autogenerated password for stats admin
- if not self.rconfig.router_options['stats_password']['value']:
- for idx, env_var in enumerate(user_dc['spec']['template']['spec']['containers'][0]['env']):
- if env_var['name'] == 'STATS_PASSWORD':
- env_var['value'] = \
- dc_inmem['results'][0]['spec']['template']['spec']['containers'][0]['env'][idx]['value']
-
- # dry-run doesn't add the protocol to the ports section. We will manually do that.
- for idx, port in enumerate(user_dc['spec']['template']['spec']['containers'][0]['ports']):
- if not port.has_key('protocol'):
- port['protocol'] = 'TCP'
-
- # These are different when generating
- skip = ['dnsPolicy',
- 'terminationGracePeriodSeconds',
- 'restartPolicy', 'timeoutSeconds',
- 'livenessProbe', 'readinessProbe',
- 'terminationMessagePath',
- 'rollingParams',
- ]
-
- return not Utils.check_def_equal(user_dc, dc_inmem['results'][0], skip_keys=skip, debug=verbose)
-
-def main():
- '''
- ansible oadm module for router
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- state=dict(default='present', type='str',
- choices=['present', 'absent']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default='router', type='str'),
-
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- credentials=dict(default='/etc/origin/master/openshift-router.kubeconfig', type='str'),
- cert_file=dict(default=None, type='str'),
- key_file=dict(default=None, type='str'),
- image=dict(default=None, type='str'), #'openshift3/ose-${component}:${version}'
- latest_image=dict(default=False, type='bool'),
- labels=dict(default=None, type='list'),
- ports=dict(default=['80:80', '443:443'], type='list'),
- replicas=dict(default=1, type='int'),
- selector=dict(default=None, type='str'),
- service_account=dict(default='router', type='str'),
- router_type=dict(default='haproxy-router', type='str'),
- host_network=dict(default=True, type='bool'),
- # external host options
- external_host=dict(default=None, type='str'),
- external_host_vserver=dict(default=None, type='str'),
- external_host_insecure=dict(default=False, type='bool'),
- external_host_partition_path=dict(default=None, type='str'),
- external_host_username=dict(default=None, type='str'),
- external_host_password=dict(default=None, type='str'),
- external_host_private_key=dict(default=None, type='str'),
- # Metrics
- expose_metrics=dict(default=False, type='bool'),
- metrics_image=dict(default=None, type='str'),
- # Stats
- stats_user=dict(default=None, type='str'),
- stats_password=dict(default=None, type='str'),
- stats_port=dict(default=1936, type='int'),
-
- ),
- mutually_exclusive=[["router_type", "images"]],
-
- supports_check_mode=True,
- )
-
- rconfig = RouterConfig(module.params['name'],
- module.params['kubeconfig'],
- {'credentials': {'value': module.params['credentials'], 'include': True},
- 'default_cert': {'value': None, 'include': True},
- 'cert_file': {'value': module.params['cert_file'], 'include': False},
- 'key_file': {'value': module.params['key_file'], 'include': False},
- 'image': {'value': module.params['image'], 'include': True},
- 'latest_image': {'value': module.params['latest_image'], 'include': True},
- 'labels': {'value': module.params['labels'], 'include': True},
- 'ports': {'value': ','.join(module.params['ports']), 'include': True},
- 'replicas': {'value': module.params['replicas'], 'include': True},
- 'selector': {'value': module.params['selector'], 'include': True},
- 'service_account': {'value': module.params['service_account'], 'include': True},
- 'router_type': {'value': module.params['router_type'], 'include': False},
- 'host_network': {'value': module.params['host_network'], 'include': True},
- 'external_host': {'value': module.params['external_host'], 'include': True},
- 'external_host_vserver': {'value': module.params['external_host_vserver'],
- 'include': True},
- 'external_host_insecure': {'value': module.params['external_host_insecure'],
- 'include': True},
- 'external_host_partition_path': {'value': module.params['external_host_partition_path'],
- 'include': True},
- 'external_host_username': {'value': module.params['external_host_username'],
- 'include': True},
- 'external_host_password': {'value': module.params['external_host_password'],
- 'include': True},
- 'external_host_private_key': {'value': module.params['external_host_private_key'],
- 'include': True},
- 'expose_metrics': {'value': module.params['expose_metrics'], 'include': True},
- 'metrics_image': {'value': module.params['metrics_image'], 'include': True},
- 'stats_user': {'value': module.params['stats_user'], 'include': True},
- 'stats_password': {'value': module.params['stats_password'], 'include': True},
- 'stats_port': {'value': module.params['stats_port'], 'include': True},
- })
-
-
- ocrouter = Router(rconfig)
-
- state = module.params['state']
-
- ########
- # Delete
- ########
- if state == 'absent':
- if not ocrouter.exists():
- module.exit_json(changed=False, state="absent")
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed a delete.')
-
- api_rval = ocrouter.delete()
- module.exit_json(changed=True, results=api_rval, state="absent")
-
-
- if state == 'present':
- ########
- # Create
- ########
- if not ocrouter.exists():
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed a create.')
-
- api_rval = ocrouter.create()
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- ########
- # Update
- ########
- if not ocrouter.needs_update():
- module.exit_json(changed=False, state="present")
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed an update.')
-
- api_rval = ocrouter.update()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-main()
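For context on the module removed above: its parameters are gathered into an options dict of {'value': ..., 'include': ...} entries and ultimately flattened into an `oadm router` invocation by the RouterConfig/Router classes that lived in the deleted lib_openshift_api sources. The standalone sketch below (the helper name and sample values are illustrative, not taken from the module) shows how such a dict can be turned into CLI flags.

    #!/usr/bin/env python
    '''Illustrative sketch: flatten a RouterConfig-style options dict into
    `oadm router` command-line flags. Not the deleted module's actual code.'''

    def to_option_list(options):
        '''Emit --key=value for options marked include with a non-empty value.'''
        args = []
        for key, data in sorted(options.items()):
            if data.get('include') and data.get('value') not in (None, ''):
                args.append('--%s=%s' % (key, data['value']))
        return args

    if __name__ == '__main__':
        opts = {'replicas': {'value': 2, 'include': True},
                'selector': {'value': None, 'include': True},
                'service-account': {'value': 'router', 'include': True},
                'cert-file': {'value': '/tmp/router.crt', 'include': False}}
        # Only the included, non-empty options survive:
        # ['oadm', 'router', 'my-router', '--replicas=2', '--service-account=router']
        print(['oadm', 'router', 'my-router'] + to_option_list(opts))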
diff --git a/roles/lib_openshift_api/library/oc_edit.py b/roles/lib_openshift_api/library/oc_edit.py
deleted file mode 100644
index e43b6175a..000000000
--- a/roles/lib_openshift_api/library/oc_edit.py
+++ /dev/null
@@ -1,646 +0,0 @@
-#!/usr/bin/env python
-# ___ ___ _ _ ___ ___ _ _____ ___ ___
-# / __| __| \| | __| _ \ /_\_ _| __| \
-# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
-# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
-# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
-# | |) | (_) | | .` | (_) || | | _|| |) | | | |
-# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
-'''
- OpenShiftCLI class that wraps the oc commands in a subprocess
-'''
-
-import atexit
-import json
-import os
-import shutil
-import subprocess
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- '''return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
-# pylint: disable=too-few-public-methods
-class OpenShiftCLI(object):
- ''' Class to wrap the command line tools '''
- def __init__(self,
- namespace,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftCLI '''
- self.namespace = namespace
- self.verbose = verbose
- self.kubeconfig = kubeconfig
-
- # Pylint allows only 5 arguments to be passed.
- # pylint: disable=too-many-arguments
- def _replace_content(self, resource, rname, content, force=False):
- ''' replace the current object with the content '''
- res = self._get(resource, rname)
- if not res['results']:
- return res
-
- fname = '/tmp/%s' % rname
- yed = Yedit(fname, res['results'][0])
- changes = []
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [fname])
-
- return self._replace(fname, force)
-
- def _replace(self, fname, force=False):
- '''replace the named resource from a file '''
- cmd = ['-n', self.namespace, 'replace', '-f', fname]
- if force:
- cmd.append('--force')
- return self.openshift_cmd(cmd)
-
- def _create(self, fname):
- '''create a resource from a file '''
- return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
-
- def _delete(self, resource, rname):
- '''delete the named resource '''
- return self.openshift_cmd(['delete', resource, rname, '-n', self.namespace])
-
- def _get(self, resource, rname=None):
- '''return a secret by name '''
- cmd = ['get', resource, '-o', 'json', '-n', self.namespace]
- if rname:
- cmd.append(rname)
-
- rval = self.openshift_cmd(cmd, output=True)
-
- # Ensure results are returned in an array
- if rval.has_key('items'):
- rval['results'] = rval['items']
- elif not isinstance(rval['results'], list):
- rval['results'] = [rval['results']]
-
- return rval
-
- def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json'):
- '''Base command for oc '''
- #cmds = ['/usr/bin/oc', '--config', self.kubeconfig]
- cmds = []
- if oadm:
- cmds = ['/usr/bin/oadm']
- else:
- cmds = ['/usr/bin/oc']
-
- cmds.extend(cmd)
-
- rval = {}
- results = ''
- err = None
-
- if self.verbose:
- print ' '.join(cmds)
-
- proc = subprocess.Popen(cmds,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env={'KUBECONFIG': self.kubeconfig})
-
- proc.wait()
- stdout = proc.stdout.read()
- stderr = proc.stderr.read()
- rval = {"returncode": proc.returncode,
- "results": results,
- "cmd": ' '.join(cmds),
- }
-
- if proc.returncode == 0:
- if output:
- if output_type == 'json':
- try:
- rval['results'] = json.loads(stdout)
- except ValueError as err:
- if "No JSON object could be decoded" in err.message:
- err = err.message
- elif output_type == 'raw':
- rval['results'] = stdout
-
- if self.verbose:
- print stdout
- print stderr
- print
-
- if err:
- rval.update({"err": err,
- "stderr": stderr,
- "stdout": stdout,
- "cmd": cmds
- })
-
- else:
- rval.update({"stderr": stderr,
- "stdout": stdout,
- "results": {},
- })
-
- return rval
-
-class Utils(object):
- ''' utilities for openshiftcli modules '''
- @staticmethod
- def create_file(rname, data, ftype=None):
- ''' create a file in tmp with name and contents'''
- path = os.path.join('/tmp', rname)
- with open(path, 'w') as fds:
- if ftype == 'yaml':
- fds.write(yaml.safe_dump(data, default_flow_style=False))
-
- elif ftype == 'json':
- fds.write(json.dumps(data))
- else:
- fds.write(data)
-
- # Register cleanup when module is done
- atexit.register(Utils.cleanup, [path])
- return path
-
- @staticmethod
- def create_files_from_contents(data):
- '''Turn an array of dict: filename, content into a files array'''
- files = []
-
- for sfile in data:
- path = Utils.create_file(sfile['path'], sfile['content'])
- files.append(path)
-
- return files
-
- @staticmethod
- def cleanup(files):
- '''Clean up on exit '''
- for sfile in files:
- if os.path.exists(sfile):
- if os.path.isdir(sfile):
- shutil.rmtree(sfile)
- elif os.path.isfile(sfile):
- os.remove(sfile)
-
-
- @staticmethod
- def exists(results, _name):
- ''' Check to see if the results include the name '''
- if not results:
- return False
-
-
- if Utils.find_result(results, _name):
- return True
-
- return False
-
- @staticmethod
- def find_result(results, _name):
- ''' Find the specified result by name'''
- rval = None
- for result in results:
- if result.has_key('metadata') and result['metadata']['name'] == _name:
- rval = result
- break
-
- return rval
-
- @staticmethod
- def get_resource_file(sfile, sfile_type='yaml'):
- ''' return the service file '''
- contents = None
- with open(sfile) as sfd:
- contents = sfd.read()
-
- if sfile_type == 'yaml':
- contents = yaml.safe_load(contents)
- elif sfile_type == 'json':
- contents = json.loads(contents)
-
- return contents
-
- # Disabling too-many-branches. This is a yaml dictionary comparison function
- # pylint: disable=too-many-branches,too-many-return-statements
- @staticmethod
- def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
- ''' Given a user defined definition, compare it with the results given back by our query. '''
-
- # Currently these values are autogenerated and we do not need to check them
- skip = ['metadata', 'status']
- if skip_keys:
- skip.extend(skip_keys)
-
- for key, value in result_def.items():
- if key in skip:
- continue
-
- # Both are lists
- if isinstance(value, list):
- if not isinstance(user_def[key], list):
- if debug:
- print 'user_def[key] is not a list'
- return False
-
- for values in zip(user_def[key], value):
- if isinstance(values[0], dict) and isinstance(values[1], dict):
- if debug:
- print 'sending list - list'
- print type(values[0])
- print type(values[1])
- result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
- if not result:
- print 'list compare returned false'
- return False
-
- elif value != user_def[key]:
- if debug:
- print 'value should be identical'
- print value
- print user_def[key]
- return False
-
- # recurse on a dictionary
- elif isinstance(value, dict):
- if not isinstance(user_def[key], dict):
- if debug:
- print "dict returned false not instance of dict"
- return False
-
- # before passing ensure keys match
- api_values = set(value.keys()) - set(skip)
- user_values = set(user_def[key].keys()) - set(skip)
- if api_values != user_values:
- if debug:
- print api_values
- print user_values
- print "keys are not equal in dict"
- return False
-
- result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
- if not result:
- if debug:
- print "dict returned false"
- print result
- return False
-
- # Verify each key, value pair is the same
- else:
- if not user_def.has_key(key) or value != user_def[key]:
- if debug:
- print "value not equal; user_def does not have key"
- print value
- print user_def[key]
- return False
-
- return True
-
-class YeditException(Exception):
- ''' Exception class for Yedit '''
- pass
-
-class Yedit(object):
- ''' Class to modify yaml files '''
- re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
- re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
-
- def __init__(self, filename=None, content=None, content_type='yaml'):
- self.content = content
- self.filename = filename
- self.__yaml_dict = content
- self.content_type = content_type
- if self.filename and not self.content:
- self.load(content_type=self.content_type)
-
- @property
- def yaml_dict(self):
- ''' getter method for yaml_dict '''
- return self.__yaml_dict
-
- @yaml_dict.setter
- def yaml_dict(self, value):
- ''' setter method for yaml_dict '''
- self.__yaml_dict = value
-
- @staticmethod
- def remove_entry(data, key):
- ''' remove data at location key '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for remove
- # expected list entry
- if key_indexes[-1][0]:
- if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- del data[int(key_indexes[-1][0])]
- return True
-
- # expected dict entry
- elif key_indexes[-1][1]:
- if isinstance(data, dict):
- del data[key_indexes[-1][1]]
- return True
-
- @staticmethod
- def add_entry(data, key, item=None):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- curr_data = data
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key:
- if isinstance(data, dict) and data.has_key(dict_key):
- data = data[dict_key]
- continue
-
- data[dict_key] = {}
- data = data[dict_key]
-
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for add
- # expected list entry
- if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- data[int(key_indexes[-1][0])] = item
-
- # expected dict entry
- elif key_indexes[-1][1] and isinstance(data, dict):
- data[key_indexes[-1][1]] = item
-
- return curr_data
-
- @staticmethod
- def get_entry(data, key):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- return data
-
- def write(self):
- ''' write to file '''
- if not self.filename:
- raise YeditException('Please specify a filename.')
-
- with open(self.filename, 'w') as yfd:
- yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
-
- def read(self):
- ''' read from file '''
- # check if it exists
- if not self.exists():
- return None
-
- contents = None
- with open(self.filename) as yfd:
- contents = yfd.read()
-
- return contents
-
- def exists(self):
- ''' return whether file exists '''
- if os.path.exists(self.filename):
- return True
-
- return False
-
- def load(self, content_type='yaml'):
- ''' return yaml file '''
- contents = self.read()
-
- if not contents:
- return None
-
- # check if it is yaml
- try:
- if content_type == 'yaml':
- self.yaml_dict = yaml.load(contents)
- elif content_type == 'json':
- self.yaml_dict = json.loads(contents)
- except yaml.YAMLError as _:
- # Error loading yaml or json
- return None
-
- return self.yaml_dict
-
- def get(self, key):
- ''' get a specified key'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- return entry
-
- def delete(self, key):
- ''' remove key from a dict'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
- if not entry:
- return (False, self.yaml_dict)
-
- result = Yedit.remove_entry(self.yaml_dict, key)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def put(self, key, value):
- ''' put key, value into a dict '''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- if entry == value:
- return (False, self.yaml_dict)
-
- result = Yedit.add_entry(self.yaml_dict, key, value)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def create(self, key, value):
- ''' create a yaml file '''
- if not self.exists():
- self.yaml_dict = {key: value}
- return (True, self.yaml_dict)
-
- return (False, self.yaml_dict)
-
-class Edit(OpenShiftCLI):
- ''' Class to wrap the oc command line tools
- '''
- # pylint: disable=too-many-arguments
- def __init__(self,
- kind,
- namespace,
- resource_name=None,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftOC '''
- super(Edit, self).__init__(namespace, kubeconfig)
- self.namespace = namespace
- self.kind = kind
- self.name = resource_name
- self.kubeconfig = kubeconfig
- self.verbose = verbose
-
- def get(self):
- '''return a secret by name '''
- return self._get(self.kind, self.name)
-
- def update(self, file_name, content, force=False, content_type='yaml'):
- '''run update '''
- if file_name:
- if content_type == 'yaml':
- data = yaml.load(open(file_name))
- elif content_type == 'json':
- data = json.loads(open(file_name).read())
-
- changes = []
- yed = Yedit(file_name, data)
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [file_name])
-
- return self._replace(file_name, force=force)
-
- return self._replace_content(self.kind, self.name, content, force=force)
-
-
-
-def main():
- '''
- ansible oc module for editing resources
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- state=dict(default='present', type='str',
- choices=['present']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default=None, required=True, type='str'),
- kind=dict(required=True,
- type='str',
- choices=['dc', 'deploymentconfig',
- 'svc', 'service',
- 'scc', 'securitycontextconstraints',
- 'ns', 'namespace', 'project', 'projects',
- 'is', 'imagestream',
- 'istag', 'imagestreamtag',
- 'bc', 'buildconfig',
- 'routes',
- 'node',
- 'secret',
- ]),
- file_name=dict(default=None, type='str'),
- file_format=dict(default='yaml', type='str'),
- content=dict(default=None, required=True, type='dict'),
- force=dict(default=False, type='bool'),
- ),
- supports_check_mode=True,
- )
- ocedit = Edit(module.params['kind'],
- module.params['namespace'],
- module.params['name'],
- kubeconfig=module.params['kubeconfig'],
- verbose=module.params['debug'])
-
- state = module.params['state']
-
- api_rval = ocedit.get()
-
- ########
- # Create
- ########
- if not Utils.exists(api_rval['results'], module.params['name']):
- module.fail_json(msg=api_rval)
-
- ########
- # Update
- ########
- api_rval = ocedit.update(module.params['file_name'],
- module.params['content'],
- module.params['force'],
- module.params['file_format'])
-
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- if api_rval.has_key('updated') and not api_rval['updated']:
- module.exit_json(changed=False, results=api_rval, state="present")
-
- # return the created object
- api_rval = ocedit.get()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
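The oc_edit module deleted above leans on the Yedit helper to apply dotted-key updates ('a.b.c') to a loaded YAML document before calling `oc replace`. A simplified, standalone restatement of that update logic is sketched below; unlike the real Yedit it skips the [n] list-index syntax and key validation, and the function name is mine.

    '''Simplified sketch of Yedit-style dotted-key updates (no list indexes).'''

    def put(data, key, value):
        '''Set data['a']['b']['c'] = value for key "a.b.c", creating dicts as needed.'''
        parts = key.split('.')
        node = data
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = value
        return data

    if __name__ == '__main__':
        doc = {'metadata': {'labels': {}}}
        put(doc, 'metadata.labels.router', 'enabled')
        print(doc)  # {'metadata': {'labels': {'router': 'enabled'}}}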
diff --git a/roles/lib_openshift_api/library/oc_obj.py b/roles/lib_openshift_api/library/oc_obj.py
deleted file mode 100644
index f0ea66aee..000000000
--- a/roles/lib_openshift_api/library/oc_obj.py
+++ /dev/null
@@ -1,730 +0,0 @@
-#!/usr/bin/env python
-# ___ ___ _ _ ___ ___ _ _____ ___ ___
-# / __| __| \| | __| _ \ /_\_ _| __| \
-# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
-# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
-# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
-# | |) | (_) | | .` | (_) || | | _|| |) | | | |
-# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
-'''
- OpenShiftCLI class that wraps the oc commands in a subprocess
-'''
-
-import atexit
-import json
-import os
-import shutil
-import subprocess
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- '''return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
-# pylint: disable=too-few-public-methods
-class OpenShiftCLI(object):
- ''' Class to wrap the command line tools '''
- def __init__(self,
- namespace,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftCLI '''
- self.namespace = namespace
- self.verbose = verbose
- self.kubeconfig = kubeconfig
-
- # Pylint allows only 5 arguments to be passed.
- # pylint: disable=too-many-arguments
- def _replace_content(self, resource, rname, content, force=False):
- ''' replace the current object with the content '''
- res = self._get(resource, rname)
- if not res['results']:
- return res
-
- fname = '/tmp/%s' % rname
- yed = Yedit(fname, res['results'][0])
- changes = []
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [fname])
-
- return self._replace(fname, force)
-
- def _replace(self, fname, force=False):
- '''replace the named resource from a file '''
- cmd = ['-n', self.namespace, 'replace', '-f', fname]
- if force:
- cmd.append('--force')
- return self.openshift_cmd(cmd)
-
- def _create(self, fname):
- '''create a resource from a file '''
- return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
-
- def _delete(self, resource, rname):
- '''delete the named resource '''
- return self.openshift_cmd(['delete', resource, rname, '-n', self.namespace])
-
- def _get(self, resource, rname=None):
- '''return a secret by name '''
- cmd = ['get', resource, '-o', 'json', '-n', self.namespace]
- if rname:
- cmd.append(rname)
-
- rval = self.openshift_cmd(cmd, output=True)
-
- # Ensure results are returned in an array
- if rval.has_key('items'):
- rval['results'] = rval['items']
- elif not isinstance(rval['results'], list):
- rval['results'] = [rval['results']]
-
- return rval
-
- def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json'):
- '''Base command for oc '''
- #cmds = ['/usr/bin/oc', '--config', self.kubeconfig]
- cmds = []
- if oadm:
- cmds = ['/usr/bin/oadm']
- else:
- cmds = ['/usr/bin/oc']
-
- cmds.extend(cmd)
-
- rval = {}
- results = ''
- err = None
-
- if self.verbose:
- print ' '.join(cmds)
-
- proc = subprocess.Popen(cmds,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env={'KUBECONFIG': self.kubeconfig})
-
- proc.wait()
- stdout = proc.stdout.read()
- stderr = proc.stderr.read()
- rval = {"returncode": proc.returncode,
- "results": results,
- "cmd": ' '.join(cmds),
- }
-
- if proc.returncode == 0:
- if output:
- if output_type == 'json':
- try:
- rval['results'] = json.loads(stdout)
- except ValueError as err:
- if "No JSON object could be decoded" in err.message:
- err = err.message
- elif output_type == 'raw':
- rval['results'] = stdout
-
- if self.verbose:
- print stdout
- print stderr
- print
-
- if err:
- rval.update({"err": err,
- "stderr": stderr,
- "stdout": stdout,
- "cmd": cmds
- })
-
- else:
- rval.update({"stderr": stderr,
- "stdout": stdout,
- "results": {},
- })
-
- return rval
-
-class Utils(object):
- ''' utilities for openshiftcli modules '''
- @staticmethod
- def create_file(rname, data, ftype=None):
- ''' create a file in tmp with name and contents'''
- path = os.path.join('/tmp', rname)
- with open(path, 'w') as fds:
- if ftype == 'yaml':
- fds.write(yaml.safe_dump(data, default_flow_style=False))
-
- elif ftype == 'json':
- fds.write(json.dumps(data))
- else:
- fds.write(data)
-
- # Register cleanup when module is done
- atexit.register(Utils.cleanup, [path])
- return path
-
- @staticmethod
- def create_files_from_contents(data):
- '''Turn an array of dict: filename, content into a files array'''
- files = []
-
- for sfile in data:
- path = Utils.create_file(sfile['path'], sfile['content'])
- files.append(path)
-
- return files
-
- @staticmethod
- def cleanup(files):
- '''Clean up on exit '''
- for sfile in files:
- if os.path.exists(sfile):
- if os.path.isdir(sfile):
- shutil.rmtree(sfile)
- elif os.path.isfile(sfile):
- os.remove(sfile)
-
-
- @staticmethod
- def exists(results, _name):
- ''' Check to see if the results include the name '''
- if not results:
- return False
-
-
- if Utils.find_result(results, _name):
- return True
-
- return False
-
- @staticmethod
- def find_result(results, _name):
- ''' Find the specified result by name'''
- rval = None
- for result in results:
- if result.has_key('metadata') and result['metadata']['name'] == _name:
- rval = result
- break
-
- return rval
-
- @staticmethod
- def get_resource_file(sfile, sfile_type='yaml'):
- ''' return the service file '''
- contents = None
- with open(sfile) as sfd:
- contents = sfd.read()
-
- if sfile_type == 'yaml':
- contents = yaml.safe_load(contents)
- elif sfile_type == 'json':
- contents = json.loads(contents)
-
- return contents
-
- # Disabling too-many-branches. This is a yaml dictionary comparison function
- # pylint: disable=too-many-branches,too-many-return-statements
- @staticmethod
- def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
- ''' Given a user defined definition, compare it with the results given back by our query. '''
-
- # Currently these values are autogenerated and we do not need to check them
- skip = ['metadata', 'status']
- if skip_keys:
- skip.extend(skip_keys)
-
- for key, value in result_def.items():
- if key in skip:
- continue
-
- # Both are lists
- if isinstance(value, list):
- if not isinstance(user_def[key], list):
- if debug:
- print 'user_def[key] is not a list'
- return False
-
- for values in zip(user_def[key], value):
- if isinstance(values[0], dict) and isinstance(values[1], dict):
- if debug:
- print 'sending list - list'
- print type(values[0])
- print type(values[1])
- result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
- if not result:
- print 'list compare returned false'
- return False
-
- elif value != user_def[key]:
- if debug:
- print 'value should be identical'
- print value
- print user_def[key]
- return False
-
- # recurse on a dictionary
- elif isinstance(value, dict):
- if not isinstance(user_def[key], dict):
- if debug:
- print "dict returned false not instance of dict"
- return False
-
- # before passing ensure keys match
- api_values = set(value.keys()) - set(skip)
- user_values = set(user_def[key].keys()) - set(skip)
- if api_values != user_values:
- if debug:
- print api_values
- print user_values
- print "keys are not equal in dict"
- return False
-
- result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
- if not result:
- if debug:
- print "dict returned false"
- print result
- return False
-
- # Verify each key, value pair is the same
- else:
- if not user_def.has_key(key) or value != user_def[key]:
- if debug:
- print "value not equal; user_def does not have key"
- print value
- print user_def[key]
- return False
-
- return True
-
-class YeditException(Exception):
- ''' Exception class for Yedit '''
- pass
-
-class Yedit(object):
- ''' Class to modify yaml files '''
- re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
- re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
-
- def __init__(self, filename=None, content=None, content_type='yaml'):
- self.content = content
- self.filename = filename
- self.__yaml_dict = content
- self.content_type = content_type
- if self.filename and not self.content:
- self.load(content_type=self.content_type)
-
- @property
- def yaml_dict(self):
- ''' getter method for yaml_dict '''
- return self.__yaml_dict
-
- @yaml_dict.setter
- def yaml_dict(self, value):
- ''' setter method for yaml_dict '''
- self.__yaml_dict = value
-
- @staticmethod
- def remove_entry(data, key):
- ''' remove data at location key '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for remove
- # expected list entry
- if key_indexes[-1][0]:
- if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- del data[int(key_indexes[-1][0])]
- return True
-
- # expected dict entry
- elif key_indexes[-1][1]:
- if isinstance(data, dict):
- del data[key_indexes[-1][1]]
- return True
-
- @staticmethod
- def add_entry(data, key, item=None):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- curr_data = data
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key:
- if isinstance(data, dict) and data.has_key(dict_key):
- data = data[dict_key]
- continue
-
- data[dict_key] = {}
- data = data[dict_key]
-
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for add
- # expected list entry
- if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- data[int(key_indexes[-1][0])] = item
-
- # expected dict entry
- elif key_indexes[-1][1] and isinstance(data, dict):
- data[key_indexes[-1][1]] = item
-
- return curr_data
-
- @staticmethod
- def get_entry(data, key):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- return data
-
- def write(self):
- ''' write to file '''
- if not self.filename:
- raise YeditException('Please specify a filename.')
-
- with open(self.filename, 'w') as yfd:
- yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
-
- def read(self):
- ''' read from file '''
- # check if it exists
- if not self.exists():
- return None
-
- contents = None
- with open(self.filename) as yfd:
- contents = yfd.read()
-
- return contents
-
- def exists(self):
- ''' return whether file exists '''
- if os.path.exists(self.filename):
- return True
-
- return False
-
- def load(self, content_type='yaml'):
- ''' return yaml file '''
- contents = self.read()
-
- if not contents:
- return None
-
- # check if it is yaml
- try:
- if content_type == 'yaml':
- self.yaml_dict = yaml.load(contents)
- elif content_type == 'json':
- self.yaml_dict = json.loads(contents)
- except yaml.YAMLError as _:
- # Error loading yaml or json
- return None
-
- return self.yaml_dict
-
- def get(self, key):
- ''' get a specified key'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- return entry
-
- def delete(self, key):
- ''' remove key from a dict'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
- if not entry:
- return (False, self.yaml_dict)
-
- result = Yedit.remove_entry(self.yaml_dict, key)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def put(self, key, value):
- ''' put key, value into a dict '''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- if entry == value:
- return (False, self.yaml_dict)
-
- result = Yedit.add_entry(self.yaml_dict, key, value)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def create(self, key, value):
- ''' create a yaml file '''
- if not self.exists():
- self.yaml_dict = {key: value}
- return (True, self.yaml_dict)
-
- return (False, self.yaml_dict)
-
-class OCObject(OpenShiftCLI):
- ''' Class to wrap the oc command line tools '''
-
- # pylint allows 5. we need 6
- # pylint: disable=too-many-arguments
- def __init__(self,
- kind,
- namespace,
- rname=None,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftOC '''
- super(OCObject, self).__init__(namespace, kubeconfig)
- self.kind = kind
- self.namespace = namespace
- self.name = rname
- self.kubeconfig = kubeconfig
- self.verbose = verbose
-
- def get(self):
- '''return a deploymentconfig by name '''
- return self._get(self.kind, rname=self.name)
-
- def delete(self):
- '''return all pods '''
- '''delete the named resource '''
-
- def create(self, files=None, content=None):
- '''Create a deploymentconfig '''
- if files:
- return self._create(files[0])
-
- return self._create(Utils.create_files_from_contents(content))
-
-
- # pylint: disable=too-many-function-args
- def update(self, files=None, content=None, force=False):
- '''run update dc
-
- This receives a list of file names and takes the first filename and calls replace.
- '''
- if files:
- return self._replace(files[0], force)
-
- return self.update_content(content, force)
-
- def update_content(self, content, force=False):
- '''update the dc with the content'''
- return self._replace_content(self.kind, self.name, content, force=force)
-
- def needs_update(self, files=None, content=None, content_type='yaml'):
- ''' check to see if we need to update '''
- objects = self.get()
- if objects['returncode'] != 0:
- return objects
-
- # pylint: disable=no-member
- data = None
- if files:
- data = Utils.get_resource_file(files[0], content_type)
-
- # if equal then no need. So not equal is True
- return not Utils.check_def_equal(data, objects['results'][0], skip_keys=None, debug=False)
- else:
- data = content
-
- for key, value in data.items():
- if key == 'metadata':
- continue
- if not objects['results'][0].has_key(key):
- return True
- if value != objects['results'][0][key]:
- return True
-
- return False
-
-
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible oc module for managing objects
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- state=dict(default='present', type='str',
- choices=['present', 'absent', 'list']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default=None, type='str'),
- files=dict(default=None, type='list'),
- kind=dict(required=True,
- type='str',
- choices=['dc', 'deploymentconfig',
- 'svc', 'service',
- 'scc', 'securitycontextconstraints',
- 'ns', 'namespace', 'project', 'projects',
- 'is', 'imagestream',
- 'istag', 'imagestreamtag',
- 'bc', 'buildconfig',
- 'routes',
- 'node',
- 'secret',
- ]),
- delete_after=dict(default=False, type='bool'),
- content=dict(default=None, type='dict'),
- force=dict(default=False, type='bool'),
- ),
- mutually_exclusive=[["content", "files"]],
-
- supports_check_mode=True,
- )
- ocobj = OCObject(module.params['kind'],
- module.params['namespace'],
- module.params['name'],
- kubeconfig=module.params['kubeconfig'],
- verbose=module.params['debug'])
-
- state = module.params['state']
-
- api_rval = ocobj.get()
-
- #####
- # Get
- #####
- if state == 'list':
- module.exit_json(changed=False, results=api_rval['results'], state="list")
-
- if not module.params['name']:
- module.fail_json(msg='Please specify a name when state is absent|present.')
- ########
- # Delete
- ########
- if state == 'absent':
- if not Utils.exists(api_rval['results'], module.params['name']):
- module.exit_json(changed=False, state="absent")
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed a delete.')
-
- api_rval = ocobj.delete()
- module.exit_json(changed=True, results=api_rval, state="absent")
-
- if state == 'present':
- ########
- # Create
- ########
- if not Utils.exists(api_rval['results'], module.params['name']):
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed a create.')
-
- # Create it here
- api_rval = ocobj.create(module.params['files'], module.params['content'])
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- # return the created object
- api_rval = ocobj.get()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- # Remove files
- if module.params['files'] and module.params['delete_after']:
- Utils.cleanup(module.params['files'])
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- ########
- # Update
- ########
- # if a file path is passed, use it.
- update = ocobj.needs_update(module.params['files'], module.params['content'])
- if not isinstance(update, bool):
- module.fail_json(msg=update)
-
- # No changes
- if not update:
- if module.params['files'] and module.params['delete_after']:
- Utils.cleanup(module.params['files'])
-
- module.exit_json(changed=False, results=api_rval['results'][0], state="present")
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed an update.')
-
- api_rval = ocobj.update(module.params['files'],
- module.params['content'],
- module.params['force'])
-
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- # return the created object
- api_rval = ocobj.get()
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
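The oc_obj module deleted above decides whether an update is needed by comparing the user-supplied definition with what the API returns, ignoring server-managed sections such as metadata and status (Utils.check_def_equal). The sketch below is a simplified, standalone restatement of that idea; the real function also recurses into lists and compares key sets in both directions.

    '''Simplified sketch of the idempotency check: ignore server-managed keys.'''

    def defs_equal(user_def, result_def, skip=('metadata', 'status')):
        '''Return True when every non-skipped key in result_def matches user_def.'''
        for key, value in result_def.items():
            if key in skip:
                continue
            if isinstance(value, dict):
                if not isinstance(user_def.get(key), dict):
                    return False
                if not defs_equal(user_def[key], value, skip):
                    return False
            elif user_def.get(key) != value:
                return False
        return True

    if __name__ == '__main__':
        wanted = {'spec': {'replicas': 1}}
        live = {'spec': {'replicas': 1}, 'metadata': {'uid': 'abc'}, 'status': {}}
        print(defs_equal(wanted, live))  # True -> no update needed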
diff --git a/roles/lib_openshift_api/library/oc_secret.py b/roles/lib_openshift_api/library/oc_secret.py
deleted file mode 100644
index ca58d7139..000000000
--- a/roles/lib_openshift_api/library/oc_secret.py
+++ /dev/null
@@ -1,702 +0,0 @@
-#!/usr/bin/env python
-# ___ ___ _ _ ___ ___ _ _____ ___ ___
-# / __| __| \| | __| _ \ /_\_ _| __| \
-# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
-# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
-# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
-# | |) | (_) | | .` | (_) || | | _|| |) | | | |
-# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
-'''
- OpenShiftCLI class that wraps the oc commands in a subprocess
-'''
-
-import atexit
-import json
-import os
-import shutil
-import subprocess
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- '''return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
-# pylint: disable=too-few-public-methods
-class OpenShiftCLI(object):
- ''' Class to wrap the command line tools '''
- def __init__(self,
- namespace,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftCLI '''
- self.namespace = namespace
- self.verbose = verbose
- self.kubeconfig = kubeconfig
-
- # Pylint allows only 5 arguments to be passed.
- # pylint: disable=too-many-arguments
- def _replace_content(self, resource, rname, content, force=False):
- ''' replace the current object with the content '''
- res = self._get(resource, rname)
- if not res['results']:
- return res
-
- fname = '/tmp/%s' % rname
- yed = Yedit(fname, res['results'][0])
- changes = []
- for key, value in content.items():
- changes.append(yed.put(key, value))
-
- if any([not change[0] for change in changes]):
- return {'returncode': 0, 'updated': False}
-
- yed.write()
-
- atexit.register(Utils.cleanup, [fname])
-
- return self._replace(fname, force)
-
- def _replace(self, fname, force=False):
- '''replace the named resource from a file '''
- cmd = ['-n', self.namespace, 'replace', '-f', fname]
- if force:
- cmd.append('--force')
- return self.openshift_cmd(cmd)
-
- def _create(self, fname):
- '''create a resource from a file '''
- return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
-
- def _delete(self, resource, rname):
- '''delete the named resource '''
- return self.openshift_cmd(['delete', resource, rname, '-n', self.namespace])
-
- def _get(self, resource, rname=None):
- '''return a secret by name '''
- cmd = ['get', resource, '-o', 'json', '-n', self.namespace]
- if rname:
- cmd.append(rname)
-
- rval = self.openshift_cmd(cmd, output=True)
-
- # Ensure results are returned in an array
- if rval.has_key('items'):
- rval['results'] = rval['items']
- elif not isinstance(rval['results'], list):
- rval['results'] = [rval['results']]
-
- return rval
-
- def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json'):
- '''Base command for oc '''
- #cmds = ['/usr/bin/oc', '--config', self.kubeconfig]
- cmds = []
- if oadm:
- cmds = ['/usr/bin/oadm']
- else:
- cmds = ['/usr/bin/oc']
-
- cmds.extend(cmd)
-
- rval = {}
- results = ''
- err = None
-
- if self.verbose:
- print ' '.join(cmds)
-
- proc = subprocess.Popen(cmds,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env={'KUBECONFIG': self.kubeconfig})
-
- proc.wait()
- stdout = proc.stdout.read()
- stderr = proc.stderr.read()
- rval = {"returncode": proc.returncode,
- "results": results,
- "cmd": ' '.join(cmds),
- }
-
- if proc.returncode == 0:
- if output:
- if output_type == 'json':
- try:
- rval['results'] = json.loads(stdout)
- except ValueError as err:
- if "No JSON object could be decoded" in err.message:
- err = err.message
- elif output_type == 'raw':
- rval['results'] = stdout
-
- if self.verbose:
- print stdout
- print stderr
- print
-
- if err:
- rval.update({"err": err,
- "stderr": stderr,
- "stdout": stdout,
- "cmd": cmds
- })
-
- else:
- rval.update({"stderr": stderr,
- "stdout": stdout,
- "results": {},
- })
-
- return rval
-
-class Utils(object):
- ''' utilities for openshiftcli modules '''
- @staticmethod
- def create_file(rname, data, ftype=None):
- ''' create a file in tmp with name and contents'''
- path = os.path.join('/tmp', rname)
- with open(path, 'w') as fds:
- if ftype == 'yaml':
- fds.write(yaml.safe_dump(data, default_flow_style=False))
-
- elif ftype == 'json':
- fds.write(json.dumps(data))
- else:
- fds.write(data)
-
- # Register cleanup when module is done
- atexit.register(Utils.cleanup, [path])
- return path
-
- @staticmethod
- def create_files_from_contents(data):
- '''Turn an array of dict: filename, content into a files array'''
- files = []
-
- for sfile in data:
- path = Utils.create_file(sfile['path'], sfile['content'])
- files.append(path)
-
- return files
-
- @staticmethod
- def cleanup(files):
- '''Clean up on exit '''
- for sfile in files:
- if os.path.exists(sfile):
- if os.path.isdir(sfile):
- shutil.rmtree(sfile)
- elif os.path.isfile(sfile):
- os.remove(sfile)
-
-
- @staticmethod
- def exists(results, _name):
- ''' Check to see if the results include the name '''
- if not results:
- return False
-
-
- if Utils.find_result(results, _name):
- return True
-
- return False
-
- @staticmethod
- def find_result(results, _name):
- ''' Find the specified result by name'''
- rval = None
- for result in results:
- if result.has_key('metadata') and result['metadata']['name'] == _name:
- rval = result
- break
-
- return rval
-
- @staticmethod
- def get_resource_file(sfile, sfile_type='yaml'):
- ''' return the service file '''
- contents = None
- with open(sfile) as sfd:
- contents = sfd.read()
-
- if sfile_type == 'yaml':
- contents = yaml.safe_load(contents)
- elif sfile_type == 'json':
- contents = json.loads(contents)
-
- return contents
-
- # Disabling too-many-branches. This is a yaml dictionary comparison function
- # pylint: disable=too-many-branches,too-many-return-statements
- @staticmethod
- def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
- ''' Given a user defined definition, compare it with the results given back by our query. '''
-
- # Currently these values are autogenerated and we do not need to check them
- skip = ['metadata', 'status']
- if skip_keys:
- skip.extend(skip_keys)
-
- for key, value in result_def.items():
- if key in skip:
- continue
-
- # Both are lists
- if isinstance(value, list):
- if not isinstance(user_def[key], list):
- if debug:
- print 'user_def[key] is not a list'
- return False
-
- for values in zip(user_def[key], value):
- if isinstance(values[0], dict) and isinstance(values[1], dict):
- if debug:
- print 'sending list - list'
- print type(values[0])
- print type(values[1])
- result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
- if not result:
- print 'list compare returned false'
- return False
-
- elif value != user_def[key]:
- if debug:
- print 'value should be identical'
- print value
- print user_def[key]
- return False
-
- # recurse on a dictionary
- elif isinstance(value, dict):
- if not isinstance(user_def[key], dict):
- if debug:
- print "dict returned false not instance of dict"
- return False
-
- # before passing ensure keys match
- api_values = set(value.keys()) - set(skip)
- user_values = set(user_def[key].keys()) - set(skip)
- if api_values != user_values:
- if debug:
- print api_values
- print user_values
- print "keys are not equal in dict"
- return False
-
- result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
- if not result:
- if debug:
- print "dict returned false"
- print result
- return False
-
- # Verify each key, value pair is the same
- else:
- if not user_def.has_key(key) or value != user_def[key]:
- if debug:
- print "value not equal; user_def does not have key"
- print value
- print user_def[key]
- return False
-
- return True
-
-class YeditException(Exception):
- ''' Exception class for Yedit '''
- pass
-
-class Yedit(object):
- ''' Class to modify yaml files '''
- re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
- re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
-
- def __init__(self, filename=None, content=None, content_type='yaml'):
- self.content = content
- self.filename = filename
- self.__yaml_dict = content
- self.content_type = content_type
- if self.filename and not self.content:
- self.load(content_type=self.content_type)
-
- @property
- def yaml_dict(self):
- ''' getter method for yaml_dict '''
- return self.__yaml_dict
-
- @yaml_dict.setter
- def yaml_dict(self, value):
- ''' setter method for yaml_dict '''
- self.__yaml_dict = value
-
- @staticmethod
- def remove_entry(data, key):
- ''' remove data at location key '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for remove
- # expected list entry
- if key_indexes[-1][0]:
- if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- del data[int(key_indexes[-1][0])]
- return True
-
- # expected dict entry
- elif key_indexes[-1][1]:
- if isinstance(data, dict):
- del data[key_indexes[-1][1]]
- return True
-
- @staticmethod
- def add_entry(data, key, item=None):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- curr_data = data
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key:
- if isinstance(data, dict) and data.has_key(dict_key):
- data = data[dict_key]
- continue
-
- data[dict_key] = {}
- data = data[dict_key]
-
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for add
- # expected list entry
- if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- data[int(key_indexes[-1][0])] = item
-
- # expected dict entry
- elif key_indexes[-1][1] and isinstance(data, dict):
- data[key_indexes[-1][1]] = item
-
- return curr_data
-
- @staticmethod
- def get_entry(data, key):
- ''' Get an item from a dictionary with key notation a.b.c
- d = {'a': {'b': 'c'}}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- return data
-
- def write(self):
- ''' write to file '''
- if not self.filename:
- raise YeditException('Please specify a filename.')
-
- with open(self.filename, 'w') as yfd:
- yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
-
- def read(self):
- ''' read from file '''
- # check if it exists
- if not self.exists():
- return None
-
- contents = None
- with open(self.filename) as yfd:
- contents = yfd.read()
-
- return contents
-
- def exists(self):
- ''' return whether file exists '''
- if os.path.exists(self.filename):
- return True
-
- return False
-
- def load(self, content_type='yaml'):
- ''' return yaml file '''
- contents = self.read()
-
- if not contents:
- return None
-
- # check if it is yaml
- try:
- if content_type == 'yaml':
- self.yaml_dict = yaml.load(contents)
- elif content_type == 'json':
- self.yaml_dict = json.loads(contents)
- except yaml.YAMLError as _:
- # Error loading yaml or json
- return None
-
- return self.yaml_dict
-
- def get(self, key):
- ''' get a specified key'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- return entry
-
- def delete(self, key):
- ''' remove key from a dict'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
- if not entry:
- return (False, self.yaml_dict)
-
- result = Yedit.remove_entry(self.yaml_dict, key)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def put(self, key, value):
- ''' put key, value into a dict '''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- if entry == value:
- return (False, self.yaml_dict)
-
- result = Yedit.add_entry(self.yaml_dict, key, value)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def create(self, key, value):
- ''' create a yaml file '''
- if not self.exists():
- self.yaml_dict = {key: value}
- return (True, self.yaml_dict)
-
- return (False, self.yaml_dict)
-
-class Secret(OpenShiftCLI):
- ''' Class to wrap the oc command line tools
- '''
- def __init__(self,
- namespace,
- secret_name=None,
- kubeconfig='/etc/origin/master/admin.kubeconfig',
- verbose=False):
- ''' Constructor for OpenshiftOC '''
- super(Secret, self).__init__(namespace, kubeconfig)
- self.namespace = namespace
- self.name = secret_name
- self.kubeconfig = kubeconfig
- self.verbose = verbose
-
- def get(self):
- '''return a secret by name '''
- return self._get('secrets', self.name)
-
- def delete(self):
- '''delete a secret by name'''
- return self._delete('secrets', self.name)
-
- def create(self, files=None, contents=None):
- '''Create a secret '''
- if not files:
- files = Utils.create_files_from_contents(contents)
-
- secrets = ["%s=%s" % (os.path.basename(sfile), sfile) for sfile in files]
- cmd = ['-n%s' % self.namespace, 'secrets', 'new', self.name]
- cmd.extend(secrets)
-
- return self.openshift_cmd(cmd)
-
- def update(self, files, force=False):
- '''run update secret
-
- This receives a list of file names and converts it into a secret.
- The secret is then written to disk and passed into the `oc replace` command.
- '''
- secret = self.prep_secret(files)
- if secret['returncode'] != 0:
- return secret
-
- sfile_path = '/tmp/%s' % self.name
- with open(sfile_path, 'w') as sfd:
- sfd.write(json.dumps(secret['results']))
-
- atexit.register(Utils.cleanup, [sfile_path])
-
- return self._replace(sfile_path, force=force)
-
- def prep_secret(self, files=None, contents=None):
- ''' return what the secret would look like if created
- This is accomplished by passing -ojson. This will most likely change in the future
- '''
- if not files:
- files = Utils.create_files_from_contents(contents)
-
- secrets = ["%s=%s" % (os.path.basename(sfile), sfile) for sfile in files]
- cmd = ['-ojson', '-n%s' % self.namespace, 'secrets', 'new', self.name]
- cmd.extend(secrets)
-
- return self.openshift_cmd(cmd, output=True)
-
-
-
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible oc module for secrets
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
- state=dict(default='present', type='str',
- choices=['present', 'absent', 'list']),
- debug=dict(default=False, type='bool'),
- namespace=dict(default='default', type='str'),
- name=dict(default=None, type='str'),
- files=dict(default=None, type='list'),
- delete_after=dict(default=False, type='bool'),
- contents=dict(default=None, type='list'),
- force=dict(default=False, type='bool'),
- ),
- mutually_exclusive=[["contents", "files"]],
-
- supports_check_mode=True,
- )
- occmd = Secret(module.params['namespace'],
- module.params['name'],
- kubeconfig=module.params['kubeconfig'],
- verbose=module.params['debug'])
-
- state = module.params['state']
-
- api_rval = occmd.get()
-
- #####
- # Get
- #####
- if state == 'list':
- module.exit_json(changed=False, results=api_rval['results'], state="list")
-
- if not module.params['name']:
- module.fail_json(msg='Please specify a name when state is absent|present.')
- ########
- # Delete
- ########
- if state == 'absent':
- if not Utils.exists(api_rval['results'], module.params['name']):
- module.exit_json(changed=False, state="absent")
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed a delete.')
-
- api_rval = occmd.delete()
- module.exit_json(changed=True, results=api_rval, state="absent")
-
-
- if state == 'present':
- if module.params['files']:
- files = module.params['files']
- elif module.params['contents']:
- files = Utils.create_files_from_contents(module.params['contents'])
- else:
- module.fail_json(msg='Either specify files or contents.')
-
- ########
- # Create
- ########
- if not Utils.exists(api_rval['results'], module.params['name']):
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed a create.')
-
- api_rval = occmd.create(module.params['files'], module.params['contents'])
-
- # Remove files
- if files and module.params['delete_after']:
- Utils.cleanup(files)
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- ########
- # Update
- ########
- secret = occmd.prep_secret(module.params['files'], module.params['contents'])
-
- if secret['returncode'] != 0:
- module.fail_json(msg=secret)
-
- if Utils.check_def_equal(secret['results'], api_rval['results'][0]):
-
- # Remove files
- if files and module.params['delete_after']:
- Utils.cleanup(files)
-
- module.exit_json(changed=False, results=secret['results'], state="present")
-
- if module.check_mode:
- module.exit_json(changed=False, msg='Would have performed an update.')
-
- api_rval = occmd.update(files, force=module.params['force'])
-
- # Remove files
- if secret and module.params['delete_after']:
- Utils.cleanup(files)
-
- if api_rval['returncode'] != 0:
- module.fail_json(msg=api_rval)
-
-
- module.exit_json(changed=True, results=api_rval, state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
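The Secret class removed above builds its create and prep_secret calls by pairing each source file's basename with its path and handing the result to `oc secrets new` (prep_secret adds -ojson to render the secret without creating it). Below is a small standalone sketch of that argument assembly; the helper name is invented for illustration.

    '''Illustrative sketch of the `oc secrets new` argument assembly.'''
    import os

    def secrets_new_cmd(name, namespace, files, render_only=False):
        pairs = ['%s=%s' % (os.path.basename(path), path) for path in files]
        cmd = ['oc']
        if render_only:
            cmd.append('-ojson')  # prep_secret: render the secret without creating it
        return cmd + ['-n%s' % namespace, 'secrets', 'new', name] + pairs

    if __name__ == '__main__':
        print(secrets_new_cmd('router-certs', 'default',
                              ['/tmp/tls.crt', '/tmp/tls.key'], render_only=True))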
diff --git a/roles/lib_timedatectl/library/timedatectl.py b/roles/lib_timedatectl/library/timedatectl.py
deleted file mode 100644
index b6eab5918..000000000
--- a/roles/lib_timedatectl/library/timedatectl.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-'''
- timedatectl ansible module
-
- This module supports setting ntp enabled
-'''
-import subprocess
-
-
-
-
-def do_timedatectl(options=None):
- ''' subprocess timedatectl '''
-
- cmd = ['/usr/bin/timedatectl']
- if options:
- cmd += options.split()
-
- proc = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE)
- proc.wait()
- return proc.stdout.read()
-
-def main():
- ''' Ansible module for timedatectl
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- #state=dict(default='enabled', type='str'),
- ntp=dict(default=True, type='bool'),
- ),
- #supports_check_mode=True
- )
-
- # do something
- ntp_enabled = False
-
- results = do_timedatectl()
-
- for line in results.split('\n'):
- if 'NTP enabled' in line:
- if 'yes' in line:
- ntp_enabled = True
-
- ########
- # Enable NTP
- ########
- if module.params['ntp']:
- if ntp_enabled:
- module.exit_json(changed=False, results="enabled", state="enabled")
-
- # Enable it
- # Commands to enable ntp
- else:
- results = do_timedatectl('set-ntp yes')
- module.exit_json(changed=True, results="enabled", state="enabled", cmdout=results)
-
- #########
- # Disable NTP
- #########
- else:
- if not ntp_enabled:
- module.exit_json(changed=False, results="disabled", state="disabled")
-
- results = do_timedatectl('set-ntp no')
- module.exit_json(changed=True, results="disabled", state="disabled")
-
- module.exit_json(failed=True, changed=False, results="Something went wrong", state="unknown")
-
-# Pylint is getting in the way of basic Ansible
-# pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_yaml_editor/build/ansible/yedit.py b/roles/lib_yaml_editor/build/ansible/yedit.py
deleted file mode 100644
index dab3d6347..000000000
--- a/roles/lib_yaml_editor/build/ansible/yedit.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#pylint: skip-file
-
-def main():
- '''
-    ansible module for modifying yaml files (yedit)
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- state=dict(default='present', type='str',
- choices=['present', 'absent', 'list']),
- debug=dict(default=False, type='bool'),
- src=dict(default=None, type='str'),
- content=dict(default=None, type='dict'),
- key=dict(default=None, type='str'),
- value=dict(default=None, type='str'),
- value_format=dict(default='yaml', choices=['yaml', 'json'], type='str'),
- ),
- #mutually_exclusive=[["src", "content"]],
-
- supports_check_mode=True,
- )
- state = module.params['state']
-
- yamlfile = Yedit(module.params['src'], module.params['content'])
-
- rval = yamlfile.load()
- if not rval and state != 'present':
-        module.fail_json(msg='Error opening file [%s]. Verify that the file exists,'
-                             ' that it has correct permissions, and is valid yaml.' % module.params['src'])
-
- if state == 'list':
- module.exit_json(changed=False, results=rval, state="list")
-
- if state == 'absent':
- rval = yamlfile.delete(module.params['key'])
- module.exit_json(changed=rval[0], results=rval[1], state="absent")
-
- if state == 'present':
-
- if module.params['value_format'] == 'yaml':
- value = yaml.load(module.params['value'])
- elif module.params['value_format'] == 'json':
- value = json.loads(module.params['value'])
-
- if rval:
- rval = yamlfile.put(module.params['key'], value)
- if rval[0]:
- yamlfile.write()
- module.exit_json(changed=rval[0], results=rval[1], state="present")
-
- if not module.params['content']:
- rval = yamlfile.create(module.params['key'], value)
- else:
- rval = yamlfile.load()
- yamlfile.write()
-
- module.exit_json(changed=rval[0], results=rval[1], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_yaml_editor/build/generate.py b/roles/lib_yaml_editor/build/generate.py
deleted file mode 100755
index 312e4d0ee..000000000
--- a/roles/lib_yaml_editor/build/generate.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-'''
- Generate the openshift-ansible/roles/lib_openshift_cli/library/ modules.
-'''
-
-import os
-
-# pylint: disable=anomalous-backslash-in-string
-GEN_STR = "#!/usr/bin/env python\n" + \
- "# ___ ___ _ _ ___ ___ _ _____ ___ ___\n" + \
- "# / __| __| \| | __| _ \ /_\_ _| __| \\\n" + \
- "# | (_ | _|| .` | _|| / / _ \| | | _|| |) |\n" + \
- "# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____\n" + \
- "# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|\n" + \
- "# | |) | (_) | | .` | (_) || | | _|| |) | | | |\n" + \
- "# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|\n"
-
-OPENSHIFT_ANSIBLE_PATH = os.path.dirname(os.path.realpath(__file__))
-
-FILES = {'yedit.py': ['src/base.py', 'src/yedit.py', 'ansible/yedit.py'],
- }
-
-def main():
- ''' combine the necessary files to create the ansible module '''
- library = os.path.join(OPENSHIFT_ANSIBLE_PATH, '..', 'library/')
- for fname, parts in FILES.items():
- with open(os.path.join(library, fname), 'w') as afd:
- afd.seek(0)
- afd.write(GEN_STR)
- for fpart in parts:
- with open(os.path.join(OPENSHIFT_ANSIBLE_PATH, fpart)) as pfd:
- # first line is pylint disable so skip it
- for idx, line in enumerate(pfd):
- if idx == 0 and 'skip-file' in line:
- continue
-
- afd.write(line)
-
-
-if __name__ == '__main__':
- main()
-
-
diff --git a/roles/lib_yaml_editor/build/src/base.py b/roles/lib_yaml_editor/build/src/base.py
deleted file mode 100644
index 9e43d45dc..000000000
--- a/roles/lib_yaml_editor/build/src/base.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# pylint: skip-file
-
-'''
-module for managing yaml files
-'''
-
-import os
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- ''' return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
diff --git a/roles/lib_yaml_editor/build/src/yedit.py b/roles/lib_yaml_editor/build/src/yedit.py
deleted file mode 100644
index 642733914..000000000
--- a/roles/lib_yaml_editor/build/src/yedit.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# pylint: skip-file
-
-class YeditException(Exception):
- ''' Exception class for Yedit '''
- pass
-
-class Yedit(object):
- ''' Class to modify yaml files '''
- re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
- re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
-
- def __init__(self, filename=None, content=None, content_type='yaml'):
- self.content = content
- self.filename = filename
- self.__yaml_dict = content
- self.content_type = content_type
- if self.filename and not self.content:
- self.load(content_type=self.content_type)
-
- @property
- def yaml_dict(self):
- ''' getter method for yaml_dict '''
- return self.__yaml_dict
-
- @yaml_dict.setter
- def yaml_dict(self, value):
- ''' setter method for yaml_dict '''
- self.__yaml_dict = value
-
- @staticmethod
- def remove_entry(data, key):
- ''' remove data at location key '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for remove
- # expected list entry
- if key_indexes[-1][0]:
- if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- del data[int(key_indexes[-1][0])]
- return True
-
- # expected dict entry
- elif key_indexes[-1][1]:
- if isinstance(data, dict):
- del data[key_indexes[-1][1]]
- return True
-
- @staticmethod
- def add_entry(data, key, item=None):
-        ''' Add an item to a dictionary using key notation a.b.c
-            d = {'a': {'b': 'c'}}
-            key = a.b
-            item = 'd'
-            result: d = {'a': {'b': 'd'}}
-        '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- curr_data = data
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key:
- if isinstance(data, dict) and data.has_key(dict_key):
- data = data[dict_key]
- continue
-
- data[dict_key] = {}
- data = data[dict_key]
-
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for add
- # expected list entry
- if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- data[int(key_indexes[-1][0])] = item
-
- # expected dict entry
- elif key_indexes[-1][1] and isinstance(data, dict):
- data[key_indexes[-1][1]] = item
-
- return curr_data
-
- @staticmethod
- def get_entry(data, key):
- ''' Get an item from a dictionary with key notation a.b.c
-            d = {'a': {'b': 'c'}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- return data
-
- def write(self):
- ''' write to file '''
- if not self.filename:
- raise YeditException('Please specify a filename.')
-
- with open(self.filename, 'w') as yfd:
- yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
-
- def read(self):
-        ''' read from file '''
- # check if it exists
- if not self.exists():
- return None
-
- contents = None
- with open(self.filename) as yfd:
- contents = yfd.read()
-
- return contents
-
- def exists(self):
- ''' return whether file exists '''
- if os.path.exists(self.filename):
- return True
-
- return False
-
- def load(self, content_type='yaml'):
- ''' return yaml file '''
- contents = self.read()
-
- if not contents:
- return None
-
- # check if it is yaml
- try:
- if content_type == 'yaml':
- self.yaml_dict = yaml.load(contents)
- elif content_type == 'json':
- self.yaml_dict = json.loads(contents)
- except yaml.YAMLError as _:
- # Error loading yaml or json
- return None
-
- return self.yaml_dict
-
- def get(self, key):
- ''' get a specified key'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- return entry
-
- def delete(self, key):
- ''' remove key from a dict'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
- if not entry:
- return (False, self.yaml_dict)
-
- result = Yedit.remove_entry(self.yaml_dict, key)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def put(self, key, value):
- ''' put key, value into a dict '''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- if entry == value:
- return (False, self.yaml_dict)
-
- result = Yedit.add_entry(self.yaml_dict, key, value)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def create(self, key, value):
- ''' create a yaml file '''
- if not self.exists():
- self.yaml_dict = {key: value}
- return (True, self.yaml_dict)
-
- return (False, self.yaml_dict)
diff --git a/roles/lib_yaml_editor/build/test/foo.yml b/roles/lib_yaml_editor/build/test/foo.yml
deleted file mode 100644
index 20e9ff3fe..000000000
--- a/roles/lib_yaml_editor/build/test/foo.yml
+++ /dev/null
@@ -1 +0,0 @@
-foo: bar
diff --git a/roles/lib_yaml_editor/build/test/test.yaml b/roles/lib_yaml_editor/build/test/test.yaml
deleted file mode 100755
index ac9c37565..000000000
--- a/roles/lib_yaml_editor/build/test/test.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/ansible-playbook
----
-- hosts: localhost
- gather_facts: no
- tasks:
- - yedit:
- src: /home/kwoodson/git/openshift-ansible/roles/lib_yaml_editor/build/test/foo.yml
- key: foo
- value: barplus
- state: present
- register: output
-
- - debug:
- msg: "{{ output }}"
-
diff --git a/roles/lib_yaml_editor/library/yedit.py b/roles/lib_yaml_editor/library/yedit.py
deleted file mode 100644
index b7ae45b31..000000000
--- a/roles/lib_yaml_editor/library/yedit.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python
-# ___ ___ _ _ ___ ___ _ _____ ___ ___
-# / __| __| \| | __| _ \ /_\_ _| __| \
-# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
-# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
-# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
-# | |) | (_) | | .` | (_) || | | _|| |) | | | |
-# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
-
-'''
-module for managing yaml files
-'''
-
-import os
-import re
-
-import yaml
-# This is here because of a bug that causes yaml
-# to incorrectly handle timezone info on timestamps
-def timestamp_constructor(_, node):
- ''' return timestamps as strings'''
- return str(node.value)
-yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
-
-
-class YeditException(Exception):
- ''' Exception class for Yedit '''
- pass
-
-class Yedit(object):
- ''' Class to modify yaml files '''
- re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
- re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
-
- def __init__(self, filename=None, content=None, content_type='yaml'):
- self.content = content
- self.filename = filename
- self.__yaml_dict = content
- self.content_type = content_type
- if self.filename and not self.content:
- self.load(content_type=self.content_type)
-
- @property
- def yaml_dict(self):
- ''' getter method for yaml_dict '''
- return self.__yaml_dict
-
- @yaml_dict.setter
- def yaml_dict(self, value):
- ''' setter method for yaml_dict '''
- self.__yaml_dict = value
-
- @staticmethod
- def remove_entry(data, key):
- ''' remove data at location key '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for remove
- # expected list entry
- if key_indexes[-1][0]:
- if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- del data[int(key_indexes[-1][0])]
- return True
-
- # expected dict entry
- elif key_indexes[-1][1]:
- if isinstance(data, dict):
- del data[key_indexes[-1][1]]
- return True
-
- @staticmethod
- def add_entry(data, key, item=None):
-        ''' Add an item to a dictionary using key notation a.b.c
-            d = {'a': {'b': 'c'}}
-            key = a.b
-            item = 'd'
-            result: d = {'a': {'b': 'd'}}
-        '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- curr_data = data
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes[:-1]:
- if dict_key:
- if isinstance(data, dict) and data.has_key(dict_key):
- data = data[dict_key]
- continue
-
- data[dict_key] = {}
- data = data[dict_key]
-
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- # process last index for add
- # expected list entry
- if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
- data[int(key_indexes[-1][0])] = item
-
- # expected dict entry
- elif key_indexes[-1][1] and isinstance(data, dict):
- data[key_indexes[-1][1]] = item
-
- return curr_data
-
- @staticmethod
- def get_entry(data, key):
- ''' Get an item from a dictionary with key notation a.b.c
-            d = {'a': {'b': 'c'}}
- key = a.b
- return c
- '''
- if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
- return None
-
- key_indexes = re.findall(Yedit.re_key, key)
- for arr_ind, dict_key in key_indexes:
- if dict_key and isinstance(data, dict):
- data = data.get(dict_key, None)
- elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
- data = data[int(arr_ind)]
- else:
- return None
-
- return data
-
- def write(self):
- ''' write to file '''
- if not self.filename:
- raise YeditException('Please specify a filename.')
-
- with open(self.filename, 'w') as yfd:
- yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
-
- def read(self):
-        ''' read from file '''
- # check if it exists
- if not self.exists():
- return None
-
- contents = None
- with open(self.filename) as yfd:
- contents = yfd.read()
-
- return contents
-
- def exists(self):
- ''' return whether file exists '''
- if os.path.exists(self.filename):
- return True
-
- return False
-
- def load(self, content_type='yaml'):
- ''' return yaml file '''
- contents = self.read()
-
- if not contents:
- return None
-
- # check if it is yaml
- try:
- if content_type == 'yaml':
- self.yaml_dict = yaml.load(contents)
- elif content_type == 'json':
- self.yaml_dict = json.loads(contents)
- except yaml.YAMLError as _:
- # Error loading yaml or json
- return None
-
- return self.yaml_dict
-
- def get(self, key):
- ''' get a specified key'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- return entry
-
- def delete(self, key):
- ''' remove key from a dict'''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
- if not entry:
- return (False, self.yaml_dict)
-
- result = Yedit.remove_entry(self.yaml_dict, key)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def put(self, key, value):
- ''' put key, value into a dict '''
- try:
- entry = Yedit.get_entry(self.yaml_dict, key)
- except KeyError as _:
- entry = None
-
- if entry == value:
- return (False, self.yaml_dict)
-
- result = Yedit.add_entry(self.yaml_dict, key, value)
- if not result:
- return (False, self.yaml_dict)
-
- return (True, self.yaml_dict)
-
- def create(self, key, value):
- ''' create a yaml file '''
- if not self.exists():
- self.yaml_dict = {key: value}
- return (True, self.yaml_dict)
-
- return (False, self.yaml_dict)
-
-def main():
- '''
-    ansible module for modifying yaml files (yedit)
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- state=dict(default='present', type='str',
- choices=['present', 'absent', 'list']),
- debug=dict(default=False, type='bool'),
- src=dict(default=None, type='str'),
- content=dict(default=None, type='dict'),
- key=dict(default=None, type='str'),
- value=dict(default=None, type='str'),
- value_format=dict(default='yaml', choices=['yaml', 'json'], type='str'),
- ),
- #mutually_exclusive=[["src", "content"]],
-
- supports_check_mode=True,
- )
- state = module.params['state']
-
- yamlfile = Yedit(module.params['src'], module.params['content'])
-
- rval = yamlfile.load()
- if not rval and state != 'present':
-        module.fail_json(msg='Error opening file [%s]. Verify that the file exists,'
-                             ' that it has correct permissions, and is valid yaml.' % module.params['src'])
-
- if state == 'list':
- module.exit_json(changed=False, results=rval, state="list")
-
- if state == 'absent':
- rval = yamlfile.delete(module.params['key'])
- module.exit_json(changed=rval[0], results=rval[1], state="absent")
-
- if state == 'present':
-
- if module.params['value_format'] == 'yaml':
- value = yaml.load(module.params['value'])
- elif module.params['value_format'] == 'json':
- value = json.loads(module.params['value'])
-
- if rval:
- rval = yamlfile.put(module.params['key'], value)
- if rval[0]:
- yamlfile.write()
- module.exit_json(changed=rval[0], results=rval[1], state="present")
-
- if not module.params['content']:
- rval = yamlfile.create(module.params['key'], value)
- else:
- rval = yamlfile.load()
- yamlfile.write()
-
- module.exit_json(changed=rval[0], results=rval[1], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/README.md b/roles/lib_zabbix/README.md
deleted file mode 100644
index 69debc698..000000000
--- a/roles/lib_zabbix/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-zabbix
-=========
-
-Automate zabbix tasks.
-
-Requirements
-------------
-
-This role requires that the openshift_tools rpm be installed to provide the zbxapi.py library. For now, it can be found at https://github.com/openshift/openshift-tools under openshift_tools/monitoring/zbxapi.py.
-
-Role Variables
---------------
-
-None
-
-Dependencies
-------------
-
-This depends on the zbxapi.py library, located for now at https://github.com/openshift/openshift-tools under openshift_tools/monitoring/zbxapi.py.
-
-Example Playbook
-----------------
-
- - zbx_host:
- server: zab_server
- user: zab_user
- password: zab_password
- name: 'myhost'
-
-License
--------
-
-ASL 2.0
-
-Author Information
-------------------
-
-OpenShift operations, Red Hat, Inc
diff --git a/roles/lib_zabbix/library/__init__.py b/roles/lib_zabbix/library/__init__.py
deleted file mode 100644
index 0c7e19e41..000000000
--- a/roles/lib_zabbix/library/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-'''
-ZabbixAPI ansible module
-'''
diff --git a/roles/lib_zabbix/library/zbx_action.py b/roles/lib_zabbix/library/zbx_action.py
deleted file mode 100644
index 499084942..000000000
--- a/roles/lib_zabbix/library/zbx_action.py
+++ /dev/null
@@ -1,690 +0,0 @@
-#!/usr/bin/env python
-# vim: expandtab:tabstop=4:shiftwidth=4
-'''
- Ansible module for zabbix actions
-'''
-#
-# Zabbix action ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because each module looks similar to each other.
-# These need duplicate code as their behavior is very similar
-# but different for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection, ZabbixAPIError
-
-CUSTOM_SCRIPT_ACTION = '0'
-IPMI_ACTION = '1'
-SSH_ACTION = '2'
-TELNET_ACTION = '3'
-GLOBAL_SCRIPT_ACTION = '4'
-
-EXECUTE_ON_ZABBIX_AGENT = '0'
-EXECUTE_ON_ZABBIX_SERVER = '1'
-
-OPERATION_REMOTE_COMMAND = '1'
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def conditions_equal(zab_conditions, user_conditions):
- '''Compare two lists of conditions'''
- c_type = 'conditiontype'
- _op = 'operator'
- val = 'value'
- if len(user_conditions) != len(zab_conditions):
- return False
-
- for zab_cond, user_cond in zip(zab_conditions, user_conditions):
- if zab_cond[c_type] != str(user_cond[c_type]) or zab_cond[_op] != str(user_cond[_op]) or \
- zab_cond[val] != str(user_cond[val]):
- return False
-
- return True
-
-def filter_differences(zabbix_filters, user_filters):
- '''Determine the differences from user and zabbix for operations'''
- rval = {}
- for key, val in user_filters.items():
-
- if key == 'conditions':
- if not conditions_equal(zabbix_filters[key], val):
- rval[key] = val
-
- elif zabbix_filters[key] != str(val):
- rval[key] = val
-
- return rval
-
-def opconditions_diff(zab_val, user_val):
- ''' Report whether there are differences between opconditions on
- zabbix and opconditions supplied by user '''
-
- if len(zab_val) != len(user_val):
- return True
-
- for z_cond, u_cond in zip(zab_val, user_val):
- if not all([str(u_cond[op_key]) == z_cond[op_key] for op_key in \
- ['conditiontype', 'operator', 'value']]):
- return True
-
- return False
-
-def opmessage_diff(zab_val, user_val):
- ''' Report whether there are differences between opmessage on
- zabbix and opmessage supplied by user '''
-
- for op_msg_key, op_msg_val in user_val.items():
- if zab_val[op_msg_key] != str(op_msg_val):
- return True
-
- return False
-
-def opmessage_grp_diff(zab_val, user_val):
- ''' Report whether there are differences between opmessage_grp
- on zabbix and opmessage_grp supplied by user '''
-
- zab_grp_ids = set([ugrp['usrgrpid'] for ugrp in zab_val])
- usr_grp_ids = set([ugrp['usrgrpid'] for ugrp in user_val])
- if usr_grp_ids != zab_grp_ids:
- return True
-
- return False
-
-def opmessage_usr_diff(zab_val, user_val):
- ''' Report whether there are differences between opmessage_usr
- on zabbix and opmessage_usr supplied by user '''
-
- zab_usr_ids = set([usr['userid'] for usr in zab_val])
- usr_ids = set([usr['userid'] for usr in user_val])
- if usr_ids != zab_usr_ids:
- return True
-
- return False
-
-def opcommand_diff(zab_op_cmd, usr_op_cmd):
- ''' Check whether user-provided opcommand matches what's already
- stored in Zabbix '''
-
- for usr_op_cmd_key, usr_op_cmd_val in usr_op_cmd.items():
- if zab_op_cmd[usr_op_cmd_key] != str(usr_op_cmd_val):
- return True
- return False
-
-def host_in_zabbix(zab_hosts, usr_host):
- ''' Check whether a particular user host is already in the
- Zabbix list of hosts '''
-
- for usr_hst_key, usr_hst_val in usr_host.items():
- for zab_host in zab_hosts:
- if usr_hst_key in zab_host and \
- zab_host[usr_hst_key] == str(usr_hst_val):
- return True
-
- return False
-
-def hostlist_in_zabbix(zab_hosts, usr_hosts):
- ''' Check whether user-provided list of hosts are already in
- the Zabbix action '''
-
- if len(zab_hosts) != len(usr_hosts):
- return False
-
- for usr_host in usr_hosts:
- if not host_in_zabbix(zab_hosts, usr_host):
- return False
-
- return True
-
-# We are comparing two lists of dictionaries (the one stored on zabbix and the
-# one the user is providing). For each type of operation, determine whether there
-# is a difference between what is stored on zabbix and what the user is providing.
-# If there is a difference, we take the user-provided data for what needs to
-# be stored/updated into zabbix.
-def operation_differences(zabbix_ops, user_ops):
- '''Determine the differences from user and zabbix for operations'''
-
- # if they don't match, take the user options
- if len(zabbix_ops) != len(user_ops):
- return user_ops
-
- rval = {}
- for zab, user in zip(zabbix_ops, user_ops):
- for oper in user.keys():
- if oper == 'opconditions' and opconditions_diff(zab[oper], \
- user[oper]):
- rval[oper] = user[oper]
-
- elif oper == 'opmessage' and opmessage_diff(zab[oper], \
- user[oper]):
- rval[oper] = user[oper]
-
- elif oper == 'opmessage_grp' and opmessage_grp_diff(zab[oper], \
- user[oper]):
- rval[oper] = user[oper]
-
- elif oper == 'opmessage_usr' and opmessage_usr_diff(zab[oper], \
- user[oper]):
- rval[oper] = user[oper]
-
- elif oper == 'opcommand' and opcommand_diff(zab[oper], \
- user[oper]):
- rval[oper] = user[oper]
-
- # opcommand_grp can be treated just like opcommand_hst
- # as opcommand_grp[] is just a list of groups
- elif oper == 'opcommand_hst' or oper == 'opcommand_grp':
- if not hostlist_in_zabbix(zab[oper], user[oper]):
- rval[oper] = user[oper]
-
- # if it's any other type of operation than the ones tested above
- # just do a direct compare
- elif oper not in ['opconditions', 'opmessage', 'opmessage_grp',
- 'opmessage_usr', 'opcommand', 'opcommand_hst',
- 'opcommand_grp'] \
- and str(zab[oper]) != str(user[oper]):
- rval[oper] = user[oper]
-
- return rval
-
-def get_users(zapi, users):
-    '''get user ids for the given user aliases'''
- rval_users = []
-
- for user in users:
- content = zapi.get_content('user',
- 'get',
- {'filter': {'alias': user}})
- rval_users.append({'userid': content['result'][0]['userid']})
-
- return rval_users
-
-def get_user_groups(zapi, groups):
-    '''get usergroup ids for the given group names'''
- user_groups = []
-
- for group in groups:
- content = zapi.get_content('usergroup',
- 'get',
- {'search': {'name': group}})
- for result in content['result']:
- user_groups.append({'usrgrpid': result['usrgrpid']})
-
- return user_groups
-
-def get_mediatype_id_by_name(zapi, m_name):
- '''get the mediatype id from the mediatype name'''
- content = zapi.get_content('mediatype',
- 'get',
- {'filter': {'description': m_name}})
-
- return content['result'][0]['mediatypeid']
-
-def get_priority(priority):
- ''' determine priority
- '''
- prior = 0
- if 'info' in priority:
- prior = 1
- elif 'warn' in priority:
- prior = 2
- elif 'avg' == priority or 'ave' in priority:
- prior = 3
- elif 'high' in priority:
- prior = 4
- elif 'dis' in priority:
- prior = 5
-
- return prior
-
-def get_event_source(from_src):
-    '''Translate event source string into its integer value'''
- choices = ['trigger', 'discovery', 'auto', 'internal']
- rval = 0
- try:
- rval = choices.index(from_src)
- except ValueError as _:
-        raise ZabbixAPIError('Value not found for event source [%s]' % from_src)
-
- return rval
-
-def get_status(inc_status):
- '''determine status for action'''
- rval = 1
- if inc_status == 'enabled':
- rval = 0
-
- return rval
-
-def get_condition_operator(inc_operator):
- ''' determine the condition operator'''
- vals = {'=': 0,
- '<>': 1,
- 'like': 2,
- 'not like': 3,
- 'in': 4,
- '>=': 5,
- '<=': 6,
- 'not in': 7,
- }
-
- return vals[inc_operator]
-
-def get_host_id_by_name(zapi, host_name):
- '''Get host id by name'''
- content = zapi.get_content('host',
- 'get',
- {'filter': {'name': host_name}})
-
- return content['result'][0]['hostid']
-
-def get_trigger_value(inc_trigger):
- '''determine the proper trigger value'''
- rval = 1
- if inc_trigger == 'PROBLEM':
- rval = 1
- else:
- rval = 0
-
- return rval
-
-def get_template_id_by_name(zapi, t_name):
- '''get the template id by name'''
- content = zapi.get_content('template',
- 'get',
- {'filter': {'host': t_name}})
-
- return content['result'][0]['templateid']
-
-
-def get_host_group_id_by_name(zapi, hg_name):
- '''Get hostgroup id by name'''
- content = zapi.get_content('hostgroup',
- 'get',
- {'filter': {'name': hg_name}})
-
- return content['result'][0]['groupid']
-
-def get_condition_type(event_source, inc_condition):
- '''determine the condition type'''
- c_types = {}
- if event_source == 'trigger':
- c_types = {'host group': 0,
- 'host': 1,
- 'trigger': 2,
- 'trigger name': 3,
- 'trigger severity': 4,
- 'trigger value': 5,
- 'time period': 6,
- 'host template': 13,
- 'application': 15,
- 'maintenance status': 16,
- }
-
- elif event_source == 'discovery':
- c_types = {'host IP': 7,
- 'discovered service type': 8,
- 'discovered service port': 9,
- 'discovery status': 10,
- 'uptime or downtime duration': 11,
- 'received value': 12,
- 'discovery rule': 18,
- 'discovery check': 19,
- 'proxy': 20,
- 'discovery object': 21,
- }
-
- elif event_source == 'auto':
- c_types = {'proxy': 20,
- 'host name': 22,
- 'host metadata': 24,
- }
-
- elif event_source == 'internal':
- c_types = {'host group': 0,
- 'host': 1,
- 'host template': 13,
- 'application': 15,
- 'event type': 23,
- }
- else:
-        raise ZabbixAPIError('Unknown event source %s' % event_source)
-
- return c_types[inc_condition]
-
-def get_operation_type(inc_operation):
- ''' determine the correct operation type'''
- o_types = {'send message': 0,
- 'remote command': OPERATION_REMOTE_COMMAND,
- 'add host': 2,
- 'remove host': 3,
- 'add to host group': 4,
- 'remove from host group': 5,
- 'link to template': 6,
- 'unlink from template': 7,
- 'enable host': 8,
- 'disable host': 9,
- }
-
- return o_types[inc_operation]
-
-def get_opcommand_type(opcommand_type):
- ''' determine the opcommand type '''
- oc_types = {'custom script': CUSTOM_SCRIPT_ACTION,
- 'IPMI': IPMI_ACTION,
- 'SSH': SSH_ACTION,
- 'Telnet': TELNET_ACTION,
- 'global script': GLOBAL_SCRIPT_ACTION,
- }
-
- return oc_types[opcommand_type]
-
-def get_execute_on(execute_on):
- ''' determine the execution target '''
- e_types = {'zabbix agent': EXECUTE_ON_ZABBIX_AGENT,
- 'zabbix server': EXECUTE_ON_ZABBIX_SERVER,
- }
-
- return e_types[execute_on]
-
-def action_remote_command(ansible_module, zapi, operation):
- ''' Process remote command type of actions '''
-
- if 'type' not in operation['opcommand']:
- ansible_module.exit_json(failed=True, changed=False, state='unknown',
- results="No Operation Type provided")
-
- operation['opcommand']['type'] = get_opcommand_type(operation['opcommand']['type'])
-
- if operation['opcommand']['type'] == CUSTOM_SCRIPT_ACTION:
-
- if 'execute_on' in operation['opcommand']:
- operation['opcommand']['execute_on'] = get_execute_on(operation['opcommand']['execute_on'])
-
- # custom script still requires the target hosts/groups to be set
- operation['opcommand_hst'] = []
- operation['opcommand_grp'] = []
- for usr_host in operation['target_hosts']:
- if usr_host['target_type'] == 'zabbix server':
- # 0 = target host local/current host
- operation['opcommand_hst'].append({'hostid': 0})
- elif usr_host['target_type'] == 'group':
- group_name = usr_host['target']
- gid = get_host_group_id_by_name(zapi, group_name)
- operation['opcommand_grp'].append({'groupid': gid})
- elif usr_host['target_type'] == 'host':
- host_name = usr_host['target']
- hid = get_host_id_by_name(zapi, host_name)
- operation['opcommand_hst'].append({'hostid': hid})
-
- # 'target_hosts' is just to make it easier to build zbx_actions
- # not part of ZabbixAPI
- del operation['target_hosts']
- else:
- ansible_module.exit_json(failed=True, changed=False, state='unknown',
- results="Unsupported remote command type")
-
-
-def get_action_operations(ansible_module, zapi, inc_operations):
- '''Convert the operations into syntax for api'''
- for operation in inc_operations:
- operation['operationtype'] = get_operation_type(operation['operationtype'])
- if operation['operationtype'] == 0: # send message. Need to fix the
- operation['opmessage']['mediatypeid'] = \
- get_mediatype_id_by_name(zapi, operation['opmessage']['mediatypeid'])
- operation['opmessage_grp'] = get_user_groups(zapi, operation.get('opmessage_grp', []))
- operation['opmessage_usr'] = get_users(zapi, operation.get('opmessage_usr', []))
- if operation['opmessage']['default_msg']:
- operation['opmessage']['default_msg'] = 1
- else:
- operation['opmessage']['default_msg'] = 0
-
- elif operation['operationtype'] == OPERATION_REMOTE_COMMAND:
- action_remote_command(ansible_module, zapi, operation)
-
- # Handle Operation conditions:
- # Currently there is only 1 available which
- # is 'event acknowledged'. In the future
- # if there are any added we will need to pass this
- # option to a function and return the correct conditiontype
- if operation.has_key('opconditions'):
- for condition in operation['opconditions']:
- if condition['conditiontype'] == 'event acknowledged':
- condition['conditiontype'] = 14
-
- if condition['operator'] == '=':
- condition['operator'] = 0
-
- if condition['value'] == 'acknowledged':
- condition['value'] = 1
- else:
- condition['value'] = 0
-
-
- return inc_operations
-
-def get_operation_evaltype(inc_type):
- '''get the operation evaltype'''
- rval = 0
- if inc_type == 'and/or':
- rval = 0
- elif inc_type == 'and':
- rval = 1
- elif inc_type == 'or':
- rval = 2
- elif inc_type == 'custom':
- rval = 3
-
- return rval
-
-def get_action_conditions(zapi, event_source, inc_conditions):
- '''Convert the conditions into syntax for api'''
-
- calc_type = inc_conditions.pop('calculation_type')
- inc_conditions['evaltype'] = get_operation_evaltype(calc_type)
- for cond in inc_conditions['conditions']:
-
- cond['operator'] = get_condition_operator(cond['operator'])
- # Based on conditiontype we need to set the proper value
- # e.g. conditiontype = hostgroup then the value needs to be a hostgroup id
- # e.g. conditiontype = host the value needs to be a host id
- cond['conditiontype'] = get_condition_type(event_source, cond['conditiontype'])
- if cond['conditiontype'] == 0:
- cond['value'] = get_host_group_id_by_name(zapi, cond['value'])
- elif cond['conditiontype'] == 1:
- cond['value'] = get_host_id_by_name(zapi, cond['value'])
- elif cond['conditiontype'] == 4:
- cond['value'] = get_priority(cond['value'])
-
- elif cond['conditiontype'] == 5:
- cond['value'] = get_trigger_value(cond['value'])
- elif cond['conditiontype'] == 13:
- cond['value'] = get_template_id_by_name(zapi, cond['value'])
- elif cond['conditiontype'] == 16:
- cond['value'] = ''
-
- return inc_conditions
-
-
-def get_send_recovery(send_recovery):
- '''Get the integer value'''
- rval = 0
- if send_recovery:
- rval = 1
-
- return rval
-
-# The branches are needed for CRUD and error handling
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible zabbix module for zbx_item
- '''
-
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
-
- name=dict(default=None, type='str'),
- event_source=dict(default='trigger', choices=['trigger', 'discovery', 'auto', 'internal'], type='str'),
- action_subject=dict(default="{TRIGGER.NAME}: {TRIGGER.STATUS}", type='str'),
- action_message=dict(default="{TRIGGER.NAME}: {TRIGGER.STATUS}\r\n" +
- "Last value: {ITEM.LASTVALUE}\r\n\r\n{TRIGGER.URL}", type='str'),
- reply_subject=dict(default="{TRIGGER.NAME}: {TRIGGER.STATUS}", type='str'),
- reply_message=dict(default="Trigger: {TRIGGER.NAME}\r\nTrigger status: {TRIGGER.STATUS}\r\n" +
- "Trigger severity: {TRIGGER.SEVERITY}\r\nTrigger URL: {TRIGGER.URL}\r\n\r\n" +
- "Item values:\r\n\r\n1. {ITEM.NAME1} ({HOST.NAME1}:{ITEM.KEY1}): " +
- "{ITEM.VALUE1}\r\n2. {ITEM.NAME2} ({HOST.NAME2}:{ITEM.KEY2}): " +
- "{ITEM.VALUE2}\r\n3. {ITEM.NAME3} ({HOST.NAME3}:{ITEM.KEY3}): " +
- "{ITEM.VALUE3}", type='str'),
- send_recovery=dict(default=False, type='bool'),
- status=dict(default=None, type='str'),
- escalation_time=dict(default=60, type='int'),
- conditions_filter=dict(default=None, type='dict'),
- operations=dict(default=None, type='list'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'action'
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'name': module.params['name']},
- 'selectFilter': 'extend',
- 'selectOperations': 'extend',
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['actionid']])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- conditions = get_action_conditions(zapi, module.params['event_source'], module.params['conditions_filter'])
- operations = get_action_operations(module, zapi,
- module.params['operations'])
- params = {'name': module.params['name'],
- 'esc_period': module.params['escalation_time'],
- 'eventsource': get_event_source(module.params['event_source']),
- 'status': get_status(module.params['status']),
- 'def_shortdata': module.params['action_subject'],
- 'def_longdata': module.params['action_message'],
- 'r_shortdata': module.params['reply_subject'],
- 'r_longdata': module.params['reply_message'],
- 'recovery_msg': get_send_recovery(module.params['send_recovery']),
- 'filter': conditions,
- 'operations': operations,
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- _ = params.pop('hostid', None)
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'operations':
- ops = operation_differences(zab_results[key], value)
- if ops:
- differences[key] = ops
-
- elif key == 'filter':
- filters = filter_differences(zab_results[key], value)
- if filters:
- differences[key] = filters
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update.
- # action update requires an id, filters, and operations
- differences['actionid'] = zab_results['actionid']
- differences['operations'] = params['operations']
- differences['filter'] = params['filter']
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_application.py b/roles/lib_zabbix/library/zbx_application.py
deleted file mode 100644
index 472390071..000000000
--- a/roles/lib_zabbix/library/zbx_application.py
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/usr/bin/env python
-'''
-Ansible module for application
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix application ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because each module looks similar to each other.
-# These need duplicate code as their behavior is very similar
-# but different for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_template_ids(zapi, template_name):
- '''
- get related templates
- '''
- template_ids = []
- # Fetch templates by name
- content = zapi.get_content('template',
- 'get',
- {'search': {'host': template_name}})
- if content.has_key('result'):
- template_ids.append(content['result'][0]['templateid'])
- return template_ids
-
-def main():
- ''' Ansible module for application
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str', required=True),
- template_name=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the application for the rest of the calls
- zbx_class_name = 'application'
- idname = 'applicationid'
- aname = module.params['name']
- state = module.params['state']
- # get a applicationid, see if it exists
- tids = get_template_ids(zapi, module.params['template_name'])
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'name': aname},
- 'templateids': tids[0],
- })
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- if state == 'present':
- params = {'hostid': tids[0],
- 'name': aname,
- }
-
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
- module.exit_json(changed=True, results=content['result'], state='present')
- # already exists, we need to update it
- # let's compare properties
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
- if key == 'templates' and zab_results.has_key('parentTemplates'):
- if zab_results['parentTemplates'] != value:
- differences[key] = value
- elif zab_results[key] != str(value) and zab_results[key] != value:
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=content['result'], state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_discoveryrule.py b/roles/lib_zabbix/library/zbx_discoveryrule.py
deleted file mode 100644
index 7c5f98397..000000000
--- a/roles/lib_zabbix/library/zbx_discoveryrule.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-'''
-Zabbix discovery rule ansible module
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because each module looks similar to each other.
-# These need duplicate code as their behavior is very similar
-# but different for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_template(zapi, template_name):
- '''get a template by name
- '''
- content = zapi.get_content('template',
- 'get',
- {'search': {'host': template_name},
- 'output': 'extend',
- 'selectInterfaces': 'interfaceid',
- })
- if not content['result']:
- return None
- return content['result'][0]
-
-def get_type(vtype):
- '''
-    Determine which type of discovery rule this is
- '''
- _types = {'agent': 0,
- 'SNMPv1': 1,
- 'trapper': 2,
- 'simple': 3,
- 'SNMPv2': 4,
- 'internal': 5,
- 'SNMPv3': 6,
- 'active': 7,
- 'external': 10,
- 'database monitor': 11,
- 'ipmi': 12,
- 'ssh': 13,
- 'telnet': 14,
- 'JMX': 16,
- }
-
- for typ in _types.keys():
- if vtype in typ or vtype == typ:
- _vtype = _types[typ]
- break
- else:
- _vtype = 2
-
- return _vtype
-
-def main():
- '''
- Ansible module for zabbix discovery rules
- '''
-
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- key=dict(default=None, type='str'),
- description=dict(default=None, type='str'),
- interfaceid=dict(default=None, type='int'),
- ztype=dict(default='trapper', type='str'),
- delay=dict(default=60, type='int'),
- lifetime=dict(default=30, type='int'),
- template_name=dict(default=[], type='list'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'discoveryrule'
- idname = "itemid"
- dname = module.params['name']
- state = module.params['state']
- template = get_template(zapi, module.params['template_name'])
-
- # selectInterfaces doesn't appear to be working but is needed.
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'name': dname},
- 'templateids': template['templateid'],
- #'selectDServices': 'extend',
- #'selectDChecks': 'extend',
- #'selectDhosts': 'dhostid',
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
-
- # Create and Update
- if state == 'present':
- params = {'name': dname,
- 'key_': module.params['key'],
- 'hostid': template['templateid'],
- 'interfaceid': module.params['interfaceid'],
- 'lifetime': module.params['lifetime'],
- 'type': get_type(module.params['ztype']),
- 'description': module.params['description'],
- }
- if params['type'] in [2, 5, 7, 11]:
- params.pop('interfaceid')
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_graph.py b/roles/lib_zabbix/library/zbx_graph.py
deleted file mode 100644
index 71f4e1264..000000000
--- a/roles/lib_zabbix/library/zbx_graph.py
+++ /dev/null
@@ -1,331 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for zabbix graphs
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix graphs ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#---
-#- hosts: localhost
-# gather_facts: no
-# tasks:
-# - zbx_graph:
-# zbx_server: https://zabbixserver/zabbix/api_jsonrpc.php
-# zbx_user: Admin
-# zbx_password: zabbix
-# name: Test Graph
-# height: 300
-# width: 500
-# graph_items:
-# - item_name: openshift.master.etcd.create.fail
-# color: red
-# line_style: bold
-# - item_name: openshift.master.etcd.create.success
-# color: red
-# line_style: bold
-#
-#
-
-# This is in place because each module looks similar to each other.
-# These need duplicate code as their behavior is very similar
-# but different for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_graph_type(graphtype):
- '''
- Possible values:
- 0 - normal;
- 1 - stacked;
- 2 - pie;
- 3 - exploded;
- '''
- gtype = 0
- if 'stacked' in graphtype:
- gtype = 1
- elif 'pie' in graphtype:
- gtype = 2
- elif 'exploded' in graphtype:
- gtype = 3
-
- return gtype
-
-def get_show_legend(show_legend):
- '''Get the value for show_legend
- 0 - hide
- 1 - (default) show
- '''
- rval = 1
- if 'hide' == show_legend:
- rval = 0
-
- return rval
-
-def get_template_id(zapi, template_name):
- '''
- get related templates
- '''
- # Fetch templates by name
- content = zapi.get_content('template',
- 'get',
- {'filter': {'host': template_name},})
-
- if content.has_key('result'):
- return content['result'][0]['templateid']
-
- return None
-
-def get_color(color_in):
- ''' Receive a color and translate it to a hex representation of the color
-
- Will have a few setup by default
- '''
- colors = {'black': '000000',
- 'red': 'FF0000',
- 'pink': 'FFC0CB',
- 'purple': '800080',
- 'orange': 'FFA500',
- 'gold': 'FFD700',
- 'yellow': 'FFFF00',
- 'green': '008000',
- 'cyan': '00FFFF',
- 'aqua': '00FFFF',
- 'blue': '0000FF',
- 'brown': 'A52A2A',
- 'gray': '808080',
- 'grey': '808080',
- 'silver': 'C0C0C0',
- }
- if colors.has_key(color_in):
- return colors[color_in]
-
- return color_in
-
-def get_line_style(style):
- '''determine the line style
- '''
- line_style = {'line': 0,
- 'filled': 1,
- 'bold': 2,
- 'dot': 3,
- 'dashed': 4,
- 'gradient': 5,
- }
-
- if line_style.has_key(style):
- return line_style[style]
-
- return 0
-
-def get_calc_function(func):
- '''Determine the caclulation function'''
- rval = 2 # default to avg
- if 'min' in func:
- rval = 1
- elif 'max' in func:
- rval = 4
- elif 'all' in func:
- rval = 7
- elif 'last' in func:
- rval = 9
-
- return rval
-
-def get_graph_item_type(gtype):
- '''Determine the graph item type
- '''
- rval = 0 # simple graph type
- if 'sum' in gtype:
- rval = 2
-
- return rval
-
-def get_graph_items(zapi, gitems):
- '''Get graph items by id'''
-
- r_items = []
- for item in gitems:
- content = zapi.get_content('item',
- 'get',
- {'filter': {'name': item['item_name']}})
- _ = item.pop('item_name')
- color = get_color(item.pop('color'))
- drawtype = get_line_style(item.get('line_style', 'line'))
- func = get_calc_function(item.get('calc_func', 'avg'))
- g_type = get_graph_item_type(item.get('graph_item_type', 'simple'))
-
- if content.has_key('result'):
- tmp = {'itemid': content['result'][0]['itemid'],
- 'color': color,
- 'drawtype': drawtype,
- 'calc_fnc': func,
- 'type': g_type,
- }
- r_items.append(tmp)
-
- return r_items
-
-def compare_gitems(zabbix_items, user_items):
- '''Compare zabbix results with the user's supplied items
- return True if user_items are equal
- return False if any of the values differ
- '''
- if len(zabbix_items) != len(user_items):
- return False
-
- for u_item in user_items:
- for z_item in zabbix_items:
- if u_item['itemid'] == z_item['itemid']:
- if not all([str(value) == z_item[key] for key, value in u_item.items()]):
- return False
-
- return True
-
-# The branches are needed for CRUD and error handling
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible zabbix module for zbx_graphs
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- height=dict(default=None, type='int'),
- width=dict(default=None, type='int'),
- graph_type=dict(default='normal', type='str'),
- show_legend=dict(default='show', type='str'),
- state=dict(default='present', type='str'),
- graph_items=dict(default=None, type='list'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'graph'
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'filter': {'name': module.params['name']},
- #'templateids': templateid,
- 'selectGraphItems': 'extend',
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['graphid']])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- params = {'name': module.params['name'],
- 'height': module.params['height'],
- 'width': module.params['width'],
- 'graphtype': get_graph_type(module.params['graph_type']),
- 'show_legend': get_show_legend(module.params['show_legend']),
- 'gitems': get_graph_items(zapi, module.params['graph_items']),
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
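- # Zabbix returns most field values as strings, so compare against both the
- # raw value and its string form before recording a difference.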
- for key, value in params.items():
-
- if key == 'gitems':
- if not compare_gitems(zab_results[key], value):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences['graphid'] = zab_results['graphid']
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. This are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_graphprototype.py b/roles/lib_zabbix/library/zbx_graphprototype.py
deleted file mode 100644
index d63873b00..000000000
--- a/roles/lib_zabbix/library/zbx_graphprototype.py
+++ /dev/null
@@ -1,331 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for zabbix graphprototypes
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix graphprototypes ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#---
-#- hosts: localhost
-# gather_facts: no
-# tasks:
-# - zbx_graphprototype:
-# zbx_server: https://zabbixserver/zabbix/api_jsonrpc.php
-# zbx_user: Admin
-# zbx_password: zabbix
-# name: Test Graph
-# height: 300
-# width: 500
-# graph_items:
-# - item_name: Bytes per second IN on network interface {#OSO_NET_INTERFACE}
-# color: red
-# line_style: bold
-# item_type: prototype
-# - item_name: Template OS Linux: Bytes per second OUT on network interface {#OSO_NET_INTERFACE}
-# item_type: prototype
-#
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_graph_type(graphtype):
- '''
- Possible values:
- 0 - normal;
- 1 - stacked;
- 2 - pie;
- 3 - exploded;
- '''
- gtype = 0
- if 'stacked' in graphtype:
- gtype = 1
- elif 'pie' in graphtype:
- gtype = 2
- elif 'exploded' in graphtype:
- gtype = 3
-
- return gtype
-
-def get_show_legend(show_legend):
- '''Get the value for show_legend
- 0 - hide
- 1 - (default) show
- '''
- rval = 1
- if 'hide' == show_legend:
- rval = 0
-
- return rval
-
-def get_template_id(zapi, template_name):
- '''
- get related templates
- '''
- # Fetch templates by name
- content = zapi.get_content('template',
- 'get',
- {'filter': {'host': template_name},})
-
- if content.has_key('result'):
- return content['result'][0]['templateid']
-
- return None
-
-def get_color(color_in='black'):
- ''' Receive a color and translate it to a hex representation of the color
-
- A few common colors are set up by default
- '''
- colors = {'black': '000000',
- 'red': 'FF0000',
- 'pink': 'FFC0CB',
- 'purple': '800080',
- 'orange': 'FFA500',
- 'gold': 'FFD700',
- 'yellow': 'FFFF00',
- 'green': '008000',
- 'cyan': '00FFFF',
- 'aqua': '00FFFF',
- 'blue': '0000FF',
- 'brown': 'A52A2A',
- 'gray': '808080',
- 'grey': '808080',
- 'silver': 'C0C0C0',
- }
- if colors.has_key(color_in):
- return colors[color_in]
-
- return color_in
-
-def get_line_style(style):
- '''determine the line style
- '''
- line_style = {'line': 0,
- 'filled': 1,
- 'bold': 2,
- 'dot': 3,
- 'dashed': 4,
- 'gradient': 5,
- }
-
- if line_style.has_key(style):
- return line_style[style]
-
- return 0
-
-def get_calc_function(func):
- '''Determine the calculation function'''
- rval = 2 # default to avg
- if 'min' in func:
- rval = 1
- elif 'max' in func:
- rval = 4
- elif 'all' in func:
- rval = 7
- elif 'last' in func:
- rval = 9
-
- return rval
-
-def get_graph_item_type(gtype):
- '''Determine the graph item type
- '''
- rval = 0 # simple graph type
- if 'sum' in gtype:
- rval = 2
-
- return rval
-
-def get_graph_items(zapi, gitems):
- '''Get graph items by id'''
-
- r_items = []
- for item in gitems:
- content = zapi.get_content('item%s' % item.get('item_type', ''),
- 'get',
- {'filter': {'name': item['item_name']}})
- _ = item.pop('item_name')
- color = get_color(item.pop('color', 'black'))
- drawtype = get_line_style(item.get('line_style', 'line'))
- func = get_calc_function(item.get('calc_func', 'avg'))
- g_type = get_graph_item_type(item.get('graph_item_type', 'simple'))
-
- if content.has_key('result'):
- tmp = {'itemid': content['result'][0]['itemid'],
- 'color': color,
- 'drawtype': drawtype,
- 'calc_fnc': func,
- 'type': g_type,
- }
- r_items.append(tmp)
-
- return r_items
-
-def compare_gitems(zabbix_items, user_items):
- '''Compare zabbix results with the user's supplied items
- return True if user_items are equal
- return False if any of the values differ
- '''
- if len(zabbix_items) != len(user_items):
- return False
-
- for u_item in user_items:
- for z_item in zabbix_items:
- if u_item['itemid'] == z_item['itemid']:
- if not all([str(value) == z_item[key] for key, value in u_item.items()]):
- return False
-
- return True
-
-# The branches are needed for CRUD and error handling
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible zabbix module for zbx_graphprototypes
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- height=dict(default=None, type='int'),
- width=dict(default=None, type='int'),
- graph_type=dict(default='normal', type='str'),
- show_legend=dict(default='show', type='str'),
- state=dict(default='present', type='str'),
- graph_items=dict(default=None, type='list'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'graphprototype'
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'filter': {'name': module.params['name']},
- #'templateids': templateid,
- 'selectGraphItems': 'extend',
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['graphid']])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- params = {'name': module.params['name'],
- 'height': module.params['height'],
- 'width': module.params['width'],
- 'graphtype': get_graph_type(module.params['graph_type']),
- 'show_legend': get_show_legend(module.params['show_legend']),
- 'gitems': get_graph_items(zapi, module.params['graph_items']),
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'gitems':
- if not compare_gitems(zab_results[key], value):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences['graphid'] = zab_results['graphid']
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_host.py b/roles/lib_zabbix/library/zbx_host.py
deleted file mode 100644
index bbeec49ff..000000000
--- a/roles/lib_zabbix/library/zbx_host.py
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/usr/bin/env python
-'''
-Zabbix host ansible module
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_group_ids(zapi, hostgroup_names):
- '''
- get hostgroups
- '''
- # Fetch groups by name
- group_ids = []
- for hgr in hostgroup_names:
- content = zapi.get_content('hostgroup', 'get', {'search': {'name': hgr}})
- if content.has_key('result'):
- group_ids.append({'groupid': content['result'][0]['groupid']})
-
- return group_ids
-
-def get_template_ids(zapi, template_names):
- '''
- get related templates
- '''
- template_ids = []
- # Fetch templates by name
- for template_name in template_names:
- content = zapi.get_content('template', 'get', {'search': {'host': template_name}})
- if content.has_key('result'):
- template_ids.append({'templateid': content['result'][0]['templateid']})
- return template_ids
-
-def interfaces_equal(zbx_interfaces, user_interfaces):
- '''
- compare interfaces from zabbix and interfaces from user
- '''
-
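- # Every user-supplied key must match (compared as strings) in each Zabbix
- # interface; any mismatch means the interfaces differ.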
- for u_int in user_interfaces:
- for z_int in zbx_interfaces:
- for u_key, u_val in u_int.items():
- if str(z_int[u_key]) != str(u_val):
- return False
-
- return True
-
-def main():
- '''
- Ansible module for zabbix host
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- hostgroup_names=dict(default=[], type='list'),
- template_names=dict(default=[], type='list'),
- state=dict(default='present', type='str'),
- interfaces=dict(default=None, type='list'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'host'
- idname = "hostid"
- hname = module.params['name']
- state = module.params['state']
-
- # selectInterfaces doesn't appear to be working but is needed.
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'host': hname},
- 'selectGroups': 'groupid',
- 'selectParentTemplates': 'templateid',
- 'selectInterfaces': 'interfaceid',
- })
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- if state == 'present':
- ifs = module.params['interfaces'] or [{'type': 1, # interface type, 1 = agent
- 'main': 1, # default interface, 1 = true
- 'useip': 1, # connect via ip rather than dns, 1 = true
- 'ip': '127.0.0.1', # ip address for the interface
- 'dns': '', # dns name for the host
- 'port': '10050', # port for the interface, 10050 is the agent default
- }]
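- # De-duplicate the requested hostgroup names before resolving them to ids.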
- hostgroup_names = list(set(module.params['hostgroup_names']))
- params = {'host': hname,
- 'groups': get_group_ids(zapi, hostgroup_names),
- 'templates': get_template_ids(zapi, module.params['template_names']),
- 'interfaces': ifs,
- }
-
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
- module.exit_json(changed=True, results=content['result'], state='present')
- # already exists, we need to update it
- # let's compare properties
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'templates' and zab_results.has_key('parentTemplates'):
- if zab_results['parentTemplates'] != value:
- differences[key] = value
-
-
- elif key == "interfaces":
- if not interfaces_equal(zab_results[key], value):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_hostgroup.py b/roles/lib_zabbix/library/zbx_hostgroup.py
deleted file mode 100644
index 6c57d727e..000000000
--- a/roles/lib_zabbix/library/zbx_hostgroup.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-''' Ansible module for hostgroup
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix hostgroup ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def main():
- ''' ansible module for hostgroup
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'hostgroup'
- idname = "groupid"
- hname = module.params['name']
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'name': hname},
- })
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- if state == 'present':
- params = {'name': hname}
-
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
- module.exit_json(changed=True, results=content['result'], state='present')
- # already exists, we need to update it
- # let's compare properties
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
- if zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_httptest.py b/roles/lib_zabbix/library/zbx_httptest.py
deleted file mode 100644
index eab45d06e..000000000
--- a/roles/lib_zabbix/library/zbx_httptest.py
+++ /dev/null
@@ -1,290 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for zabbix httptests
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix httptest ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_authentication_method(auth):
- ''' determine authentication type'''
- rval = 0
- if 'basic' in auth:
- rval = 1
- elif 'ntlm' in auth:
- rval = 2
-
- return rval
-
-def get_verify_host(verify):
- '''
- get the values for verify_host
- '''
- if verify:
- return 1
-
- return 0
-
-def get_app_id(zapi, application):
- '''
- get the application id for the given application name
- '''
- # Fetch templates by name
- content = zapi.get_content('application',
- 'get',
- {'search': {'name': application},
- 'selectApplications': ['applicationid', 'name']})
- if content.has_key('result'):
- return content['result'][0]['applicationid']
-
- return None
-
-def get_template_id(zapi, template_name):
- '''
- get related templates
- '''
- # Fetch templates by name
- content = zapi.get_content('template',
- 'get',
- {'search': {'host': template_name},
- 'selectApplications': ['applicationid', 'name']})
- if content.has_key('result'):
- return content['result'][0]['templateid']
-
- return None
-
-def get_host_id_by_name(zapi, host_name):
- '''Get host id by name'''
- content = zapi.get_content('host',
- 'get',
- {'filter': {'name': host_name}})
-
- return content['result'][0]['hostid']
-
-def get_status(status):
- ''' Determine the status of the web scenario '''
- rval = 0
- if 'disabled' in status:
- return 1
-
- return rval
-
-def find_step(idx, step_list):
- ''' find step by index '''
- for step in step_list:
- if str(step['no']) == str(idx):
- return step
-
- return None
-
-def steps_equal(zab_steps, user_steps):
- '''compare steps returned from zabbix
- and steps passed from user
- '''
-
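- # Steps are paired by their 1-based 'no' index; every user-supplied field
- # must match the Zabbix value after string conversion.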
- if len(user_steps) != len(zab_steps):
- return False
-
- for idx in range(1, len(user_steps)+1):
-
- user = find_step(idx, user_steps)
- zab = find_step(idx, zab_steps)
-
- for key, value in user.items():
- if str(value) != str(zab[key]):
- return False
-
- return True
-
-def process_steps(steps):
- '''Preprocess the step parameters'''
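- # Zabbix expects each web scenario step to carry a 1-based 'no' ordering
- # field; fill it in when the caller did not supply one.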
- for idx, step in enumerate(steps):
- if not step.has_key('no'):
- step['no'] = idx + 1
-
- return steps
-
-# The branches are needed for CRUD and error handling
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible zabbix module for zbx_httptest
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, required=True, type='str'),
- agent=dict(default=None, type='str'),
- template_name=dict(default=None, type='str'),
- host_name=dict(default=None, type='str'),
- interval=dict(default=60, type='int'),
- application=dict(default=None, type='str'),
- authentication=dict(default=None, type='str'),
- http_user=dict(default=None, type='str'),
- http_password=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- status=dict(default='enabled', type='str'),
- steps=dict(default='present', type='list'),
- verify_host=dict(default=False, type='bool'),
- retries=dict(default=1, type='int'),
- headers=dict(default=None, type='dict'),
- query_type=dict(default='filter', choices=['filter', 'search'], type='str'),
- ),
- #supports_check_mode=True
- mutually_exclusive=[['template_name', 'host_name']],
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'httptest'
- state = module.params['state']
- hostid = None
-
- # If a template name was passed then accept the template
- if module.params['template_name']:
- hostid = get_template_id(zapi, module.params['template_name'])
- else:
- hostid = get_host_id_by_name(zapi, module.params['host_name'])
-
- # Fail if a template was not found matching the name
- if not hostid:
- module.exit_json(failed=True,
- changed=False,
- results='Error: Could not find template or host with name [%s].' %
- (module.params['template_name'] or module.params['host_name']),
- state="unknown")
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {module.params['query_type']: {'name': module.params['name']},
- 'selectSteps': 'extend',
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['httptestid']])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- params = {'name': module.params['name'],
- 'hostid': hostid,
- 'agent': module.params['agent'],
- 'retries': module.params['retries'],
- 'steps': process_steps(module.params['steps']),
- 'applicationid': get_app_id(zapi, module.params['application']),
- 'delay': module.params['interval'],
- 'verify_host': get_verify_host(module.params['verify_host']),
- 'status': get_status(module.params['status']),
- 'headers': module.params['headers'],
- 'http_user': module.params['http_user'],
- 'http_password': module.params['http_password'],
- }
-
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'steps':
- if not steps_equal(zab_results[key], value):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- # We have differences and need to update
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- differences['httptestid'] = zab_results['httptestid']
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_item.py b/roles/lib_zabbix/library/zbx_item.py
deleted file mode 100644
index 189485fb7..000000000
--- a/roles/lib_zabbix/library/zbx_item.py
+++ /dev/null
@@ -1,303 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for zabbix items
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix item ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_data_type(data_type):
- '''
- Possible values:
- 0 - decimal;
- 1 - octal;
- 2 - hexadecimal;
- 3 - bool;
- '''
- vtype = 0
- if 'octal' in data_type:
- vtype = 1
- elif 'hexadecimal' in data_type:
- vtype = 2
- elif 'bool' in data_type:
- vtype = 3
-
- return vtype
-
-def get_value_type(value_type):
- '''
- Possible values:
- 0 - numeric float;
- 1 - character;
- 2 - log;
- 3 - numeric unsigned;
- 4 - text
- '''
- vtype = 0
- if 'int' in value_type:
- vtype = 3
- elif 'log' in value_type:
- vtype = 2
- elif 'char' in value_type:
- vtype = 1
- elif 'str' in value_type:
- vtype = 4
-
- return vtype
-
-def get_app_ids(application_names, app_name_ids):
- ''' get application ids from names
- '''
- applications = []
- if application_names:
- for app in application_names:
- applications.append(app_name_ids[app])
-
- return applications
-
-def get_template_id(zapi, template_name):
- '''
- get related templates
- '''
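- # Returns a ([templateid], {application_name: applicationid}) pair so the
- # caller can resolve the template and its applications in a single call.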
- template_ids = []
- app_ids = {}
- # Fetch templates by name
- content = zapi.get_content('template',
- 'get',
- {'search': {'host': template_name},
- 'selectApplications': ['applicationid', 'name']})
- if content.has_key('result'):
- template_ids.append(content['result'][0]['templateid'])
- for app in content['result'][0]['applications']:
- app_ids[app['name']] = app['applicationid']
-
- return template_ids, app_ids
-
-def get_multiplier(inval):
- ''' Determine the multiplier
- '''
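- # Returns a (formula, use_multiplier) tuple: the integer multiplier (or
- # None) and a 0/1 flag telling Zabbix whether to apply it.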
- if inval == None or inval == '':
- return None, 0
-
- rval = None
- try:
- rval = int(inval)
- except ValueError:
- pass
-
- if rval:
- return rval, 1
-
- return rval, 0
-
-def get_zabbix_type(ztype):
- '''
- Determine which zabbix item type this is
- '''
- _types = {'agent': 0,
- 'SNMPv1': 1,
- 'trapper': 2,
- 'simple': 3,
- 'SNMPv2': 4,
- 'internal': 5,
- 'SNMPv3': 6,
- 'active': 7,
- 'aggregate': 8,
- 'web': 9,
- 'external': 10,
- 'database monitor': 11,
- 'ipmi': 12,
- 'ssh': 13,
- 'telnet': 14,
- 'calculated': 15,
- 'JMX': 16,
- 'SNMP trap': 17,
- }
-
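- # Substring match against the known type names; the for/else defaults to
- # 2 (trapper) when nothing matches.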
- for typ in _types.keys():
- if ztype in typ or ztype == typ:
- _vtype = _types[typ]
- break
- else:
- _vtype = 2
-
- return _vtype
-
-# The branches are needed for CRUD and error handling
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible zabbix module for zbx_item
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- key=dict(default=None, type='str'),
- template_name=dict(default=None, type='str'),
- zabbix_type=dict(default='trapper', type='str'),
- value_type=dict(default='int', type='str'),
- data_type=dict(default='decimal', type='str'),
- interval=dict(default=60, type='int'),
- delta=dict(default=0, type='int'),
- multiplier=dict(default=None, type='str'),
- description=dict(default=None, type='str'),
- units=dict(default=None, type='str'),
- applications=dict(default=None, type='list'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'item'
- state = module.params['state']
-
- templateid, app_name_ids = get_template_id(zapi, module.params['template_name'])
-
- # Fail if a template was not found matching the name
- if not templateid:
- module.exit_json(failed=True,
- changed=False,
- results='Error: Could not find template with name %s for item.' % module.params['template_name'],
- state="unknown")
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'key_': module.params['key']},
- 'selectApplications': 'applicationid',
- 'templateids': templateid,
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['itemid']])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- formula, use_multiplier = get_multiplier(module.params['multiplier'])
- params = {'name': module.params['name'] or module.params['key'],
- 'key_': module.params['key'],
- 'hostid': templateid[0],
- 'type': get_zabbix_type(module.params['zabbix_type']),
- 'value_type': get_value_type(module.params['value_type']),
- 'data_type': get_data_type(module.params['data_type']),
- 'applications': get_app_ids(module.params['applications'], app_name_ids),
- 'formula': formula,
- 'multiplier': use_multiplier,
- 'description': module.params['description'],
- 'units': module.params['units'],
- 'delay': module.params['interval'],
- 'delta': module.params['delta'],
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- _ = params.pop('hostid', None)
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'applications':
- app_ids = [item['applicationid'] for item in zab_results[key]]
- if set(app_ids) != set(value):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences['itemid'] = zab_results['itemid']
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_itemprototype.py b/roles/lib_zabbix/library/zbx_itemprototype.py
deleted file mode 100644
index eab2a04ae..000000000
--- a/roles/lib_zabbix/library/zbx_itemprototype.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python
-'''
-Zabbix item prototype ansible module
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_rule_id(zapi, discoveryrule_key, templateid):
- '''get a discoveryrule by name
- '''
- content = zapi.get_content('discoveryrule',
- 'get',
- {'search': {'key_': discoveryrule_key},
- 'output': 'extend',
- 'templateids': templateid,
- })
- if not content['result']:
- return None
- return content['result'][0]['itemid']
-
-def get_template(zapi, template_name):
- '''get a template by name
- '''
- if not template_name:
- return None
-
- content = zapi.get_content('template',
- 'get',
- {'search': {'host': template_name},
- 'output': 'extend',
- 'selectInterfaces': 'interfaceid',
- })
- if not content['result']:
- return None
- return content['result'][0]
-
-def get_multiplier(inval):
- ''' Determine the multiplier
- '''
- if inval == None or inval == '':
- return None, 0
-
- rval = None
- try:
- rval = int(inval)
- except ValueError:
- pass
-
- if rval:
- return rval, 1
-
- return rval, 0
-
-def get_zabbix_type(ztype):
- '''
- Determine which zabbix item type this is
- '''
- _types = {'agent': 0,
- 'SNMPv1': 1,
- 'trapper': 2,
- 'simple': 3,
- 'SNMPv2': 4,
- 'internal': 5,
- 'SNMPv3': 6,
- 'active': 7,
- 'aggregate': 8,
- 'external': 10,
- 'database monitor': 11,
- 'ipmi': 12,
- 'ssh': 13,
- 'telnet': 14,
- 'calculated': 15,
- 'JMX': 16,
- 'SNMP trap': 17,
- }
-
- for typ in _types.keys():
- if ztype in typ or ztype == typ:
- _vtype = _types[typ]
- break
- else:
- _vtype = 2
-
- return _vtype
-
-def get_data_type(data_type):
- '''
- Possible values:
- 0 - decimal;
- 1 - octal;
- 2 - hexadecimal;
- 3 - bool;
- '''
- vtype = 0
- if 'octal' in data_type:
- vtype = 1
- elif 'hexadecimal' in data_type:
- vtype = 2
- elif 'bool' in data_type:
- vtype = 3
-
- return vtype
-
-def get_value_type(value_type):
- '''
- Possible values:
- 0 - numeric float;
- 1 - character;
- 2 - log;
- 3 - numeric unsigned;
- 4 - text
- '''
- vtype = 0
- if 'int' in value_type:
- vtype = 3
- elif 'char' in value_type:
- vtype = 1
- elif 'str' in value_type:
- vtype = 4
-
- return vtype
-
-def get_status(status):
- ''' Determine status
- '''
- _status = 0
- if status == 'disabled':
- _status = 1
- elif status == 'unsupported':
- _status = 3
-
- return _status
-
-def get_app_ids(zapi, application_names, templateid):
- ''' get application ids from names
- '''
- app_ids = []
- for app_name in application_names:
- content = zapi.get_content('application', 'get', {'filter': {'name': app_name}, 'templateids': templateid})
- if content.has_key('result'):
- app_ids.append(content['result'][0]['applicationid'])
- return app_ids
-
-# pylint: disable=too-many-branches
-def main():
- '''
- Ansible module for zabbix item prototypes
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- key=dict(default=None, type='str'),
- description=dict(default=None, type='str'),
- template_name=dict(default=None, type='str'),
- interfaceid=dict(default=None, type='int'),
- zabbix_type=dict(default='trapper', type='str'),
- value_type=dict(default='float', type='str'),
- data_type=dict(default='decimal', type='str'),
- delay=dict(default=60, type='int'),
- lifetime=dict(default=30, type='int'),
- state=dict(default='present', type='str'),
- status=dict(default='enabled', type='str'),
- applications=dict(default=[], type='list'),
- discoveryrule_key=dict(default=None, type='str'),
- interval=dict(default=60, type='int'),
- delta=dict(default=0, type='int'),
- multiplier=dict(default=None, type='str'),
- units=dict(default=None, type='str'),
-
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'itemprototype'
- idname = "itemid"
- state = module.params['state']
- template = get_template(zapi, module.params['template_name'])
-
- # selectInterfaces doesn't appear to be working but is needed.
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'key_': module.params['key']},
- 'selectApplications': 'applicationid',
- 'selectDiscoveryRule': 'itemid',
- 'templated': True,
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- formula, use_multiplier = get_multiplier(module.params['multiplier'])
-
- params = {'name': module.params['name'],
- 'key_': module.params['key'],
- 'hostid': template['templateid'],
- 'interfaceid': module.params['interfaceid'],
- 'ruleid': get_rule_id(zapi, module.params['discoveryrule_key'], template['templateid']),
- 'type': get_zabbix_type(module.params['zabbix_type']),
- 'value_type': get_value_type(module.params['value_type']),
- 'data_type': get_data_type(module.params['data_type']),
- 'applications': get_app_ids(zapi, module.params['applications'], template['templateid']),
- 'formula': formula,
- 'multiplier': use_multiplier,
- 'description': module.params['description'],
- 'units': module.params['units'],
- 'delay': module.params['interval'],
- 'delta': module.params['delta'],
- }
-
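- # Trapper, internal, active, aggregate, database monitor and calculated
- # item types are not bound to a host interface, so drop interfaceid.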
- if params['type'] in [2, 5, 7, 8, 11, 15]:
- params.pop('interfaceid')
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
- #******#
- # UPDATE
- #******#
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'ruleid':
- if value != zab_results['discoveryRule']['itemid']:
- differences[key] = value
-
- elif key == 'applications':
- app_ids = [app['applicationid'] for app in zab_results[key]]
- if set(app_ids) - set(value):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_itservice.py b/roles/lib_zabbix/library/zbx_itservice.py
deleted file mode 100644
index aa37f0a2b..000000000
--- a/roles/lib_zabbix/library/zbx_itservice.py
+++ /dev/null
@@ -1,263 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for zabbix itservices
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix itservice ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_parent(dependencies):
- '''Return the parent dependency if one is present'''
- rval = None
- for dep in dependencies:
- if dep['relationship'] == 'parent':
- return dep
- return rval
-
-def format_dependencies(dependencies):
- '''Put dependencies into the proper update format'''
- rval = []
- for dep in dependencies:
- rval.append({'dependsOnServiceid': dep['serviceid'],
- 'soft': get_dependency_type(dep['dep_type']),
- })
-
- return rval
-
-def get_dependency_type(dep_type):
- '''Determine the dependency type'''
- rval = 0
- if 'soft' == dep_type:
- rval = 1
-
- return rval
-
-def get_service_id_by_name(zapi, dependencies):
- '''Fetch the service id for an itservice'''
- deps = []
- for dep in dependencies:
- if dep['name'] == 'root':
- deps.append(dep)
- continue
-
- content = zapi.get_content('service',
- 'get',
- {'filter': {'name': dep['name']},
- 'selectDependencies': 'extend',
- })
- if content.has_key('result') and content['result']:
- dep['serviceid'] = content['result'][0]['serviceid']
- deps.append(dep)
-
- return deps
-
-def add_dependencies(zapi, service_name, dependencies):
- '''Add dependencies to an itservice
-
- Add a dependency on the parent for this current service item.
- '''
-
- results = get_service_id_by_name(zapi, [{'name': service_name}])
-
- content = {}
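- # Add each dependency via service.adddependencies; stop at the first call
- # that does not return a result so the error is reported to the caller.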
- for dep in dependencies:
- content = zapi.get_content('service',
- 'adddependencies',
- {'serviceid': results[0]['serviceid'],
- 'dependsOnServiceid': dep['serviceid'],
- 'soft': get_dependency_type(dep['dep_type']),
- })
- if content.has_key('result') and content['result']:
- continue
- else:
- break
-
- return content
-
-def get_show_sla(inc_sla):
- ''' Determine the showsla parameter
- '''
- rval = 1
- if 'do not calculate' in inc_sla:
- rval = 0
- return rval
-
-def get_algorithm(inc_algorithm_str):
- '''
- Determine which type of algorithm to use
- '''
- rval = 0
- if 'at least one' in inc_algorithm_str:
- rval = 1
- elif 'all' in inc_algorithm_str:
- rval = 2
-
- return rval
-
-# The branches are needed for CRUD and error handling
-# pylint: disable=too-many-branches
-def main():
- '''
- ansible zabbix module for zbx_itservice
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- algorithm=dict(default='do not calculate', choices=['do not calculate', 'at least one', 'all'], type='str'),
- show_sla=dict(default='calculate', choices=['do not calculate', 'calculate'], type='str'),
- good_sla=dict(default='99.9', type='float'),
- sort_order=dict(default=1, type='int'),
- state=dict(default='present', type='str'),
- trigger_id=dict(default=None, type='int'),
- dependencies=dict(default=[], type='list'),
- dep_type=dict(default='hard', choices=['hard', 'soft'], type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'service'
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'filter': {'name': module.params['name']},
- 'selectDependencies': 'extend',
- })
-
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['serviceid']])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- dependencies = get_service_id_by_name(zapi, module.params['dependencies'])
- params = {'name': module.params['name'],
- 'algorithm': get_algorithm(module.params['algorithm']),
- 'showsla': get_show_sla(module.params['show_sla']),
- 'goodsla': module.params['good_sla'],
- 'sortorder': module.params['sort_order'],
- 'triggerid': module.params['trigger_id']
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- if dependencies:
- content = add_dependencies(zapi, module.params['name'], dependencies)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- params['dependencies'] = dependencies
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'goodsla':
- if float(value) != float(zab_results[key]):
- differences[key] = value
-
- elif key == 'dependencies':
- zab_dep_ids = [item['serviceid'] for item in zab_results[key]]
- user_dep_ids = [item['serviceid'] for item in dependencies]
- if set(zab_dep_ids) != set(user_dep_ids):
- differences[key] = format_dependencies(dependencies)
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- differences['serviceid'] = zab_results['serviceid']
- content = zapi.get_content(zbx_class_name, 'update', differences)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required
-from ansible.module_utils.basic import *
-
-main()
diff --git a/roles/lib_zabbix/library/zbx_mediatype.py b/roles/lib_zabbix/library/zbx_mediatype.py
deleted file mode 100644
index b8def3ca4..000000000
--- a/roles/lib_zabbix/library/zbx_mediatype.py
+++ /dev/null
@@ -1,168 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for mediatype
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix mediatype ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because these modules look similar to one another.
-# They need duplicated code because their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_mtype(mtype):
- '''
- Transport used by the media type.
- Possible values:
- 0 - email;
- 1 - script;
- 2 - SMS;
- 3 - Jabber;
- 100 - Ez Texting.
- '''
- mtype = mtype.lower()
- media_type = None
- if mtype == 'script':
- media_type = 1
- elif mtype == 'sms':
- media_type = 2
- elif mtype == 'jabber':
- media_type = 3
- elif mtype == 'ez texting':
- media_type = 100
- else:
- media_type = 0
-
- return media_type
-
-def main():
- '''
- Ansible zabbix module for mediatype
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- description=dict(default=None, type='str'),
- mtype=dict(default=None, type='str'),
- smtp_server=dict(default=None, type='str'),
- smtp_helo=dict(default=None, type='str'),
- smtp_email=dict(default=None, type='str'),
- passwd=dict(default=None, type='str'),
- path=dict(default=None, type='str'),
- username=dict(default=None, type='str'),
- status=dict(default='enabled', type='str'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'mediatype'
- idname = "mediatypeid"
- description = module.params['description']
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name, 'get', {'search': {'description': description}})
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- if state == 'present':
- status = 1
- if module.params['status'] == 'enabled':
- status = 0
- params = {'description': description,
- 'type': get_mtype(module.params['mtype']),
- 'smtp_server': module.params['smtp_server'],
- 'smtp_helo': module.params['smtp_helo'],
- 'smtp_email': module.params['smtp_email'],
- 'passwd': module.params['passwd'],
- 'exec_path': module.params['path'],
- 'username': module.params['username'],
- 'status': status,
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
- # already exists, we need to update it
- # let's compare properties
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
- if zab_results[key] != value and \
- zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required.
-from ansible.module_utils.basic import *
-
-main()
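
get_mtype above maps a human-readable media type onto the numeric transport listed in its docstring (0 email, 1 script, 2 SMS, 3 Jabber, 100 Ez Texting). A table-driven sketch of the same mapping, purely illustrative:

# Illustrative sketch only: the transport mapping from get_mtype, as a lookup table.
MEDIA_TYPES = {
    'email': 0,
    'script': 1,
    'sms': 2,
    'jabber': 3,
    'ez texting': 100,
}

def map_media_type(mtype):
    '''Map a human-readable media type to the Zabbix numeric transport, defaulting to email.'''
    return MEDIA_TYPES.get(mtype.lower(), 0)

assert map_media_type('SMS') == 2
assert map_media_type('unknown') == 0
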
diff --git a/roles/lib_zabbix/library/zbx_template.py b/roles/lib_zabbix/library/zbx_template.py
deleted file mode 100644
index cc713b998..000000000
--- a/roles/lib_zabbix/library/zbx_template.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-'''
-Ansible module for template
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix template ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because the modules look very similar to one another.
-# They intentionally duplicate code, since their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def main():
- ''' Ansible module for template
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'template'
- idname = 'templateid'
- tname = module.params['name']
- state = module.params['state']
- # get a template, see if it exists
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'host': tname},
- 'selectParentTemplates': 'templateid',
- 'selectGroups': 'groupid',
- 'selectApplications': 'applicationid',
- 'selectDiscoveries': 'extend',
- })
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- if not tname:
- module.exit_json(failed=True,
- changed=False,
- results='Must specify a template name.',
- state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- if state == 'present':
- params = {'groups': module.params.get('groups', [{'groupid': '1'}]),
- 'host': tname,
- }
-
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
- module.exit_json(changed=True, results=content['result'], state='present')
- # already exists, we need to update it
- # let's compare properties
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
- if key == 'templates' and zab_results.has_key('parentTemplates'):
- if zab_results['parentTemplates'] != value:
- differences[key] = value
- elif zab_results[key] != str(value) and zab_results[key] != value:
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=content['result'], state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required.
-from ansible.module_utils.basic import *
-
-main()
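
Each of these modules strips None-valued params before calling create or update, using a pop-while-iterating idiom that only works on Python 2. A small sketch of the same idea written so it also runs on Python 3; the helper name is invented for illustration:

# Illustrative sketch: drop None-valued params without mutating a dict
# while iterating its keys (which raises on Python 3).
def prune_none(params):
    '''Return a copy of params without None-valued entries.'''
    return {key: value for key, value in params.items() if value is not None}

print(prune_none({'host': 'Template Heartbeat', 'groups': None}))
# -> {'host': 'Template Heartbeat'}
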
diff --git a/roles/lib_zabbix/library/zbx_trigger.py b/roles/lib_zabbix/library/zbx_trigger.py
deleted file mode 100644
index 323defbd9..000000000
--- a/roles/lib_zabbix/library/zbx_trigger.py
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/usr/bin/env python
-'''
-ansible module for zabbix triggers
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix trigger ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because the modules look very similar to one another.
-# They intentionally duplicate code, since their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_priority(priority):
- ''' determine priority
- '''
- prior = 0
- if 'info' in priority:
- prior = 1
- elif 'warn' in priority:
- prior = 2
- elif 'avg' == priority or 'ave' in priority:
- prior = 3
- elif 'high' in priority:
- prior = 4
- elif 'dis' in priority:
- prior = 5
-
- return prior
-
-def get_deps(zapi, deps):
- ''' get trigger dependencies
- '''
- results = []
- for desc in deps:
- content = zapi.get_content('trigger',
- 'get',
- {'filter': {'description': desc},
- 'expandExpression': True,
- 'selectDependencies': 'triggerid',
- })
- if content.has_key('result'):
- results.append({'triggerid': content['result'][0]['triggerid']})
-
- return results
-
-
-def get_trigger_status(inc_status):
- ''' Determine the trigger's status
- 0 is enabled
- 1 is disabled
- '''
- r_status = 0
- if inc_status == 'disabled':
- r_status = 1
-
- return r_status
-
-def get_template_id(zapi, template_name):
- '''
- get related templates
- '''
- template_ids = []
- app_ids = {}
- # Fetch templates by name
- content = zapi.get_content('template',
- 'get',
- {'search': {'host': template_name},
- 'selectApplications': ['applicationid', 'name']})
- if content.has_key('result'):
- template_ids.append(content['result'][0]['templateid'])
- for app in content['result'][0]['applications']:
- app_ids[app['name']] = app['applicationid']
-
- return template_ids, app_ids
-
-def main():
- '''
- Create a trigger in zabbix
-
- Example:
- "params": {
- "description": "Processor load is too high on {HOST.NAME}",
- "expression": "{Linux server:system.cpu.load[percpu,avg1].last()}>5",
- "dependencies": [
- {
- "triggerid": "14062"
- }
- ]
- },
-
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- expression=dict(default=None, type='str'),
- name=dict(default=None, type='str'),
- description=dict(default=None, type='str'),
- dependencies=dict(default=[], type='list'),
- priority=dict(default='avg', type='str'),
- url=dict(default=None, type='str'),
- status=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- template_name=dict(default=None, type='str'),
- hostgroup_name=dict(default=None, type='str'),
- query_type=dict(default='filter', choices=['filter', 'search'], type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'trigger'
- idname = "triggerid"
- state = module.params['state']
- tname = module.params['name']
-
- templateid = None
- if module.params['template_name']:
- templateid, _ = get_template_id(zapi, module.params['template_name'])
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {module.params['query_type']: {'description': tname},
- 'expandExpression': True,
- 'selectDependencies': 'triggerid',
- 'templateids': templateid,
- 'group': module.params['hostgroup_name'],
- })
-
- # Get
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- # Delete
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
- params = {'description': tname,
- 'comments': module.params['description'],
- 'expression': module.params['expression'],
- 'dependencies': get_deps(zapi, module.params['dependencies']),
- 'priority': get_priority(module.params['priority']),
- 'url': module.params['url'],
- 'status': get_trigger_status(module.params['status']),
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required.
-from ansible.module_utils.basic import *
-
-main()
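
get_deps above resolves human-readable trigger descriptions into the {'triggerid': ...} entries the Zabbix API expects, silently skipping descriptions that do not resolve. A self-contained sketch with the API call stubbed out; the names and sample data are illustrative only:

# Illustrative sketch: dependency resolution with the Zabbix call stubbed out.
def resolve_dependencies(lookup, descriptions):
    '''Map trigger descriptions to [{'triggerid': ...}] entries, skipping unknown ones.'''
    deps = []
    for desc in descriptions:
        result = lookup(desc)          # stand-in for a trigger.get API call
        if result:
            deps.append({'triggerid': result[0]['triggerid']})
    return deps

fake_triggers = {'CPU load too high': [{'triggerid': '14062'}]}
print(resolve_dependencies(lambda d: fake_triggers.get(d, []),
                           ['CPU load too high', 'missing']))
# -> [{'triggerid': '14062'}]
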
diff --git a/roles/lib_zabbix/library/zbx_triggerprototype.py b/roles/lib_zabbix/library/zbx_triggerprototype.py
deleted file mode 100644
index 34a7396a7..000000000
--- a/roles/lib_zabbix/library/zbx_triggerprototype.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python
-'''
-ansible module for zabbix triggerprototypes
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix triggerprototypes ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because the modules look very similar to one another.
-# They intentionally duplicate code, since their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_priority(priority):
- ''' determine priority
- '''
- prior = 0
- if 'info' in priority:
- prior = 1
- elif 'warn' in priority:
- prior = 2
- elif 'avg' == priority or 'ave' in priority:
- prior = 3
- elif 'high' in priority:
- prior = 4
- elif 'dis' in priority:
- prior = 5
-
- return prior
-
-def get_trigger_status(inc_status):
- ''' Determine the trigger's status
- 0 is enabled
- 1 is disabled
- '''
- r_status = 0
- if inc_status == 'disabled':
- r_status = 1
-
- return r_status
-
-
-def main():
- '''
- Create a triggerprototype in zabbix
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- name=dict(default=None, type='str'),
- expression=dict(default=None, type='str'),
- description=dict(default=None, type='str'),
- priority=dict(default='avg', type='str'),
- url=dict(default=None, type='str'),
- status=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'triggerprototype'
- idname = "triggerid"
- state = module.params['state']
- tname = module.params['name']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'filter': {'description': tname},
- 'expandExpression': True,
- 'selectDependencies': 'triggerid',
- })
-
- # Get
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- # Delete
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
- params = {'description': tname,
- 'comments': module.params['description'],
- 'expression': module.params['expression'],
- 'priority': get_priority(module.params['priority']),
- 'url': module.params['url'],
- 'status': get_trigger_status(module.params['status']),
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. This are required
-from ansible.module_utils.basic import *
-
-main()
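
Both trigger modules map loose priority strings onto Zabbix severities 0 through 5 by substring matching. A compact sketch of that mapping, assuming the same substrings as get_priority above:

# Illustrative sketch: substring-based priority mapping (0 = not classified).
def map_priority(priority):
    '''Map a loose priority string to a Zabbix severity number.'''
    for token, value in (('info', 1), ('warn', 2), ('avg', 3), ('ave', 3),
                         ('high', 4), ('dis', 5)):
        if token in priority:
            return value
    return 0

assert map_priority('average') == 3
assert map_priority('disaster') == 5
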
diff --git a/roles/lib_zabbix/library/zbx_user.py b/roles/lib_zabbix/library/zbx_user.py
deleted file mode 100644
index 68c5cfbfe..000000000
--- a/roles/lib_zabbix/library/zbx_user.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python
-'''
-ansible module for zabbix users
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix user ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# This is in place because the modules look very similar to one another.
-# They intentionally duplicate code, since their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_usergroups(zapi, usergroups):
- ''' Get usergroups
- '''
- ugroups = []
- for ugr in usergroups:
- content = zapi.get_content('usergroup',
- 'get',
- {'search': {'name': ugr},
- #'selectUsers': 'userid',
- #'getRights': 'extend'
- })
- if content['result']:
- ugroups.append({'usrgrpid': content['result'][0]['usrgrpid']})
-
- return ugroups or None
-
-def get_passwd(passwd):
- '''Determine if password is set, if not, return 'zabbix'
- '''
- if passwd:
- return passwd
-
- return 'zabbix'
-
-def get_usertype(user_type):
- '''
- Determine zabbix user account type
- '''
- if not user_type:
- return None
-
- utype = 1
- if 'super' in user_type:
- utype = 3
- elif 'admin' in user_type:
- utype = 2
-
- return utype
-
-def main():
- '''
- ansible zabbix module for users
- '''
-
- ##def user(self, name, state='present', params=None):
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- login=dict(default=None, type='str'),
- first_name=dict(default=None, type='str'),
- last_name=dict(default=None, type='str'),
- user_type=dict(default=None, type='str'),
- password=dict(default=None, type='str'),
- refresh=dict(default=None, type='int'),
- update_password=dict(default=False, type='bool'),
- user_groups=dict(default=[], type='list'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- ## Before we can create user media (or users with media types), the media types must already exist
- zbx_class_name = 'user'
- idname = "userid"
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'output': 'extend',
- 'search': {'alias': module.params['login']},
- "selectUsrgrps": 'usergrpid',
- })
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- if state == 'absent':
- if not exists(content) or len(content['result']) == 0:
- module.exit_json(changed=False, state="absent")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- if state == 'present':
-
- params = {'alias': module.params['login'],
- 'passwd': get_passwd(module.params['password']),
- 'usrgrps': get_usergroups(zapi, module.params['user_groups']),
- 'name': module.params['first_name'],
- 'surname': module.params['last_name'],
- 'refresh': module.params['refresh'],
- 'type': get_usertype(module.params['user_type']),
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] is None]
-
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('Error'):
- module.exit_json(failed=True, changed=False, results=content, state='present')
-
- module.exit_json(changed=True, results=content['result'], state='present')
- # already exists, we need to update it
- # let's compare properties
- differences = {}
-
- # Update password
- if not module.params['update_password']:
- params.pop('passwd', None)
-
- zab_results = content['result'][0]
- for key, value in params.items():
-
- if key == 'usrgrps':
- # compare membership rather than equality: a list of ordered dictionaries fails a direct comparison
- if not all([_ in value for _ in zab_results[key]]):
- differences[key] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required.
-from ansible.module_utils.basic import *
-
-main()
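
The update branch above compares usergroups by membership rather than by straight list equality, because Zabbix may return the groups in a different order. A minimal sketch of that comparison with invented sample data:

# Illustrative sketch: order-insensitive usergroup comparison.
def usergroups_differ(current, desired):
    '''True when the user's current groups are not all present in the desired list.'''
    return not all(group in desired for group in current)

current = [{'usrgrpid': '12'}, {'usrgrpid': '7'}]
desired = [{'usrgrpid': '7'}, {'usrgrpid': '12'}]
print(usergroups_differ(current, desired))  # False -> no update needed
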
diff --git a/roles/lib_zabbix/library/zbx_user_media.py b/roles/lib_zabbix/library/zbx_user_media.py
deleted file mode 100644
index fc5624346..000000000
--- a/roles/lib_zabbix/library/zbx_user_media.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python
-'''
- Ansible module for user media
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix user media ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because the modules look very similar to one another.
-# They intentionally duplicate code, since their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_mtype(zapi, mtype):
- '''Get mediatype
-
- If passed an int, return it as the mediatypeid
- if its a string, then try to fetch through a description
- '''
- if isinstance(mtype, int):
- return mtype
- try:
- return int(mtype)
- except ValueError:
- pass
-
- content = zapi.get_content('mediatype', 'get', {'filter': {'description': mtype}})
- if content.has_key('result') and content['result']:
- return content['result'][0]['mediatypeid']
-
- return None
-
-def get_user(zapi, user):
- ''' Get userids from user aliases
- '''
- content = zapi.get_content('user', 'get', {'filter': {'alias': user}})
- if content['result']:
- return content['result'][0]
-
- return None
-
-def get_severity(severity):
- ''' determine severity
- '''
- if isinstance(severity, int) or \
- isinstance(severity, str):
- return severity
-
- val = 0
- sev_map = {
- 'not': 2**0,
- 'inf': 2**1,
- 'war': 2**2,
- 'ave': 2**3,
- 'avg': 2**3,
- 'hig': 2**4,
- 'dis': 2**5,
- }
- for level in severity:
- val |= sev_map[level[:3].lower()]
- return val
-
-def get_zbx_user_query_data(zapi, user_name):
- ''' If name exists, retrieve it, and build query params.
- '''
- query = {}
- if user_name:
- zbx_user = get_user(zapi, user_name)
- query = {'userid': zbx_user['userid']}
-
- return query
-
-def find_media(medias, user_media):
- ''' Find the user media in the list of medias
- '''
- for media in medias:
- if all([media[key] == str(user_media[key]) for key in user_media.keys()]):
- return media
- return None
-
-def get_active(is_active):
- '''Determine active value
- 0 - enabled
- 1 - disabled
- '''
- active = 1
- if is_active:
- active = 0
-
- return active
-
-def get_mediatype(zapi, mediatype, mediatype_desc):
- ''' Determine mediatypeid
- '''
- mtypeid = None
- if mediatype:
- mtypeid = get_mtype(zapi, mediatype)
- elif mediatype_desc:
- mtypeid = get_mtype(zapi, mediatype_desc)
-
- return mtypeid
-
-def preprocess_medias(zapi, medias):
- ''' Insert the correct information when processing medias '''
- for media in medias:
- # Fetch the mediatypeid from the media desc (name)
- if media.has_key('mediatype'):
- media['mediatypeid'] = get_mediatype(zapi, mediatype=None, mediatype_desc=media.pop('mediatype'))
-
- media['active'] = get_active(media.get('active'))
- media['severity'] = int(get_severity(media['severity']))
-
- return medias
-
-# Disabling branching as the logic requires branches.
-# I've also added a few safeguards which required more branches.
-# pylint: disable=too-many-branches
-def main():
- '''
- Ansible zabbix module for user media
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- login=dict(default=None, type='str'),
- active=dict(default=False, type='bool'),
- medias=dict(default=None, type='list'),
- mediaid=dict(default=None, type='int'),
- mediatype=dict(default=None, type='str'),
- mediatype_desc=dict(default=None, type='str'),
- #d-d,hh:mm-hh:mm;d-d,hh:mm-hh:mm...
- period=dict(default=None, type='str'),
- sendto=dict(default=None, type='str'),
- severity=dict(default=None, type='str'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- #Set the instance and the template for the rest of the calls
- zbx_class_name = 'user'
- idname = "mediaid"
- state = module.params['state']
-
- # User media is fetched through the usermedia.get
- zbx_user_query = get_zbx_user_query_data(zapi, module.params['login'])
- content = zapi.get_content('usermedia', 'get',
- {'userids': [uid for user, uid in zbx_user_query.items()]})
- #####
- # Get
- #####
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- ########
- # Delete
- ########
- if state == 'absent':
- if not exists(content) or len(content['result']) == 0:
- module.exit_json(changed=False, state="absent")
-
- if not module.params['login']:
- module.exit_json(failed=True, changed=False, results='Must specify a user login.', state="absent")
-
- content = zapi.get_content(zbx_class_name, 'deletemedia', [res[idname] for res in content['result']])
-
- if content.has_key('error'):
- module.exit_json(changed=False, results=content['error'], state="absent")
-
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
- active = get_active(module.params['active'])
- mtypeid = get_mediatype(zapi, module.params['mediatype'], module.params['mediatype_desc'])
-
- medias = module.params['medias']
- if medias == None:
- medias = [{'mediatypeid': mtypeid,
- 'sendto': module.params['sendto'],
- 'active': active,
- 'severity': int(get_severity(module.params['severity'])),
- 'period': module.params['period'],
- }]
- else:
- medias = preprocess_medias(zapi, medias)
-
- params = {'users': [zbx_user_query],
- 'medias': medias,
- 'output': 'extend',
- }
-
- ########
- # Create
- ########
- if not exists(content):
- if not params['medias']:
- module.exit_json(changed=False, results=content['result'], state='present')
-
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'addmedia', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
- # A mediaid signifies an update.
- # Check whether the user's requested medias already exist in zabbix:
- # if they all exist, return with no update;
- # otherwise, update with the user's requested medias only.
- ########
- # Update
- ########
- diff = {'medias': [], 'users': {}}
- _ = [diff['medias'].append(media) for media in params['medias'] if not find_media(content['result'], media)]
-
- if not diff['medias']:
- module.exit_json(changed=False, results=content['result'], state="present")
-
- for user in params['users']:
- diff['users']['userid'] = user['userid']
-
- # Medias have no real unique key, so replace them wholesale with the incoming user's request
- diff['medias'] = medias
-
- # We have differences and need to update
- content = zapi.get_content(zbx_class_name, 'updatemedia', diff)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=False, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required.
-from ansible.module_utils.basic import *
-
-main()
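
get_severity above builds the media severity value as a bitmask, one power of two per named severity level, OR'd together. A short standalone sketch of the same computation; the sample input is illustrative:

# Illustrative sketch: severity names to the Zabbix media severity bitmask.
SEVERITY_BITS = {'not': 1, 'inf': 2, 'war': 4, 'ave': 8, 'avg': 8, 'hig': 16, 'dis': 32}

def severity_mask(levels):
    '''Combine named severity levels into the bitmask Zabbix expects.'''
    mask = 0
    for level in levels:
        mask |= SEVERITY_BITS[level[:3].lower()]
    return mask

print(severity_mask(['average', 'high', 'disaster']))  # 56
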
diff --git a/roles/lib_zabbix/library/zbx_usergroup.py b/roles/lib_zabbix/library/zbx_usergroup.py
deleted file mode 100644
index e30ce6678..000000000
--- a/roles/lib_zabbix/library/zbx_usergroup.py
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/env python
-'''
-zabbix ansible module for usergroups
-'''
-# vim: expandtab:tabstop=4:shiftwidth=4
-#
-# Zabbix usergroup ansible module
-#
-#
-# Copyright 2015 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is in place because the modules look very similar to one another.
-# They intentionally duplicate code, since their behavior is very similar
-# but differs for each zabbix class.
-# pylint: disable=duplicate-code
-
-# Disabling too-many-branches as we need the error checking and the if-statements
-# to determine the proper state
-# pylint: disable=too-many-branches
-
-# pylint: disable=import-error
-from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection
-
-def exists(content, key='result'):
- ''' Check if key exists in content or the size of content[key] > 0
- '''
- if not content.has_key(key):
- return False
-
- if not content[key]:
- return False
-
- return True
-
-def get_rights(zapi, rights):
- '''Get rights
- '''
- if rights == None:
- return None
-
- perms = []
- for right in rights:
- hstgrp = right.keys()[0]
- perm = right.values()[0]
- content = zapi.get_content('hostgroup', 'get', {'search': {'name': hstgrp}})
- if content['result']:
- permission = 0
- if perm == 'ro':
- permission = 2
- elif perm == 'rw':
- permission = 3
- perms.append({'id': content['result'][0]['groupid'],
- 'permission': permission})
- return perms
-
-def get_gui_access(access):
- ''' Return the gui_access for a usergroup
- '''
- access = access.lower()
- if access == 'internal':
- return 1
- elif access == 'disabled':
- return 2
-
- return 0
-
-def get_debug_mode(mode):
- ''' Return the debug_mode for a usergroup
- '''
- mode = mode.lower()
- if mode == 'enabled':
- return 1
-
- return 0
-
-def get_user_status(status):
- ''' Return the user_status for a usergroup
- '''
- status = status.lower()
- if status == 'enabled':
- return 0
-
- return 1
-
-
-def get_userids(zapi, users):
- ''' Get userids from user aliases
- '''
- if not users:
- return None
-
- userids = []
- for alias in users:
- content = zapi.get_content('user', 'get', {'search': {'alias': alias}})
- if content['result']:
- userids.append(content['result'][0]['userid'])
-
- return userids
-
-def main():
- ''' Ansible module for usergroup
- '''
-
- module = AnsibleModule(
- argument_spec=dict(
- zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
- zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
- zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
- zbx_debug=dict(default=False, type='bool'),
- debug_mode=dict(default='disabled', type='str'),
- gui_access=dict(default='default', type='str'),
- status=dict(default='enabled', type='str'),
- name=dict(default=None, type='str', required=True),
- rights=dict(default=None, type='list'),
- users=dict(default=None, type='list'),
- state=dict(default='present', type='str'),
- ),
- #supports_check_mode=True
- )
-
- zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
- module.params['zbx_user'],
- module.params['zbx_password'],
- module.params['zbx_debug']))
-
- zbx_class_name = 'usergroup'
- idname = "usrgrpid"
- uname = module.params['name']
- state = module.params['state']
-
- content = zapi.get_content(zbx_class_name,
- 'get',
- {'search': {'name': uname},
- 'selectUsers': 'userid',
- })
- #******#
- # GET
- #******#
- if state == 'list':
- module.exit_json(changed=False, results=content['result'], state="list")
-
- #******#
- # DELETE
- #******#
- if state == 'absent':
- if not exists(content):
- module.exit_json(changed=False, state="absent")
-
- if not uname:
- module.exit_json(failed=True, changed=False, results='Need to pass in a usergroup name.', state="error")
-
- content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
- module.exit_json(changed=True, results=content['result'], state="absent")
-
- # Create and Update
- if state == 'present':
-
- params = {'name': uname,
- 'rights': get_rights(zapi, module.params['rights']),
- 'users_status': get_user_status(module.params['status']),
- 'gui_access': get_gui_access(module.params['gui_access']),
- 'debug_mode': get_debug_mode(module.params['debug_mode']),
- 'userids': get_userids(zapi, module.params['users']),
- }
-
- # Remove any None valued params
- _ = [params.pop(key, None) for key in params.keys() if params[key] == None]
-
- #******#
- # CREATE
- #******#
- if not exists(content):
- # if we didn't find it, create it
- content = zapi.get_content(zbx_class_name, 'create', params)
-
- if content.has_key('error'):
- module.exit_json(failed=True, changed=True, results=content['error'], state="present")
-
- module.exit_json(changed=True, results=content['result'], state='present')
-
-
- ########
- # UPDATE
- ########
- differences = {}
- zab_results = content['result'][0]
- for key, value in params.items():
- if key == 'rights':
- differences['rights'] = value
-
- elif key == 'userids' and zab_results.has_key('users'):
- if zab_results['users'] != value:
- differences['userids'] = value
-
- elif zab_results[key] != value and zab_results[key] != str(value):
- differences[key] = value
-
- if not differences:
- module.exit_json(changed=False, results=zab_results, state="present")
-
- # We have differences and need to update
- differences[idname] = zab_results[idname]
- content = zapi.get_content(zbx_class_name, 'update', differences)
- module.exit_json(changed=True, results=content['result'], state="present")
-
- module.exit_json(failed=True,
- changed=False,
- results='Unknown state passed. %s' % state,
- state="unknown")
-
-# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
-# import module snippets. These are required.
-from ansible.module_utils.basic import *
-
-main()
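
get_rights above translates a list of {hostgroup: 'ro'|'rw'} entries into Zabbix rights (2 read-only, 3 read-write), looking up each hostgroup id through the API. A self-contained sketch with the lookup stubbed out; names and ids are illustrative only:

# Illustrative sketch: usergroup rights mapping with the hostgroup lookup stubbed.
PERMISSIONS = {'ro': 2, 'rw': 3}

def build_rights(lookup, rights):
    '''Translate [{'<hostgroup>': 'ro'|'rw'}, ...] into Zabbix rights entries.'''
    perms = []
    for right in rights:
        hostgroup, perm = list(right.items())[0]
        groupid = lookup(hostgroup)        # stand-in for a hostgroup.get call
        if groupid is not None:
            perms.append({'id': groupid, 'permission': PERMISSIONS.get(perm, 0)})
    return perms

fake_groups = {'docker-registry': '42'}
print(build_rights(fake_groups.get, [{'docker-registry': 'rw'}]))
# -> [{'id': '42', 'permission': 3}]
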
diff --git a/roles/lib_zabbix/tasks/create_template.yml b/roles/lib_zabbix/tasks/create_template.yml
deleted file mode 100644
index 783249c3a..000000000
--- a/roles/lib_zabbix/tasks/create_template.yml
+++ /dev/null
@@ -1,146 +0,0 @@
----
-- name: Template Create Template
- zbx_template:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ template.name }}"
- register: created_template
-
-
-- set_fact:
- lzbx_item_applications: "{{ template.zitems | default([], True) | oo_select_keys_from_list(['applications']) | oo_flatten | unique }}"
- lzbx_itemprototype_applications: "{{ template.zitemprototypes | default([], True) | oo_select_keys_from_list(['applications']) | oo_flatten | unique }}"
-
-- name: Create Application
- zbx_application:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item }}"
- template_name: "{{ template.name }}"
- with_items:
- - "{{ lzbx_item_applications }}"
- - "{{ lzbx_itemprototype_applications }}"
- register: created_application
- when: template.zitems is defined or template.zitemprototypes is defined
-
-- name: Create Items
- zbx_item:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- key: "{{ item.key }}"
- name: "{{ item.name | default(item.key, true) }}"
- value_type: "{{ item.value_type | default('int') }}"
- data_type: "{{ item.data_type | default('decimal') }}"
- description: "{{ item.description | default('', True) }}"
- multiplier: "{{ item.multiplier | default('', True) }}"
- units: "{{ item.units | default('', True) }}"
- template_name: "{{ template.name }}"
- applications: "{{ item.applications }}"
- zabbix_type: "{{ item.zabbix_type | default('trapper') }}"
- interval: "{{ item.interval | default(60, True) }}"
- delta: "{{ item.delta | default(0, True) }}"
- with_items: template.zitems
- register: created_items
- when: template.zitems is defined
-
-- name: Create Triggers
- zbx_trigger:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item.name }}"
- description: "{{ item.description | default('', True) }}"
- dependencies: "{{ item.dependencies | default([], True) }}"
- expression: "{{ item.expression }}"
- priority: "{{ item.priority }}"
- url: "{{ item.url | default(None, True) }}"
- status: "{{ item.status | default('', True) }}"
- with_items: template.ztriggers
- when: template.ztriggers is defined
-
-- name: Create Actions
- zbx_action:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- state: "{{ item.state | default('present', True) }}"
- name: "{{ item.name }}"
- status: "{{ item.status | default('enabled', True) }}"
- escalation_time: "{{ item.escalation_time }}"
- conditions_filter: "{{ item.conditions_filter }}"
- operations: "{{ item.operations }}"
- with_items: template.zactions
- when: template.zactions is defined
-
-- name: Create Discoveryrules
- zbx_discoveryrule:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item.name }}"
- key: "{{ item.key }}"
- lifetime: "{{ item.lifetime }}"
- template_name: "{{ template.name }}"
- description: "{{ item.description | default('', True) }}"
- with_items: template.zdiscoveryrules
- when: template.zdiscoveryrules is defined
-
-- name: Create Item Prototypes
- zbx_itemprototype:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item.name }}"
- key: "{{ item.key }}"
- discoveryrule_key: "{{ item.discoveryrule_key }}"
- value_type: "{{ item.value_type }}"
- data_type: "{{ item.data_type | default('decimal') }}"
- template_name: "{{ template.name }}"
- applications: "{{ item.applications }}"
- description: "{{ item.description | default('', True) }}"
- multiplier: "{{ item.multiplier | default('', True) }}"
- units: "{{ item.units | default('', True) }}"
- interval: "{{ item.interval | default(60, True) }}"
- delta: "{{ item.delta | default(0, True) }}"
- with_items: template.zitemprototypes
- when: template.zitemprototypes is defined
-
-- name: Create Trigger Prototypes
- zbx_triggerprototype:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item.name }}"
- expression: "{{ item.expression }}"
- url: "{{ item.url | default('', True) }}"
- priority: "{{ item.priority | default('average', True) }}"
- description: "{{ item.description | default('', True) }}"
- with_items: template.ztriggerprototypes
- when: template.ztriggerprototypes is defined
-
-- name: Create Graphs
- zbx_graph:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item.name }}"
- height: "{{ item.height }}"
- width: "{{ item.width }}"
- graph_items: "{{ item.graph_items }}"
- with_items: template.zgraphs
- when: template.zgraphs is defined
-
-- name: Create Graph Prototypes
- zbx_graphprototype:
- zbx_server: "{{ server }}"
- zbx_user: "{{ user }}"
- zbx_password: "{{ password }}"
- name: "{{ item.name }}"
- height: "{{ item.height }}"
- width: "{{ item.width }}"
- graph_items: "{{ item.graph_items }}"
- with_items: template.zgraphprototypes
- when: template.zgraphprototypes is defined
diff --git a/roles/lib_zabbix/tasks/create_user.yml b/roles/lib_zabbix/tasks/create_user.yml
deleted file mode 100644
index 1f752a9e1..000000000
--- a/roles/lib_zabbix/tasks/create_user.yml
+++ /dev/null
@@ -1,11 +0,0 @@
----
-- name: Update zabbix credentials for a user
- zbx_user:
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- alias: "{{ ozb_username }}"
- passwd: "{{ ozb_new_password | default(ozb_password, true) }}"
- register: user
-
-- debug: var=user.results
diff --git a/roles/os_ipv6_disable/tasks/main.yaml b/roles/os_ipv6_disable/tasks/main.yaml
deleted file mode 100644
index fae5beee7..000000000
--- a/roles/os_ipv6_disable/tasks/main.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# Disable ipv6 on RHEL7
-
-- name: Disable all ipv6
- sysctl: name="net.ipv6.conf.all.disable_ipv6" value=1 sysctl_set=yes state=present reload=yes
-
-- name: Disable default ipv6
- sysctl: name="net.ipv6.conf.default.disable_ipv6" value=1 sysctl_set=yes state=present reload=yes
-
-- name: Remove ipv6 localhost from /etc/hosts
- lineinfile: dest='/etc/hosts' regexp='^::1 ' state=absent owner=root group=root mode=0644
diff --git a/roles/os_reboot_server/tasks/main.yaml b/roles/os_reboot_server/tasks/main.yaml
deleted file mode 100644
index 581ed3e0a..000000000
--- a/roles/os_reboot_server/tasks/main.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-# Role to reboot a server
-- name: Restart server
- shell: sleep 2 && shutdown -r now "Ansible updates triggered"
- async: 1
- poll: 0
- ignore_errors: true
-
-- name: Wait for server to restart
- local_action:
- module: wait_for
- host={{ ansible_ssh_host }}
- port=22
- delay=3
- timeout=300
- sudo: false
diff --git a/roles/os_utils/tasks/main.yaml b/roles/os_utils/tasks/main.yaml
deleted file mode 100644
index 346f6566f..000000000
--- a/roles/os_utils/tasks/main.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-# Utility packages that are helpful to have installed
-
-- name: Install useful rpm packages
- action: "{{ ansible_pkg_mgr }} name={{ item }} state=present"
- with_items:
- - wget
- - git
- - net-tools
- - bind-utils
- - iptables-services
- - bridge-utils
- - bash-completion
- - atop
- - htop
- - ack
- - telnet
diff --git a/roles/os_zabbix/README.md b/roles/os_zabbix/README.md
deleted file mode 100644
index ac3dc2833..000000000
--- a/roles/os_zabbix/README.md
+++ /dev/null
@@ -1,40 +0,0 @@
-os_zabbix
-=========
-
-Automate zabbix tasks.
-
-Requirements
-------------
-
-This requires that the openshift_tools rpm be installed for the zbxapi.py library. For now it can be found at https://github.com/openshift/openshift-tools under openshift_tools/monitoring/zbxapi.py.
-
-Role Variables
---------------
-
-zab_server
-zab_username
-zab_password
-
-Dependencies
-------------
-
-This depends on the zbxapi.py library, located for now at https://github.com/openshift/openshift-tools under openshift_tools/monitoring/zbxapi.py.
-
-Example Playbook
-----------------
-
- - zbx_host:
- server: zab_server
- user: zab_user
- password: zab_password
- name: 'myhost'
-
-License
--------
-
-ASL 2.0
-
-Author Information
-------------------
-
-OpenShift operations, Red Hat, Inc
diff --git a/roles/os_zabbix/defaults/main.yml b/roles/os_zabbix/defaults/main.yml
deleted file mode 100644
index ed97d539c..000000000
--- a/roles/os_zabbix/defaults/main.yml
+++ /dev/null
@@ -1 +0,0 @@
----
diff --git a/roles/os_zabbix/handlers/main.yml b/roles/os_zabbix/handlers/main.yml
deleted file mode 100644
index ed97d539c..000000000
--- a/roles/os_zabbix/handlers/main.yml
+++ /dev/null
@@ -1 +0,0 @@
----
diff --git a/roles/os_zabbix/meta/main.yml b/roles/os_zabbix/meta/main.yml
deleted file mode 100644
index 360f5aad2..000000000
--- a/roles/os_zabbix/meta/main.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-galaxy_info:
- author: OpenShift
- description: ZabbixAPI
- company: Red Hat, Inc
- license: ASL 2.0
- min_ansible_version: 1.2
-dependencies:
-- lib_zabbix
diff --git a/roles/os_zabbix/tasks/main.yml b/roles/os_zabbix/tasks/main.yml
deleted file mode 100644
index 1c8d88854..000000000
--- a/roles/os_zabbix/tasks/main.yml
+++ /dev/null
@@ -1,166 +0,0 @@
----
-- name: Main List all templates
- zbx_template:
- zbx_server: "{{ ozb_server }}"
- zbx_user: "{{ ozb_user }}"
- zbx_password: "{{ ozb_password }}"
- state: list
- register: templates
-
-- include_vars: template_heartbeat.yml
- tags:
- - heartbeat
-- include_vars: template_os_linux.yml
- tags:
- - linux
-- include_vars: template_docker.yml
- tags:
- - docker
-- include_vars: template_openshift_master.yml
- tags:
- - openshift_master
-- include_vars: template_openshift_node.yml
- tags:
- - openshift_node
-- include_vars: template_ops_tools.yml
- tags:
- - ops_tools
-- include_vars: template_app_zabbix_server.yml
- tags:
- - zabbix_server
-- include_vars: template_app_zabbix_agent.yml
- tags:
- - zabbix_agent
-- include_vars: template_performance_copilot.yml
- tags:
- - pcp
-- include_vars: template_aws.yml
- tags:
- - aws
-- include_vars: template_zagg_server.yml
- tags:
- - zagg_server
-
-- include_vars: template_config_loop.yml
- tags:
- - config_loop
-
-- name: Include Template Heartbeat
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_heartbeat }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - heartbeat
-
-- name: Include Template os_linux
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_os_linux }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - linux
-
-- name: Include Template docker
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_docker }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - docker
-
-- name: Include Template Openshift Master
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_openshift_master }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - openshift_master
-
-- name: Include Template Openshift Node
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_openshift_node }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - openshift_node
-
-- name: Include Template Ops Tools
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_ops_tools }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - ops_tools
-
-- name: Include Template App Zabbix Server
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_app_zabbix_server }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - zabbix_server
-
-- name: Include Template App Zabbix Agent
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_app_zabbix_agent }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - zabbix_agent
-
-- name: Include Template Performance Copilot
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_performance_copilot }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - pcp
-
-- name: Include Template AWS
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_aws }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - aws
-
-- name: Include Template Zagg Server
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_zagg_server }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - zagg_server
-
-- name: Include Template Config Loop
- include: ../../lib_zabbix/tasks/create_template.yml
- vars:
- template: "{{ g_template_config_loop }}"
- server: "{{ ozb_server }}"
- user: "{{ ozb_user }}"
- password: "{{ ozb_password }}"
- tags:
- - config_loop
diff --git a/roles/os_zabbix/vars/main.yml b/roles/os_zabbix/vars/main.yml
deleted file mode 100644
index ed97d539c..000000000
--- a/roles/os_zabbix/vars/main.yml
+++ /dev/null
@@ -1 +0,0 @@
----
diff --git a/roles/os_zabbix/vars/template_app_zabbix_agent.yml b/roles/os_zabbix/vars/template_app_zabbix_agent.yml
deleted file mode 100644
index d636d4822..000000000
--- a/roles/os_zabbix/vars/template_app_zabbix_agent.yml
+++ /dev/null
@@ -1,23 +0,0 @@
----
-g_template_app_zabbix_agent:
- name: Template App Zabbix Agent
- zitems:
- - key: agent.hostname
- applications:
- - Zabbix agent
- value_type: character
- zabbix_type: agent
-
- - key: agent.ping
- applications:
- - Zabbix agent
- description: The agent always returns 1 for this item. It could be used in combination with nodata() for availability checks.
- value_type: int
- zabbix_type: agent
-
- ztriggers:
- - name: '[Reboot] Zabbix agent on {HOST.NAME} is unreachable for 15 minutes'
- description: Zabbix agent is unreachable for 15 minutes.
- expression: '{Template App Zabbix Agent:agent.ping.nodata(15m)}=1'
- priority: high
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/check_ping.asciidoc
diff --git a/roles/os_zabbix/vars/template_app_zabbix_server.yml b/roles/os_zabbix/vars/template_app_zabbix_server.yml
deleted file mode 100644
index 43517113b..000000000
--- a/roles/os_zabbix/vars/template_app_zabbix_server.yml
+++ /dev/null
@@ -1,412 +0,0 @@
----
-g_template_app_zabbix_server:
- name: Template App Zabbix Server
- zitems:
- - key: housekeeper_creates
- applications:
- - Zabbix server
- description: A simple count of the number of partition creates output by the housekeeper script.
- units: ''
- value_type: int
- zabbix_type: internal
-
- - key: housekeeper_drops
- applications:
- - Zabbix server
- description: A simple count of the number of partition drops output by the housekeeper script.
- units: ''
- value_type: int
- zabbix_type: internal
-
- - key: housekeeper_errors
- applications:
- - Zabbix server
- description: A simple count of the number of errors output by the housekeeper script.
- units: ''
- value_type: int
- zabbix_type: internal
-
- - key: housekeeper_total
- applications:
- - Zabbix server
- description: A simple count of the total number of lines output by the housekeeper
- script.
- units: ''
- value_type: int
- zabbix_type: internal
-
- - key: zabbix[process,alerter,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,configuration syncer,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,db watchdog,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,discoverer,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,escalator,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,history syncer,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,housekeeper,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,http poller,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,icmp pinger,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,ipmi poller,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,java poller,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,node watcher,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,poller,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,proxy poller,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,self-monitoring,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,snmp trapper,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,timer,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,trapper,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[process,unreachable poller,avg,busy]
- applications:
- - Zabbix server
- description: ''
- units: '%'
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[queue,10m]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: int
- zabbix_type: internal
- interval: 600
-
- - key: zabbix[queue]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: int
- zabbix_type: internal
- interval: 600
-
- - key: zabbix[rcache,buffer,pfree]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[wcache,history,pfree]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[wcache,text,pfree]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[wcache,trend,pfree]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: float
- zabbix_type: internal
-
- - key: zabbix[wcache,values]
- applications:
- - Zabbix server
- description: ''
- units: ''
- value_type: float
- zabbix_type: internal
- delta: 1 # speed per second
-
- ztriggers:
- - description: "There has been unexpected output while running the housekeeping script\
- \ on the Zabbix server. There are only three kinds of lines we expect to see in the output,\
- \ and we've gotten something new.\r\n\r\nCheck the script's output in /var/lib/zabbix/state\
- \ for more details."
- expression: '{Template App Zabbix Server:housekeeper_errors.last(0)}+{Template App Zabbix Server:housekeeper_creates.last(0)}+{Template App Zabbix Server:housekeeper_drops.last(0)}<>{Template App Zabbix Server:housekeeper_total.last(0)}'
- name: Unexpected output in Zabbix DB Housekeeping
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_DB_Housekeeping.asciidoc
-
- - description: An error has occurred while running the housekeeping script on the Zabbix server. Check the script's output in /var/lib/zabbix/state for more details.
- expression: '{Template App Zabbix Server:housekeeper_errors.last(0)}>0'
- name: Errors during Zabbix DB Housekeeping
- priority: high
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,alerter,avg,busy].min(600)}>75'
- name: Zabbix alerter processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,configuration syncer,avg,busy].min(600)}>75'
- name: Zabbix configuration syncer processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,db watchdog,avg,busy].min(600)}>75'
- name: Zabbix db watchdog processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,discoverer,avg,busy].min(600)}>75'
- name: Zabbix discoverer processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,escalator,avg,busy].min(600)}>75'
- name: Zabbix escalator processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,history syncer,avg,busy].min(600)}>75'
- name: Zabbix history syncer processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,housekeeper,avg,busy].min(1800)}>75'
- name: Zabbix housekeeper processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,http poller,avg,busy].min(600)}>75'
- name: Zabbix http poller processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,icmp pinger,avg,busy].min(600)}>75'
- name: Zabbix icmp pinger processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,ipmi poller,avg,busy].min(600)}>75'
- name: Zabbix ipmi poller processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,java poller,avg,busy].min(600)}>75'
- name: Zabbix java poller processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,node watcher,avg,busy].min(600)}>75'
- name: Zabbix node watcher processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,poller,avg,busy].min(600)}>75'
- name: Zabbix poller processes more than 75% busy
- priority: high
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,proxy poller,avg,busy].min(600)}>75'
- name: Zabbix proxy poller processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,self-monitoring,avg,busy].min(600)}>75'
- name: Zabbix self-monitoring processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,snmp trapper,avg,busy].min(600)}>75'
- name: Zabbix snmp trapper processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: Timer processes are usually busy because they have to process
- time-based trigger functions
- expression: '{Template App Zabbix Server:zabbix[process,timer,avg,busy].min(600)}>75'
- name: Zabbix timer processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,trapper,avg,busy].min(600)}>75'
- name: Zabbix trapper processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[process,unreachable poller,avg,busy].min(600)}>75'
- name: Zabbix unreachable poller processes more than 75% busy
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/Zabbix_state_check.asciidoc
-
- - description: "This alert generally indicates a performance problem or a problem\
- \ with the zabbix-server or proxy.\r\n\r\nThe first place to check for issues\
- \ is Administration > Queue. Be sure to check the general view and the per-proxy\
- \ view."
- expression: '{Template App Zabbix Server:zabbix[queue,10m].min(600)}>1000'
- name: More than 1000 items having missing data for more than 10 minutes
- priority: high
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/data_lost_overview_plugin.asciidoc
-
- - description: Consider increasing CacheSize in the zabbix_server.conf configuration
- file
- expression: '{Template App Zabbix Server:zabbix[rcache,buffer,pfree].min(600)}<5'
- name: Less than 5% free in the configuration cache
- priority: info
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/check_cache.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[wcache,history,pfree].min(600)}<25'
- name: Less than 25% free in the history cache
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/check_cache.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[wcache,text,pfree].min(600)}<25'
- name: Less than 25% free in the text history cache
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/check_cache.asciidoc
-
- - description: ''
- expression: '{Template App Zabbix Server:zabbix[wcache,trend,pfree].min(600)}<25'
- name: Less than 25% free in the trends cache
- priority: avg
- url: https://github.com/openshift/ops-sop/blob/master/Alerts/check_cache.asciidoc
diff --git a/roles/os_zabbix/vars/template_aws.yml b/roles/os_zabbix/vars/template_aws.yml
deleted file mode 100644
index 57832a3fe..000000000
--- a/roles/os_zabbix/vars/template_aws.yml
+++ /dev/null
@@ -1,25 +0,0 @@
----
-g_template_aws:
- name: Template AWS
- zdiscoveryrules:
- - name: disc.aws
- key: disc.aws
- lifetime: 14
- description: "Dynamically register AWS bucket info"
-
- zitemprototypes:
- - discoveryrule_key: disc.aws
- name: "S3 bucket size (GB) [{#S3_BUCKET}]"
- key: "disc.aws.size[{#S3_BUCKET}]"
- value_type: int
- description: "Size of S3 bucket"
- applications:
- - AWS
-
- - discoveryrule_key: disc.aws
- name: "S3 bucket object count [{#S3_BUCKET}]"
- key: "disc.aws.objects[{#S3_BUCKET}]"
- value_type: int
- description: "Objects in S3 bucket"
- applications:
- - AWS
diff --git a/roles/os_zabbix/vars/template_config_loop.yml b/roles/os_zabbix/vars/template_config_loop.yml
deleted file mode 100644
index 823da1868..000000000
--- a/roles/os_zabbix/vars/template_config_loop.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-g_template_config_loop:
- name: Template Config Loop
- zitems:
- - key: config_loop.run.exit_code
- applications:
- - Config Loop
- value_type: int
-
- ztriggers:
- - name: 'config_loop.run.exit_code not zero on {HOST.NAME}'
- expression: '{Template Config Loop:config_loop.run.exit_code.min(#2)}>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_config_loop.asciidoc'
- priority: average
diff --git a/roles/os_zabbix/vars/template_docker.yml b/roles/os_zabbix/vars/template_docker.yml
deleted file mode 100644
index dd13e76f7..000000000
--- a/roles/os_zabbix/vars/template_docker.yml
+++ /dev/null
@@ -1,116 +0,0 @@
----
-g_template_docker:
- name: Template Docker
- zitems:
- - key: docker.ping
- applications:
- - Docker Daemon
- value_type: int
-
- - key: docker.info_elapsed_ms
- applications:
- - Docker Daemon
- value_type: int
-
- - key: docker.container.dns.resolution
- applications:
- - Docker Daemon
- value_type: int
-
- - key: docker.container.existing.dns.resolution.failed
- applications:
- - Docker Daemon
- value_type: int
-
- - key: docker.storage.is_loopback
- applications:
- - Docker Storage
- value_type: int
-
- - key: docker.storage.data.space.total
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.data.space.used
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.data.space.available
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.data.space.percent_available
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.metadata.space.total
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.metadata.space.used
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.metadata.space.available
- applications:
- - Docker Storage
- value_type: float
-
- - key: docker.storage.metadata.space.percent_available
- applications:
- - Docker Storage
- value_type: float
- ztriggers:
- - name: 'docker.ping failed on {HOST.NAME}'
- expression: '{Template Docker:docker.ping.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_ping.asciidoc'
- priority: high
-
- # Re-enable for OpenShift 3.1.1 (https://bugzilla.redhat.com/show_bug.cgi?id=1292971#c6)
- - name: 'docker.container.dns.resolution failed on {HOST.NAME}'
- expression: '{Template Docker:docker.container.dns.resolution.min(#3)}>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_dns.asciidoc'
- priority: average
- status: disabled
-
- - name: 'docker.container.existing.dns.resolution.failed on {HOST.NAME}'
- expression: '{Template Docker:docker.container.existing.dns.resolution.failed.min(#3)}>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_dns.asciidoc'
- priority: average
-
- - name: 'Docker storage is using LOOPBACK on {HOST.NAME}'
- expression: '{Template Docker:docker.storage.is_loopback.last()}<>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_loopback.asciidoc'
- priority: high
-
- - name: 'Critically low docker storage data space on {HOST.NAME}'
- expression: '{Template Docker:docker.storage.data.space.percent_available.max(#3)}<5 or {Template Docker:docker.storage.data.space.available.max(#3)}<5' # < 5% or < 5GB
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_storage.asciidoc'
- priority: high
-
- - name: 'Critically low docker storage metadata space on {HOST.NAME}'
- expression: '{Template Docker:docker.storage.metadata.space.percent_available.max(#3)}<5 or {Template Docker:docker.storage.metadata.space.available.max(#3)}<0.005' # < 5% or < 5MB
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_storage.asciidoc'
- priority: high
-
- # Put triggers that depend on other triggers here (deps must be created first)
- - name: 'Low docker storage data space on {HOST.NAME}'
- expression: '{Template Docker:docker.storage.data.space.percent_available.max(#3)}<10 or {Template Docker:docker.storage.data.space.available.max(#3)}<10' # < 10% or < 10GB
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_storage.asciidoc'
- dependencies:
- - 'Critically low docker storage data space on {HOST.NAME}'
- priority: average
-
- - name: 'Low docker storage metadata space on {HOST.NAME}'
- expression: '{Template Docker:docker.storage.metadata.space.percent_available.max(#3)}<10 or {Template Docker:docker.storage.metadata.space.available.max(#3)}<0.01' # < 10% or < 10MB
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_docker_storage.asciidoc'
- dependencies:
- - 'Critically low docker storage metadata space on {HOST.NAME}'
- priority: average
-
diff --git a/roles/os_zabbix/vars/template_heartbeat.yml b/roles/os_zabbix/vars/template_heartbeat.yml
deleted file mode 100644
index ec953c79b..000000000
--- a/roles/os_zabbix/vars/template_heartbeat.yml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-g_template_heartbeat:
- name: Template Heartbeat
- zitems:
- - name: Heartbeat Ping
- applications:
- - Heartbeat
- key: heartbeat.ping
- ztriggers:
- - name: 'Heartbeat.ping has failed on {HOST.NAME}'
- expression: '{Template Heartbeat:heartbeat.ping.nodata(20m)}=1'
- priority: avg
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_node_heartbeat.asciidoc'
-
- - name: 'Heartbeat.ping has failed (60 min) on {HOST.NAME}'
- expression: '{Template Heartbeat:heartbeat.ping.nodata(60m)}=1'
- priority: high
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_node_heartbeat.asciidoc'
diff --git a/roles/os_zabbix/vars/template_openshift_master.yml b/roles/os_zabbix/vars/template_openshift_master.yml
deleted file mode 100644
index a38db9f65..000000000
--- a/roles/os_zabbix/vars/template_openshift_master.yml
+++ /dev/null
@@ -1,458 +0,0 @@
----
-g_template_openshift_master:
- name: Template Openshift Master
- zitems:
- - name: openshift.master.app.create
- applications:
- - Openshift Master
- key: openshift.master.app.create
-
- - key: openshift.master.app.build.create
- description: "Checks app creation with a build process"
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.app.create.time
- description: "Checks the time it takes to create an app with a build process"
- value_type: float
- applications:
- - Openshift Master
-
- - key: openshift.master.app.build.time
- description: "Checks the time an app build takes"
- value_type: float
- applications:
- - Openshift Master
-
- - key: openshift.master.process.count
- description: Shows number of master processes running
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.api.ping
- description: "Verify that the Openshift API is up (uses the cluster API URL)"
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.local.api.ping
- description: "Verify that the Openshift API is up on the host (uses the local API URL, https://127.0.0.1)"
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.api.healthz
- description: "Checks the master API's healthz endpoint: https://<cluster_api_url>/healthz"
- value_type: int
- data_type: bool
- applications:
- - Openshift Master
-
- - key: openshift.master.local.api.healthz
- description: "Checks the master API's healthz endpoint: https://127.0.0.1/healthz"
- value_type: int
- data_type: bool
- applications:
- - Openshift Master
-
- - key: openshift.master.user.count
- description: Shows number of users in a cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pod.running.count
- description: Shows number of pods running
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pod.user.running.count
- description: Shows number of user pods running (non-infrastructure pods)
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pod.total.count
- description: Shows total number of pods (running and non running)
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.node.count
- description: Shows the total number of nodes found in the Openshift Cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.project.count
- description: Shows number of projects on a cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.space.total
- description: Shows the total space of PVs
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.space.available
- description: Shows the available space of PVs
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.total.count
- description: Total number of Persistent Volumes in the Openshift Cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.available.count
- description: Total number of Available Persistent Volumes in the Openshift Cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.released.count
- description: Total number of Released Persistent Volumes in the Openshift Cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.bound.count
- description: Total number of Bound Persistent Volumes in the Openshift Cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.pv.failed.count
- description: Total number of Failed Persistent Volumes in the Openshift Cluster
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.skydns.port.open
- description: Whether the SkyDNS port is open and listening
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.skydns.query
- description: Whether SkyDNS can be queried
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.etcd.create.success
- description: Show number of successful create actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.create.fail
- description: Show number of failed create actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.delete.success
- description: Show number of successful delete actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.delete.fail
- description: Show number of failed delete actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.get.success
- description: Show number of successful get actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.get.fail
- description: Show number of failed get actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.set.success
- description: Show number of successful set actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.set.fail
- description: Show number of failed set actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.update.success
- description: Show number of successful update actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.update.fail
- description: Show number of failed update actions
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.watchers
- description: Show number of etcd watchers
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.etcd.ping
- description: etcd ping
- value_type: int
- applications:
- - Openshift Etcd
-
- - key: openshift.master.metric.ping
- description: "This check verifies that the https://master/metrics endpoint is alive and responding properly."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.nodesnotready.count
- description: "This check shows how many nodes in a cluster are in NotReady state."
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.nodesnotschedulable.count
- description: "This check shows how many nodes in a cluster are not schedulable."
- value_type: int
- applications:
- - Openshift Master
-
- - key: openshift.master.apiserver.latency.summary.pods.quantile.list.5
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 50% of the pod operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.apiserver.latency.summary.pods.quantile.list.9
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 90% of the pod operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.apiserver.latency.summary.pods.quantile.list.99
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 99% of the pod operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.apiserver.latency.summary.pods.quantile.watchlist.5
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 50% of the pod operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.apiserver.latency.summary.pods.quantile.watchlist.9
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 90% of the pod operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.apiserver.latency.summary.pods.quantile.watchlist.99
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 99% of the pod operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.scheduler.e2e.scheduling.latency.quantile.5
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 50% of the end-to-end scheduling operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.scheduler.e2e.scheduling.latency.quantile.9
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 90% of the end-to-end scheduling operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- - key: openshift.master.scheduler.e2e.scheduling.latency.quantile.99
- description: "Value from https://master/metrics. This is the time, in milliseconds, that 99% of the end-to-end scheduling operations have taken to complete."
- value_type: int
- applications:
- - Openshift Master Metrics
-
- zdiscoveryrules:
- - name: disc.pv
- key: disc.pv
- lifetime: 1
- description: "Dynamically register the Persistent Volumes"
-
- zitemprototypes:
- - discoveryrule_key: disc.pv
- name: "disc.pv.count.{#OSO_PV}"
- key: "disc.pv.count[{#OSO_PV}]"
- value_type: int
- description: "Number of PVs of this size"
- applications:
- - Openshift Master
-
- - discoveryrule_key: disc.pv
- name: "disc.pv.available.{#OSO_PV}"
- key: "disc.pv.available[{#OSO_PV}]"
- value_type: int
- description: "Number of PVs of this size that are available"
- applications:
- - Openshift Master
-
- ztriggers:
- - name: 'Openshift Master process not running on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.process.count.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- priority: high
-
- - name: 'Too many Openshift Master processes running on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.process.count.min(#3)}>1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- priority: high
-
- - name: 'Etcd ping failed on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.etcd.ping.last(#1)}=0 and {Template Openshift Master:openshift.master.etcd.ping.last(#2)}=0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_etcd.asciidoc'
- priority: high
-
- - name: 'Number of users for Openshift Master on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.user.count.last()}=0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- priority: info
-
- - name: 'There are no projects running on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.project.count.last()}=0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- priority: info
-
- # Put triggers that depend on other triggers here (deps must be created first)
- - name: 'Application creation has failed on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.app.create.last(#1)}=1 and {Template Openshift Master:openshift.master.app.create.last(#2)}=1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_create_app.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: avg
-
- - name: 'Application creation with build has failed on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.app.build.create.last(#1)}=1 and {Template Openshift Master:openshift.master.app.build.create.last(#2)}=1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_create_app.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: avg
-
- - name: 'Application creation has failed multiple times in the last hour on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.app.create.sum(1h)}>3'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_create_app.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- description: The application create loop has failed 4 or more times in the last hour
- priority: avg
-
- - name: 'Application creation with build has failed multiple times in the last 2 hours on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.app.build.create.sum(2h)}>3'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_create_app.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- description: The application create loop with build has failed 4 or more times in the last 2 hours
- priority: avg
-
- - name: 'Openshift Master API health check is failing on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.api.healthz.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- priority: high
-
- - name: 'Openshift Master Local API health check is failing on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.local.api.healthz.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: high
-
- - name: 'Openshift Master API PING check is failing on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.api.ping.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- priority: high
-
- - name: 'Openshift Master Local API PING check is failing on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.local.api.ping.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: high
-
- - name: 'Openshift Master metric PING check is failing on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.metric.ping.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: avg
-
- - name: 'SkyDNS port not listening on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.skydns.port.open.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: high
-
- - name: 'SkyDNS query failed on {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.skydns.query.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_master.asciidoc'
- dependencies:
- - 'Openshift Master API health check is failing on {HOST.NAME}'
- priority: high
-
- - name: 'Hosts not ready according to {HOST.NAME}'
- expression: '{Template Openshift Master:openshift.master.nodesnotready.count.last(#2)}>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_node.asciidoc'
- dependencies:
- - 'Openshift Master process not running on {HOST.NAME}'
- priority: high
-
- zgraphs:
- - name: Openshift Master API Server Latency Pods LIST Quantiles
- width: 900
- height: 200
- graph_items:
- - item_name: openshift.master.apiserver.latency.summary.pods.quantile.list.5
- color: red
- - item_name: openshift.master.apiserver.latency.summary.pods.quantile.list.9
- color: blue
- - item_name: openshift.master.apiserver.latency.summary.pods.quantile.list.99
- color: orange
-
- - name: Openshift Master API Server Latency Pods WATCHLIST Quantiles
- width: 900
- height: 200
- graph_items:
- - item_name: openshift.master.apiserver.latency.summary.pods.quantile.watchlist.5
- color: red
- - item_name: openshift.master.apiserver.latency.summary.pods.quantile.watchlist.9
- color: blue
- - item_name: openshift.master.apiserver.latency.summary.pods.quantile.watchlist.99
- color: orange
-
- - name: Openshift Master Scheduler End to End Latency Quantiles
- width: 900
- height: 200
- graph_items:
- - item_name: openshift.master.scheduler.e2e.scheduling.latency.quantile.5
- color: red
- - item_name: openshift.master.scheduler.e2e.scheduling.latency.quantile.9
- color: blue
- - item_name: openshift.master.scheduler.e2e.scheduling.latency.quantile.99
- color: orange
diff --git a/roles/os_zabbix/vars/template_openshift_node.yml b/roles/os_zabbix/vars/template_openshift_node.yml
deleted file mode 100644
index 9f84a2cdf..000000000
--- a/roles/os_zabbix/vars/template_openshift_node.yml
+++ /dev/null
@@ -1,70 +0,0 @@
----
-g_template_openshift_node:
- name: Template Openshift Node
- zitems:
- - key: openshift.node.process.count
- description: Shows number of OpenShift Node processes running
- value_type: int
- applications:
- - Openshift Node
-
- - key: openshift.node.ovs.pids.count
- description: Shows number of ovs process ids running
- value_type: int
- applications:
- - Openshift Node
-
- - key: openshift.node.ovs.ports.count
- description: Shows number of OVS ports defined
- value_type: int
- applications:
- - Openshift Node
-
- - key: openshift.node.ovs.stray.rules
- description: Number of OVS stray rules found/removed
- value_type: int
- applications:
- - Openshift Node
-
- - key: openshift.node.registry-pods.healthy_pct
- description: Shows the percentage of healthy registries in the cluster
- value_type: int
- applications:
- - Openshift Node
-
- - key: openshift.node.registry.service.ping
- description: Ping docker-registry service from node
- value_type: int
- applications:
- - Openshift Node
-
- ztriggers:
- - name: 'One or more Docker Registries is unhealthy according to {HOST.NAME}'
- expression: '{Template Openshift Node:openshift.node.registry-pods.healthy_pct.last(#2)}<100 and {Template Openshift Node:openshift.node.registry-pods.healthy_pct.last(#1)}<100'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_registry.asciidoc'
- priority: avg
-
- - name: 'Docker Registry service is unhealthy according to {HOST.NAME}'
- expression: '{Template Openshift Node:openshift.node.registry.service.ping.last(#2)}<1 and {Template Openshift Node:openshift.node.registry.service.ping.last(#1)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_registry.asciidoc'
- priority: avg
-
- - name: 'Openshift Node process not running on {HOST.NAME}'
- expression: '{Template Openshift Node:openshift.node.process.count.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_node.asciidoc'
- priority: high
-
- - name: 'Too many Openshift Node processes running on {HOST.NAME}'
- expression: '{Template Openshift Node:openshift.node.process.count.min(#3)}>1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_node.asciidoc'
- priority: high
-
- - name: '[Heal] OVS may not be running on {HOST.NAME}'
- expression: '{Template Openshift Node:openshift.node.ovs.pids.count.last(#1)}<>4 and {Template Openshift Node:openshift.node.ovs.pids.count.last(#2)}<>4'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_node.asciidoc'
- priority: high
-
- - name: 'Number of OVS ports is 0 on {HOST.NAME}'
- expression: '{Template Openshift Node:openshift.node.ovs.ports.count.last()}=0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/openshift_node.asciidoc'
- priority: high
diff --git a/roles/os_zabbix/vars/template_ops_tools.yml b/roles/os_zabbix/vars/template_ops_tools.yml
deleted file mode 100644
index a0a5a4d03..000000000
--- a/roles/os_zabbix/vars/template_ops_tools.yml
+++ /dev/null
@@ -1,54 +0,0 @@
----
-g_template_ops_tools:
- name: Template Operations Tools
- zdiscoveryrules:
- - name: disc.ops.runner
- key: disc.ops.runner
- lifetime: 1
- description: "Dynamically register operations runner items"
-
- zitemprototypes:
- - discoveryrule_key: disc.ops.runner
- name: "Exit code of ops-runner[{#OSO_COMMAND}]"
- key: "disc.ops.runner.command.exitcode[{#OSO_COMMAND}]"
- value_type: int
- description: "The exit code of the command run from ops-runner"
- applications:
- - Ops Runner
-
- ztriggerprototypes:
- - name: 'ops-runner[{#OSO_COMMAND}]: non-zero exit code on {HOST.NAME}'
- expression: '{Template Operations Tools:disc.ops.runner.command.exitcode[{#OSO_COMMAND}].last()}<>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_ops_runner_command.asciidoc'
- priority: average
-
- zactions:
- - name: 'Remote command for [Heal] triggers'
- status: enabled
- escalation_time: 60
- conditions_filter:
- calculation_type: "and/or"
- conditions:
- - conditiontype: maintenance status
- operator: not in
- - conditiontype: trigger name
- operator: like
- value: "[Heal]"
- - conditiontype: trigger value
- operator: "="
- value: PROBLEM
- operations:
- - esc_step_from: 1
- esc_step_to: 1
- esc_period: 0
- operationtype: remote command
- opcommand:
- command: 'ssh -i /etc/openshift_tools/scriptrunner_id_rsa {{ ozb_scriptrunner_user }}@{{ ozb_scriptrunner_bastion_host }} remote-healer --host \"{HOST.NAME}\" --trigger \"{TRIGGER.NAME}\" --trigger-val \"{TRIGGER.VALUE}\"'
- execute_on: "zabbix server"
- type: 'custom script'
- target_hosts:
- - target_type: 'zabbix server'
- opconditions:
- - conditiontype: 'event acknowledged'
- operator: '='
- value: 'not acknowledged'
diff --git a/roles/os_zabbix/vars/template_os_linux.yml b/roles/os_zabbix/vars/template_os_linux.yml
deleted file mode 100644
index c6e557f12..000000000
--- a/roles/os_zabbix/vars/template_os_linux.yml
+++ /dev/null
@@ -1,314 +0,0 @@
----
-g_template_os_linux:
- name: Template OS Linux
- zitems:
- - key: kernel.uname.sysname
- applications:
- - Kernel
- value_type: string
-
- - key: kernel.all.cpu.wait.total
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.all.cpu.irq.hard
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.all.cpu.idle
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.uname.distro
- applications:
- - Kernel
- value_type: string
-
- - key: kernel.uname.nodename
- applications:
- - Kernel
- value_type: string
-
- - key: kernel.all.cpu.irq.soft
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.all.load.15_minute
- applications:
- - Kernel
- value_type: float
-
- - key: kernel.all.cpu.sys
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.all.load.5_minute
- applications:
- - Kernel
- value_type: float
-
- - key: kernel.all.cpu.nice
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.all.load.1_minute
- applications:
- - Kernel
- value_type: float
-
- - key: kernel.uname.version
- applications:
- - Kernel
- value_type: string
-
- - key: kernel.all.uptime
- applications:
- - Kernel
- value_type: int
-
- - key: kernel.all.cpu.user
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.uname.machine
- applications:
- - Kernel
- value_type: string
-
- - key: hinv.ncpu
- applications:
- - Kernel
- value_type: int
-
- - key: kernel.all.cpu.steal
- applications:
- - Kernel
- value_type: float
- units: '%'
-
- - key: kernel.all.pswitch
- applications:
- - Kernel
- value_type: int
-
- - key: kernel.uname.release
- applications:
- - Kernel
- value_type: string
-
- - key: proc.nprocs
- applications:
- - Kernel
- value_type: int
-
- # Memory Items
- - key: mem.freemem
- applications:
- - Memory
- value_type: int
- description: "PCP: free system memory metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: mem.util.bufmem
- applications:
- - Memory
- value_type: int
- description: "PCP: Memory allocated for buffer_heads.; I/O buffers metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: swap.used
- applications:
- - Memory
- value_type: int
- description: "PCP: swap used metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: swap.length
- applications:
- - Memory
- value_type: int
- description: "PCP: total swap available metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: mem.physmem
- applications:
- - Memory
- value_type: int
- description: "PCP: The value of this metric corresponds to the \"MemTotal\" field reported by /proc/meminfo. Note that this does not necessarily correspond to actual installed physical memory - there may be areas of the physical address space mapped as ROM in various peripheral devices and the bios may be mirroring certain ROMs in RAM."
- multiplier: 1024
- units: B
-
- - key: swap.free
- applications:
- - Memory
- value_type: int
- description: "PCP: swap free metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: mem.util.available
- applications:
- - Memory
- value_type: int
- description: "PCP: The amount of memory that is available for a new workload, without pushing the system into swap. Estimated from MemFree, Active(file), Inactive(file), and SReclaimable, as well as the \"low\" watermarks from /proc/zoneinfo.; available memory from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: mem.util.used
- applications:
- - Memory
- value_type: int
- description: "PCP: Used memory is the difference between mem.physmem and mem.freemem; used memory metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- - key: mem.util.cached
- applications:
- - Memory
- value_type: int
- description: "PCP: Memory used by the page cache, including buffered file data. This is in-memory cache for files read from the disk (the pagecache) but doesn't include SwapCached.; page cache metric from /proc/meminfo"
- multiplier: 1024
- units: B
-
- zdiscoveryrules:
- - name: disc.filesys
- key: disc.filesys
- lifetime: 1
- description: "Dynamically register the filesystems"
-
- - name: disc.disk
- key: disc.disk
- lifetime: 1
- description: "Dynamically register disks on a node"
-
- - name: disc.network
- key: disc.network
- lifetime: 1
- description: "Dynamically register network interfaces on a node"
-
- zitemprototypes:
- - discoveryrule_key: disc.filesys
- name: "disc.filesys.full.{#OSO_FILESYS}"
- key: "disc.filesys.full[{#OSO_FILESYS}]"
- value_type: float
- description: "PCP filesys.full option. This is the percent full returned from pcp filesys.full"
- applications:
- - Disk
-
- - discoveryrule_key: disc.filesys
- name: "Percentage of used inodes on {#OSO_FILESYS}"
- key: "disc.filesys.inodes.pused[{#OSO_FILESYS}]"
- value_type: float
- description: "PCP derived value of percentage of used inodes on a filesystem."
- applications:
- - Disk
-
- - discoveryrule_key: disc.disk
- name: "TPS (IOPS) for disk {#OSO_DISK}"
- key: "disc.disk.tps[{#OSO_DISK}]"
- value_type: int
- description: "PCP disk.dev.totals metric measured over a period of time. This shows how many disk transactions per second the disk is using"
- applications:
- - Disk
-
- - discoveryrule_key: disc.disk
- name: "Percent Utilized for disk {#OSO_DISK}"
- key: "disc.disk.putil[{#OSO_DISK}]"
- value_type: float
- description: "PCP disk.dev.avactive metric measured over a period of time. This is the '%util' in the iostat command"
- applications:
- - Disk
-
- - discoveryrule_key: disc.network
- name: "Bytes per second IN on network interface {#OSO_NET_INTERFACE}"
- key: "disc.network.in.bytes[{#OSO_NET_INTERFACE}]"
- value_type: int
- units: B
- delta: 1
- description: "PCP network.interface.in.bytes metric. This is set up as a delta in Zabbix to measure the speed per second"
- applications:
- - Network
-
- - discoveryrule_key: disc.network
- name: "Bytes per second OUT on network interface {#OSO_NET_INTERFACE}"
- key: "disc.network.out.bytes[{#OSO_NET_INTERFACE}]"
- value_type: int
- units: B
- delta: 1
- description: "PCP network.interface.out.bytes metric. This is set up as a delta in Zabbix to measure the speed per second"
- applications:
- - Network
-
- ztriggerprototypes:
- - name: 'Filesystem: {#OSO_FILESYS} has less than 10% free disk space on {HOST.NAME}'
- expression: '{Template OS Linux:disc.filesys.full[{#OSO_FILESYS}].last()}>90'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_filesys_full.asciidoc'
- priority: high
-
- # This has a dependency on the previous trigger
- # Trigger Prototypes do not work in 2.4. They will work in Zabbix 3.0
- - name: 'Filesystem: {#OSO_FILESYS} has less than 15% free disk space on {HOST.NAME}'
- expression: '{Template OS Linux:disc.filesys.full[{#OSO_FILESYS}].last()}>85'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_filesys_full.asciidoc'
- priority: warn
- dependencies:
- - 'Filesystem: {#OSO_FILESYS} has less than 10% free disk space on {HOST.NAME}'
-
- - name: 'Filesystem: {#OSO_FILESYS} has less than 5% free inodes on {HOST.NAME}'
- expression: '{Template OS Linux:disc.filesys.inodes.pused[{#OSO_FILESYS}].last()}>95'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_filesys_full.asciidoc'
- priority: high
-
- # This has a dependency on the previous trigger
- # Trigger Prototypes do not work in 2.4. They will work in Zabbix 3.0
- - name: 'Filesystem: {#OSO_FILESYS} has less than 10% free inodes on {HOST.NAME}'
- expression: '{Template OS Linux:disc.filesys.inodes.pused[{#OSO_FILESYS}].last()}>90'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_filesys_full.asciidoc'
- priority: warn
- dependencies:
- - 'Filesystem: {#OSO_FILESYS} has less than 5% free inodes on {HOST.NAME}'
-
- ztriggers:
- - name: 'Too many TOTAL processes on {HOST.NAME}'
- expression: '{Template OS Linux:proc.nprocs.last()}>5000'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_proc.asciidoc'
- priority: warn
-
- - name: 'Lack of available memory on {HOST.NAME}'
- expression: '{Template OS Linux:mem.freemem.last()}<30720000'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_memory.asciidoc'
- priority: warn
- description: 'Alert when free memory drops below roughly 30 megabytes (30,720,000 bytes = 30000 KB x 1024)'
-
- # CPU Utilization #
- - name: 'CPU idle less than 5% on {HOST.NAME}'
- expression: '{Template OS Linux:kernel.all.cpu.idle.max(#5)}<5'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_cpu_idle.asciidoc'
- priority: average
- description: 'CPU is less than 5% idle'
-
- - name: 'CPU idle less than 10% on {HOST.NAME}'
- expression: '{Template OS Linux:kernel.all.cpu.idle.max(#5)}<10'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_cpu_idle.asciidoc'
- priority: average
- description: 'CPU is less than 10% idle'
- dependencies:
- - 'CPU idle less than 5% on {HOST.NAME}'
diff --git a/roles/os_zabbix/vars/template_performance_copilot.yml b/roles/os_zabbix/vars/template_performance_copilot.yml
deleted file mode 100644
index b62fa0228..000000000
--- a/roles/os_zabbix/vars/template_performance_copilot.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-g_template_performance_copilot:
- name: Template Performance Copilot
- zitems:
- - key: pcp.ping
- applications:
- - Performance Copilot
- value_type: int
-
- ztriggers:
- - name: 'pcp.ping failed on {HOST.NAME}'
- expression: '{Template Performance Copilot:pcp.ping.max(#3)}<1'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/check_pcp_ping.asciidoc'
- priority: average
diff --git a/roles/os_zabbix/vars/template_zagg_server.yml b/roles/os_zabbix/vars/template_zagg_server.yml
deleted file mode 100644
index db5665993..000000000
--- a/roles/os_zabbix/vars/template_zagg_server.yml
+++ /dev/null
@@ -1,46 +0,0 @@
----
-g_template_zagg_server:
- name: Template Zagg Server
- zitems:
- - key: zagg.server.metrics.count
- applications:
- - Zagg Server
- value_type: int
-
- - key: zagg.server.metrics.errors
- applications:
- - Zagg Server
- value_type: int
-
- - key: zagg.server.heartbeat.errors
- applications:
- - Zagg Server
- value_type: int
-
- - key: zagg.server.heartbeat.count
- applications:
- - Zagg Server
- value_type: int
-
- ztriggers:
- - name: 'Error processing metrics on {HOST.NAME}'
- expression: '{Template Zagg Server:zagg.server.metrics.errors.min(#3)}>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/zagg_server.asciidoc'
- priority: average
-
- - name: 'Error processing heartbeats on {HOST.NAME}'
- expression: '{Template Zagg Server:zagg.server.heartbeat.errors.min(#3)}>0'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/zagg_server.asciidoc'
- priority: average
-
- - name: 'Critically High number of metrics in Zagg queue {HOST.NAME}'
- expression: '{Template Zagg Server:zagg.server.metrics.count.min(#3)}>10000'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/zagg_server.asciidoc'
- priority: high
-
- - name: 'High number of metrics in Zagg queue {HOST.NAME}'
- expression: '{Template Zagg Server:zagg.server.metrics.count.min(#3)}>5000'
- url: 'https://github.com/openshift/ops-sop/blob/master/V3/Alerts/zagg_server.asciidoc'
- dependencies:
- - 'Critically High number of metrics in Zagg queue {HOST.NAME}'
- priority: average
diff --git a/roles/oso_host_monitoring/README.md b/roles/oso_host_monitoring/README.md
deleted file mode 100644
index f1fa05adb..000000000
--- a/roles/oso_host_monitoring/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-oso_host_monitoring
-=========
-
-Applies local host monitoring container(s).
-
-Requirements
-------------
-
-None.
-
-Role Variables
---------------
-
-osohm_zagg_web_url: URL of the Zagg monitoring service to contact
-osohm_host_monitoring: name of the host monitoring container
-osohm_zagg_client: name of the container with the zabbix client
-osohm_docker_registry_url: docker repository containing the above containers
-osohm_default_zagg_server_user: user name for logging in to the zabbix server
-osohm_default_zagg_password: password for the zabbix server
-
-Dependencies
-------------
-
-None.
-
-Example Playbook
-----------------
-
-An example of how to use this role, with variables passed in as parameters:
-
- - hosts: servers
- roles:
- - oso_host_monitoring
- vars:
- osohm_zagg_web_url: "https://..."
- osohm_host_monitoring: "oso-rhel7-host-monitoring"
- osohm_zagg_client: "oso-rhel7-zagg-client"
- osohm_docker_registry_url: "docker-registry.example.com/mon/"
- osohm_default_zagg_server_user: "zagg-client"
- osohm_default_zagg_password: "secret"
-
-License
--------
-
-ASL 2.0
-
-Author Information
-------------------
-
-OpenShift operations, Red Hat, Inc
diff --git a/roles/oso_host_monitoring/defaults/main.yml b/roles/oso_host_monitoring/defaults/main.yml
deleted file mode 100644
index ed97d539c..000000000
--- a/roles/oso_host_monitoring/defaults/main.yml
+++ /dev/null
@@ -1 +0,0 @@
----
diff --git a/roles/oso_host_monitoring/handlers/main.yml b/roles/oso_host_monitoring/handlers/main.yml
deleted file mode 100644
index 3a5d8024c..000000000
--- a/roles/oso_host_monitoring/handlers/main.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- name: "Restart the {{ osohm_host_monitoring }} service"
- service:
- name: "{{ osohm_host_monitoring }}"
- state: restarted
- enabled: yes
diff --git a/roles/oso_host_monitoring/meta/main.yml b/roles/oso_host_monitoring/meta/main.yml
deleted file mode 100644
index cce30c2db..000000000
--- a/roles/oso_host_monitoring/meta/main.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-galaxy_info:
- author: OpenShift
- description: apply monitoring container(s).
- company: Red Hat, Inc
- license: ASL 2.0
- min_ansible_version: 1.2
-dependencies: []
diff --git a/roles/oso_host_monitoring/tasks/main.yml b/roles/oso_host_monitoring/tasks/main.yml
deleted file mode 100644
index a0a453416..000000000
--- a/roles/oso_host_monitoring/tasks/main.yml
+++ /dev/null
@@ -1,47 +0,0 @@
----
-- fail:
- msg: "This playbook requires {{item}} to be set."
- when: "{{ item }} is not defined or {{ item }} == ''"
- with_items:
- - osohm_zagg_web_url
- - osohm_host_monitoring
- - osohm_docker_registry_url
- - osohm_default_zagg_server_user
- - osohm_default_zagg_server_password
-
-- name: create /etc/docker/ops
- file:
- path: /etc/docker/ops
- state: directory
- mode: 0770
- group: root
- owner: root
-
-- name: Copy dockercfg to /etc/docker/ops
- template:
- src: docker-registry.ops.cfg.j2
- dest: /etc/docker/ops/.dockercfg
- owner: root
- group: root
- mode: 0600
-
-- name: "Copy {{ osohm_host_monitoring }} systemd file"
- template:
- src: "{{ osohm_host_monitoring }}.service.j2"
- dest: "/etc/systemd/system/{{ osohm_host_monitoring }}.service"
- owner: root
- group: root
- mode: 0644
- notify:
- - "Restart the {{ osohm_host_monitoring }} service"
- register: systemd_host_monitoring
-
-- name: reload systemd
- command: /usr/bin/systemctl --system daemon-reload
- when: systemd_host_monitoring | changed
-
-- name: "Start the {{ osohm_host_monitoring }} service"
- service:
- name: "{{ osohm_host_monitoring }}"
- state: started
- enabled: yes
diff --git a/roles/oso_host_monitoring/templates/docker-registry.ops.cfg.j2 b/roles/oso_host_monitoring/templates/docker-registry.ops.cfg.j2
deleted file mode 100644
index 9e49da469..000000000
--- a/roles/oso_host_monitoring/templates/docker-registry.ops.cfg.j2
+++ /dev/null
@@ -1 +0,0 @@
-{"{{ osohm_docker_registry_ops_url }}":{"auth":"{{ osohm_docker_registry_ops_key }}","email":"{{ osohm_docker_registry_ops_email }}"}}
diff --git a/roles/oso_host_monitoring/templates/oso-rhel7-host-monitoring.service.j2 b/roles/oso_host_monitoring/templates/oso-rhel7-host-monitoring.service.j2
deleted file mode 100644
index e17092202..000000000
--- a/roles/oso_host_monitoring/templates/oso-rhel7-host-monitoring.service.j2
+++ /dev/null
@@ -1,78 +0,0 @@
-# This is a systemd file to run this docker container under systemd.
-# To make this work:
-# * pull the image (probably from ops docker registry)
-# * place this file in /etc/systemd/system without the .systemd extension
-# * run the commands:
-# systemctl daemon-reload
-# systemctl enable oso-rhel7-host-monitoring
-# systemctl start oso-rhel7-host-monitoring
-#
-#
-[Unit]
-Description=Openshift Host Monitoring Container
-Requires=docker.service
-After=docker.service
-
-
-[Service]
-Type=simple
-TimeoutStartSec=5m
-Environment=HOME=/etc/docker/ops
-#Slice=container-small.slice
-
-# systemd syntax '=-' ignore errors from return codes.
-ExecStartPre=-/usr/bin/docker kill "{{ osohm_host_monitoring }}"
-ExecStartPre=-/usr/bin/docker rm "{{ osohm_host_monitoring }}"
-ExecStartPre=-/usr/bin/docker pull "{{ osohm_docker_registry_url }}{{ osohm_host_monitoring }}"
-
-# mwoodson note 1-7-16:
-# pcp recommends mounting /run in their Dockerfile
-# /run conflicts with cron which also runs in this container.
-# I am leaving /run out for now. the guys in #pcp said that they mounted /run
-# to shared the pcp socket that is created in /run. We are not using this,
-# as far as I know.
-# This problem goes away with systemd being run in the containers and not using
-# cron but using systemd timers
-# -v /run:/run \
-
-ExecStart=/usr/bin/docker run --name {{ osohm_host_monitoring }} \
- --privileged \
- --pid=host \
- --net=host \
- --ipc=host \
- -e ZAGG_URL={{ osohm_zagg_web_url }} \
- -e ZAGG_USER={{ osohm_default_zagg_server_user }} \
- -e ZAGG_PASSWORD={{ osohm_default_zagg_server_password }} \
- -e ZAGG_CLIENT_HOSTNAME={{ oo_name }} \
- -e ZAGG_SSL_VERIFY={{ osohm_zagg_verify_ssl }} \
- -e OSO_CLUSTER_GROUP={{ cluster_group }} \
- -e OSO_CLUSTER_ID={{ oo_clusterid }} \
- -e OSO_ENVIRONMENT={{ oo_environment }} \
- -e OSO_HOST_TYPE={{ hostvars[inventory_hostname]['oo_hosttype'] }} \
- -e OSO_SUB_HOST_TYPE={{ hostvars[inventory_hostname]['oo_subhosttype'] }} \
- -e OSO_MASTER_HA={{ osohm_master_ha }} \
- -v /etc/localtime:/etc/localtime \
- -v /sys:/sys:ro \
- -v /sys/fs/selinux \
- -v /var/lib/docker:/var/lib/docker:ro \
- -v /var/run/docker.sock:/var/run/docker.sock \
- -v /var/run/openvswitch:/var/run/openvswitch \
-{% if hostvars[inventory_hostname]['oo_hosttype'] == 'master' %}
- -v /etc/origin/master/admin.kubeconfig:/etc/origin/master/admin.kubeconfig \
- -v /etc/origin/master/master.etcd-client.crt:/etc/origin/master/master.etcd-client.crt \
- -v /etc/origin/master/master.etcd-client.key:/etc/origin/master/master.etcd-client.key \
- -v /etc/origin/master/master-config.yaml:/etc/origin/master/master-config.yaml \
-{% elif hostvars[inventory_hostname]['oo_hosttype'] == 'node' %}
- -v /etc/origin/node:/etc/origin/node \
-{% endif %}
- {{ osohm_docker_registry_url }}{{ osohm_host_monitoring }}
-
-
-ExecReload=-/usr/bin/docker stop "{{ osohm_host_monitoring }}"
-ExecReload=-/usr/bin/docker rm "{{ osohm_host_monitoring }}"
-ExecStop=-/usr/bin/docker stop "{{ osohm_host_monitoring }}"
-Restart=always
-RestartSec=30
-
-[Install]
-WantedBy=default.target
diff --git a/roles/oso_host_monitoring/vars/main.yml b/roles/oso_host_monitoring/vars/main.yml
deleted file mode 100644
index ed97d539c..000000000
--- a/roles/oso_host_monitoring/vars/main.yml
+++ /dev/null
@@ -1 +0,0 @@
----
diff --git a/roles/oso_monitoring_tools/README.md b/roles/oso_monitoring_tools/README.md
deleted file mode 100644
index 4215f9eeb..000000000
--- a/roles/oso_monitoring_tools/README.md
+++ /dev/null
@@ -1,54 +0,0 @@
-oso_monitoring_tools
-====================
-
-This role installs the OpenShift monitoring utilities.
-
-Requirements
-------------
-
-A yum repository providing the openshift-tools monitoring packages must be configured on the target hosts.
-
-Role Variables
---------------
-
-osomt_zagg_client_config
-
-from vars/main.yml:
-
-    osomt_zagg_client_config:
-      host:
-        name: "{{ osomt_host_name }}"
-      zagg:
-        url: "{{ osomt_zagg_url }}"
-        user: "{{ osomt_zagg_user }}"
-        pass: "{{ osomt_zagg_password }}"
-        ssl_verify: "{{ osomt_zagg_ssl_verify }}"
-        verbose: "{{ osomt_zagg_verbose }}"
-        debug: "{{ osomt_zagg_debug }}"
-
-Dependencies
-------------
-
-None
-
-Example Playbook
-----------------
-
-    - role: "oso_monitoring_tools"
-      osomt_host_name: hostname
-      osomt_zagg_url: http://path.to/zagg_web
-      osomt_zagg_user: admin
-      osomt_zagg_password: password
-      osomt_zagg_ssl_verify: True
-      osomt_zagg_verbose: False
-      osomt_zagg_debug: False
-
-License
--------
-
-BSD
-
-Author Information
-------------------
-
-Openshift Operations
diff --git a/roles/oso_monitoring_tools/defaults/main.yml b/roles/oso_monitoring_tools/defaults/main.yml
deleted file mode 100644
index a17424f25..000000000
--- a/roles/oso_monitoring_tools/defaults/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# defaults file for oso_monitoring_tools
diff --git a/roles/oso_monitoring_tools/handlers/main.yml b/roles/oso_monitoring_tools/handlers/main.yml
deleted file mode 100644
index cefa780ab..000000000
--- a/roles/oso_monitoring_tools/handlers/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# handlers file for oso_monitoring_tools
diff --git a/roles/oso_monitoring_tools/meta/main.yml b/roles/oso_monitoring_tools/meta/main.yml
deleted file mode 100644
index 9c42b68dc..000000000
--- a/roles/oso_monitoring_tools/meta/main.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-galaxy_info:
- author: OpenShift Operations
- description: Install Openshift Monitoring tools
- company: Red Hat, Inc
- license: ASL 2.0
- min_ansible_version: 1.2
-dependencies: []
diff --git a/roles/oso_monitoring_tools/tasks/main.yml b/roles/oso_monitoring_tools/tasks/main.yml
deleted file mode 100644
index c90fc56e2..000000000
--- a/roles/oso_monitoring_tools/tasks/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-# tasks file for oso_monitoring_tools
-- name: Install the Openshift Tools RPMS
- yum:
- name: "{{ item }}"
- state: latest
- with_items:
- - openshift-tools-scripts-monitoring-zagg-client
- - python-openshift-tools-monitoring-zagg
- - python-openshift-tools-monitoring-zabbix
-
-- debug: var=g_zagg_client_config
-
-- name: Generate the /etc/openshift_tools/zagg_client.yaml config file
- copy:
- content: "{{ osomt_zagg_client_config | to_nice_yaml }}"
- dest: /etc/openshift_tools/zagg_client.yaml
- mode: "644"
diff --git a/roles/oso_monitoring_tools/vars/main.yml b/roles/oso_monitoring_tools/vars/main.yml
deleted file mode 100644
index 3538ba30b..000000000
--- a/roles/oso_monitoring_tools/vars/main.yml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-# vars file for oso_monitoring_tools
-osomt_zagg_client_config:
- host:
- name: "{{ osomt_host_name }}"
- zagg:
- url: "{{ osomt_zagg_url }}"
- user: "{{ osomt_zagg_user }}"
- pass: "{{ osomt_zagg_password }}"
- ssl_verify: "{{ osomt_zagg_ssl_verify }}"
- verbose: "{{ osomt_zagg_verbose }}"
- debug: "{{ osomt_zagg_debug }}"
diff --git a/roles/tito/README.md b/roles/tito/README.md
deleted file mode 100644
index c4e2856dc..000000000
--- a/roles/tito/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-tito
-====
-
-This role manages Tito.
-
-https://github.com/dgoodwin/tito
-
-Requirements
-------------
-
-None
-
-Role Variables
---------------
-
-None
-
-Dependencies
-------------
-
-None
-
-Example Playbook
-----------------
-
- - hosts: servers
- roles:
- - role: tito
-
-License
--------
-
-Apache License, Version 2.0
-
-Author Information
-------------------
-
-Thomas Wiest
diff --git a/roles/tito/defaults/main.yml b/roles/tito/defaults/main.yml
deleted file mode 100644
index dd7cd269e..000000000
--- a/roles/tito/defaults/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# defaults file for tito
diff --git a/roles/tito/handlers/main.yml b/roles/tito/handlers/main.yml
deleted file mode 100644
index e9ce609d5..000000000
--- a/roles/tito/handlers/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# handlers file for tito
diff --git a/roles/tito/meta/main.yml b/roles/tito/meta/main.yml
deleted file mode 100644
index fb121c08e..000000000
--- a/roles/tito/meta/main.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-galaxy_info:
- author: Thomas Wiest
- description: Manages Tito
- company: Red Hat
- license: Apache License, Version 2.0
- min_ansible_version: 1.2
- platforms:
- - name: EL
- versions:
- - 7
- categories:
- - packaging
-dependencies: []
diff --git a/roles/tito/tasks/main.yml b/roles/tito/tasks/main.yml
deleted file mode 100644
index 3cf9e2bfd..000000000
--- a/roles/tito/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-- action: "{{ ansible_pkg_mgr }} name=tito state=present"
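A hypothetical equivalent of the task above on Ansible 2.0 or later, where the generic package module replaces the ansible_pkg_mgr indirection (not part of the deleted role):

    # package dispatches to yum/dnf/apt as appropriate for the host
    - name: Install tito
      package:
        name: tito
        state: present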
diff --git a/roles/tito/vars/main.yml b/roles/tito/vars/main.yml
deleted file mode 100644
index 8a1aafc41..000000000
--- a/roles/tito/vars/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-# vars file for tito
diff --git a/roles/yum_repos/README.md b/roles/yum_repos/README.md
deleted file mode 100644
index 908ab4972..000000000
--- a/roles/yum_repos/README.md
+++ /dev/null
@@ -1,113 +0,0 @@
-Yum Repos
-=========
-
-This role allows easy deployment of yum repository config files.
-
-Requirements
-------------
-
-Yum or dnf
-
-Role Variables
---------------
-
-| Name | Default value | Description |
-|-------------------|---------------|--------------------------------------------|
-| repo_files | None | List of repo files, each with its own list of repos, to deploy |
-| repo_enabled | 1 | Should repos be enabled by default |
-| repo_gpgcheck | 1 | Should repo gpgcheck be enabled by default |
-
-Dependencies
-------------
-
-Example Playbook
-----------------
-
-A single repo file containing a single repo:
- - hosts: servers
- roles:
- - role: yum_repos
- repo_files:
- - id: my_repo
- repos:
- - id: my_repo
- name: My Awesome Repo
- baseurl: https://my.awesome.repo/is/available/here
- skip_if_unavailable: yes
- gpgkey: https://my.awesome.repo/pubkey.gpg
-
-A single repo file containing a single repo, disabling gpgcheck
- - hosts: servers
- roles:
- - role: yum_repos
- repo_files:
- - id: my_other_repo
- repos:
- - id: my_other_repo
- name: My Other Awesome Repo
- baseurl: https://my.other.awesome.repo/is/available/here
- gpgcheck: no
-
-A single repo file containing a single disabled repo
- - hosts: servers
- roles:
- - role: yum_repos
- repo_files:
- - id: my_other_repo
- repos:
- - id: my_other_repo
- name: My Other Awesome Repo
- baseurl: https://my.other.awesome.repo/is/available/here
- enabled: no
-
-A single repo file containing multiple repos
- - hosts: servers
- roles:
- - role: yum_repos
- repo_files:
- - id: my_repos
- repos:
- - id: my_repo
- name: My Awesome Repo
- baseurl: https://my.awesome.repo/is/available/here
- gpgkey: https://my.awesome.repo/pubkey.gpg
- - id: my_other_repo
- name: My Other Awesome Repo
- baseurl: https://my.other.awesome.repo/is/available/here
- gpgkey: https://my.other.awesome.repo/pubkey.gpg
-
-Multiple repo files containing multiple repos
- - hosts: servers
- roles:
- - role: yum_repos
- repo_files:
- - id: my_repos
- repos:
- - id: my_repo
- name: My Awesome Repo
- baseurl: https://my.awesome.repo/is/available/here
- gpgkey: https://my.awesome.repo/pubkey.gpg
- - id: my_other_repo
- name: My Other Awesome Repo
- baseurl: https://my.other.awesome.repo/is/available/here
- gpgkey: https://my.other.awesome.repo/pubkey.gpg
- - id: joes_repos
- repos:
- - id: joes_repo
- name: Joe's Less Awesome Repo
- baseurl: https://joes.repo/is/here
- gpgkey: https://joes.repo/pubkey.gpg
- - id: joes_otherrepo
- name: Joe's Other Less Awesome Repo
- baseurl: https://joes.repo/is/there
- gpgkey: https://joes.repo/pubkey.gpg
-
-License
--------
-
-ASL 2.0
-
-Author Information
-------------------
-
-openshift online operations
diff --git a/roles/yum_repos/defaults/main.yml b/roles/yum_repos/defaults/main.yml
deleted file mode 100644
index 515fb7a4a..000000000
--- a/roles/yum_repos/defaults/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-repo_enabled: 1
-repo_gpgcheck: 1
diff --git a/roles/yum_repos/meta/main.yml b/roles/yum_repos/meta/main.yml
deleted file mode 100644
index 6b8374da9..000000000
--- a/roles/yum_repos/meta/main.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-galaxy_info:
- author: openshift operations
- description: Deploy yum repository config files
- company: Red Hat, Inc.
- license: ASL 2.0
- min_ansible_version: 1.2
-dependencies: []
diff --git a/roles/yum_repos/tasks/main.yml b/roles/yum_repos/tasks/main.yml
deleted file mode 100644
index 46928a00b..000000000
--- a/roles/yum_repos/tasks/main.yml
+++ /dev/null
@@ -1,48 +0,0 @@
----
-# Convert old params to new params
-- set_fact:
- repo_files:
- - id: "{{ repo_tag }}"
- repos:
- - id: "{{ repo_tag }}"
- name: "{{ repo_name }}"
- baseurl: "{{ repo_baseurl }}"
- enabled: "{{ repo_enabled }}"
- gpgcheck: "{{ repo_gpg_check | default(repo_gpgcheck) }}"
- sslverify: "{{ repo_sslverify | default(None) }}"
- sslclientcert: "{{ repo_sslclientcert | default(None) }}"
- sslclientkey: "{{ repo_sslclientkey | default(None) }}"
- gpgkey: "{{ repo_gpgkey | default(None) }}"
- when: repo_files is not defined
-
-- name: Verify repo_files is a list
- assert:
- that:
- - repo_files is iterable and repo_files is not string and repo_files is not mapping
-
-- name: Verify repo_files items have an id and a repos list
- assert:
- that:
- - item is mapping
- - "'id' in item"
- - "'repos' in item"
- - item.repos is iterable and item.repos is not string and item.repos is not mapping
- with_items: repo_files
-
-- name: Verify that repo_files.repos have the required keys
- assert:
- that:
- - item.1 is mapping
- - "'id' in item.1"
- - "'name' in item.1"
- - "'baseurl' in item.1"
- with_subelements:
- - repo_files
- - repos
-
-- name: Installing yum-repo template
- template:
- src: yumrepo.j2
- dest: /etc/yum.repos.d/{{ item.id }}.repo
- with_items: repo_files
- when: not openshift.common.is_containerized | bool
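For illustration (hypothetical values), old-style parameters such as:

    repo_tag: my_repo
    repo_name: My Awesome Repo
    repo_baseurl: https://my.awesome.repo/is/available/here

are converted by the set_fact above into the new repo_files structure; the ssl and gpg keys are omitted here since they default to None when the corresponding repo_* variables are unset:

    repo_files:
      - id: my_repo
        repos:
          - id: my_repo
            name: My Awesome Repo
            baseurl: https://my.awesome.repo/is/available/here
            enabled: 1
            gpgcheck: 1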
diff --git a/roles/yum_repos/templates/yumrepo.j2 b/roles/yum_repos/templates/yumrepo.j2
deleted file mode 100644
index 0dfdbfe43..000000000
--- a/roles/yum_repos/templates/yumrepo.j2
+++ /dev/null
@@ -1,18 +0,0 @@
-{% set repos = item.repos %}
-{% for repo in repos %}
-[{{ repo.id }}]
-name={{ repo.name }}
-baseurl={{ repo.baseurl }}
-{% set repo_enabled_value = repo.enabled | default(repo_enabled) %}
-{% set enable_repo = 1 if (repo_enabled_value | int(0) == 1 or repo_enabled_value | lower in ['true', 'yes']) else 0 %}
-enabled={{ enable_repo }}
-{% set repo_gpgcheck_value = repo.gpgcheck | default(repo_gpgcheck) %}
-{% set enable_gpgcheck = 1 if (repo_gpgcheck_value | int(0) == 1 or repo_gpgcheck_value | lower in ['true', 'yes']) else 0 %}
-gpgcheck={{ enable_gpgcheck }}
-{% for key, value in repo.iteritems() %}
-{% if key not in ['id', 'name', 'baseurl', 'enabled', 'gpgcheck'] and value is defined and value != '' %}
-{{ key }}={{ value }}
-{% endif %}
-{% endfor %}
-
-{% endfor %}
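Using the first README example above (my_repo, with its hypothetical URLs), this template would render /etc/yum.repos.d/my_repo.repo roughly as follows; any additional keys such as skip_if_unavailable are appended by the final loop, and exact boolean formatting depends on how Jinja2 renders the values:

    [my_repo]
    name=My Awesome Repo
    baseurl=https://my.awesome.repo/is/available/here
    enabled=1
    gpgcheck=1
    gpgkey=https://my.awesome.repo/pubkey.gpg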