From 4712e72c912a1102bff0508c98bd97da3f33ae95 Mon Sep 17 00:00:00 2001
From: Jason DeTiberus
Date: Mon, 23 Mar 2015 23:53:17 -0400
Subject: openshift_facts role/module refactor default settings

- Add openshift_facts role and module
  - Created new role openshift_facts that contains an openshift_facts module
  - Refactor openshift_* roles to use openshift_facts instead of relying on defaults
  - Refactor playbooks to use openshift_facts
  - Cleanup inventory group_vars
- Update defaults
  - update openshift_master role firewall defaults
    - remove etcd peer port, since we will not be supporting clustered embedded etcd
    - remove 8444 since console now runs on the api port by default
    - add 8444 and 7001 to disabled services to ensure removal if updating
- Add new role os_env_extras_node that is a subset of the docker role
  - previously, we were starting/enabling docker, which was causing issues with some installations
  - Does not install or start docker, since the openshift-node role will handle that for us
  - Only adds root to the dockerroot group
  - Update playbooks to use the os_env_extras_node role instead of the docker role
- os_firewall bug fixes
  - ignore ip6tables for now, since we are not configuring any ipv6 rules
  - if installing a package, do a daemon-reload before starting/enabling the service
- Add aws support to bin/cluster
- Add list action to bin/cluster
- Add update action to bin/cluster
- cleanup some stray debug statements
- some variable renaming for clarity
---
 playbooks/aws/openshift-cluster/terminate.yml | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 playbooks/aws/openshift-cluster/terminate.yml

(limited to 'playbooks/aws/openshift-cluster/terminate.yml')

diff --git a/playbooks/aws/openshift-cluster/terminate.yml b/playbooks/aws/openshift-cluster/terminate.yml
new file mode 100644
index 000000000..39607633a
--- /dev/null
+++ b/playbooks/aws/openshift-cluster/terminate.yml
@@ -0,0 +1,14 @@
+---
+- name: Terminate instance(s)
+  hosts: localhost
+
+  vars_files:
+  - vars.yml
+
+- include: ../openshift-node/terminate.yml
+  vars:
+    oo_host_group_exp: 'groups["tag_env-host-type_{{ cluster_id }}-openshift-node"]'
+
+- include: ../openshift-master/terminate.yml
+  vars:
+    oo_host_group_exp: 'groups["tag_env-host-type_{{ cluster_id }}-openshift-master"]'
--
cgit v1.2.3
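One of the os_firewall fixes listed in the commit message above is ordering a systemd daemon-reload between installing the firewall package and starting/enabling its service. The following is a minimal sketch of that ordering, not the role's actual task file; the package and service names (iptables-services, iptables) are assumptions:

# Hypothetical sketch of the os_firewall ordering fix described above:
# reload systemd unit files after installing the package and before the
# service is started/enabled. Package and service names are assumptions.
- name: Install the iptables service package
  yum:
    name: iptables-services
    state: present
  register: install_result

- name: Reload systemd so the newly installed unit file is picked up
  command: systemctl daemon-reload
  when: install_result.changed

- name: Start and enable the firewall service
  service:
    name: iptables
    state: started
    enabled: yes

Running the daemon-reload only when the install task reports a change keeps the reload out of steady-state runs while still guaranteeing that a freshly installed unit file is known to systemd before it is enabled.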
From 6a4b7a5eb6c4b5e747bab795e2428d7c3992f559 Mon Sep 17 00:00:00 2001
From: Jason DeTiberus
Date: Wed, 1 Apr 2015 15:09:19 -0400
Subject: Configuration updates for latest builds and major refactor

Configuration updates for latest builds
- Switch to using create-node-config
- Switch sdn services to use etcd over SSL
  - This re-uses the client certificate deployed on each node
- Additional node registration changes
- Do not assume that the metadata service is available in the openshift_facts module
- Call systemctl daemon-reload after installing openshift-master, openshift-sdn-master, openshift-node, openshift-sdn-node
- Fix bug overriding openshift_hostname and openshift_public_hostname in byo playbooks
- Start moving generated configs to /etc/openshift
- Some custom module cleanup
- Add known issue with ansible-1.9 to README_OSE.md
- Update to genericize the kubernetes_register_node module
  - Default to use kubectl for commands
  - Allow for overriding kubectl_cmd
  - In openshift_register_node role, override kubectl_cmd to openshift_kube
- Set default openshift_registry_url for enterprise when deployment_type is enterprise
- Fix openshift_register_node for client config change
- Ensure that the master certs directory is created
- Add roles and filter_plugin symlinks to playbooks/common/openshift-master and node
- Allow non-root user with sudo nopasswd access
  - Updates for README_OSE.md
  - Update byo inventory for adding additional comments
  - Updates for node cert/config sync to work with non-root user using sudo
  - Move node config/certs to /etc/openshift/node
- Don't use path for mktemp. Addresses https://github.com/openshift/openshift-ansible/issues/154

Create common playbooks
- create common/openshift-master/config.yml
- create common/openshift-node/config.yml
- update playbooks to use new common playbooks
- update launch playbooks to call update playbooks
- fix openshift_registry and openshift_node_ip usage

Set default deployment type to origin
- openshift_repo updates for enabling origin deployments
  - also separate repo and gpgkey file structure
  - remove kubernetes repo since it isn't currently needed
- full deployment type support for bin/cluster
  - honor OS_DEPLOYMENT_TYPE env variable
  - add --deployment-type option, which will override OS_DEPLOYMENT_TYPE if set
  - if neither OS_DEPLOYMENT_TYPE nor --deployment-type is set, defaults to origin installs

Additional changes:
- Add separate config action to bin/cluster that runs ansible config but does not update packages
- Some more duplication reduction in cluster playbooks
- Rename task files in playbooks dirs to have tasks in their name for clarity
- update aws/gce scripts to use a directory for inventory (otherwise, when there are no hosts returned from dynamic inventory, there is an error)

libvirt refactor and update
- add libvirt dynamic inventory
- updates to use dynamic inventory for libvirt
---
 playbooks/aws/openshift-cluster/terminate.yml | 24 ++++++++++++++++++------
 1 file changed, 18 insertions(+), 6 deletions(-)

(limited to 'playbooks/aws/openshift-cluster/terminate.yml')

diff --git a/playbooks/aws/openshift-cluster/terminate.yml b/playbooks/aws/openshift-cluster/terminate.yml
index 39607633a..1d2b60594 100644
--- a/playbooks/aws/openshift-cluster/terminate.yml
+++ b/playbooks/aws/openshift-cluster/terminate.yml
@@ -1,14 +1,26 @@
 ---
 - name: Terminate instance(s)
   hosts: localhost
-
+  gather_facts: no
   vars_files:
-  - vars.yml
+  - vars.yml
+  tasks:
+  - set_fact: scratch_group=tag_env-host-type_{{ cluster_id }}-openshift-node
+  - add_host:
+      name: "{{ item }}"
+      groups: oo_nodes_to_terminate
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups[scratch_group] | default([]) | difference(['localhost'])
+
+  - set_fact: scratch_group=tag_env-host-type_{{ cluster_id }}-openshift-master
+  - add_host:
+      name: "{{ item }}"
+      groups: oo_masters_to_terminate
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups[scratch_group] | default([]) | difference(['localhost'])
 
 - include: ../openshift-node/terminate.yml
-  vars:
-    oo_host_group_exp: 'groups["tag_env-host-type_{{ cluster_id }}-openshift-node"]'
 
 - include: ../openshift-master/terminate.yml
-  vars:
-    oo_host_group_exp: 'groups["tag_env-host-type_{{ cluster_id }}-openshift-master"]'
--
cgit v1.2.3
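The add_host entries added in the diff above pull their connection settings from a deployment_vars dictionary keyed by deployment_type. The vars.yml shape implied by those lookups is roughly the following; only the keys that the lookups require are shown, and the values are illustrative rather than the repository's actual file:

# Hypothetical shape of playbooks/aws/openshift-cluster/vars.yml implied by
# the deployment_vars[deployment_type] lookups above. Values are illustrative;
# deployment_type itself is supplied externally (e.g. by bin/cluster).
deployment_vars:
  origin:
    ssh_user: centos    # remote user the terminate plays connect as
    sudo: yes           # whether that user needs sudo
  enterprise:
    ssh_user: ec2-user
    sudo: yes

Keeping the per-deployment connection details in one dictionary is what lets the terminate tasks stay identical for origin and enterprise installs; only the lookup key changes.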
From ba5ae4dbc7741af1963df36fd92bcd0af03c6b4f Mon Sep 17 00:00:00 2001
From: Jason DeTiberus
Date: Thu, 16 Apr 2015 22:44:12 -0400
Subject: aws terminate playbook improvements

- Reduce duplication in terminate playbooks between openshift-master and openshift-node (they both now just include playbooks/aws/terminate.yml)
- update openshift-cluster terminate playbook to include the new shared terminate playbook, also delete all cluster hosts at once instead of treating masters and nodes differently
- remove env, host-type and env-host-type tags from instance before terminating (since most users can't terminate, we are mostly just renaming instances to -terminate and stopping them, so this prevents "terminated" hosts from being returned by the dynamic inventory, at least after the cache is refreshed)
---
 playbooks/aws/openshift-cluster/terminate.yml | 16 +++-------------
 1 file changed, 3 insertions(+), 13 deletions(-)

(limited to 'playbooks/aws/openshift-cluster/terminate.yml')

diff --git a/playbooks/aws/openshift-cluster/terminate.yml b/playbooks/aws/openshift-cluster/terminate.yml
index 1d2b60594..617d0d456 100644
--- a/playbooks/aws/openshift-cluster/terminate.yml
+++ b/playbooks/aws/openshift-cluster/terminate.yml
@@ -5,22 +5,12 @@
   vars_files:
   - vars.yml
   tasks:
-  - set_fact: scratch_group=tag_env-host-type_{{ cluster_id }}-openshift-node
+  - set_fact: scratch_group=tag_env_{{ cluster_id }}
   - add_host:
       name: "{{ item }}"
-      groups: oo_nodes_to_terminate
+      groups: oo_hosts_to_terminate
       ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
       ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
     with_items: groups[scratch_group] | default([]) | difference(['localhost'])
 
-  - set_fact: scratch_group=tag_env-host-type_{{ cluster_id }}-openshift-master
-  - add_host:
-      name: "{{ item }}"
-      groups: oo_masters_to_terminate
-      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
-      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
-    with_items: groups[scratch_group] | default([]) | difference(['localhost'])
-
-- include: ../openshift-node/terminate.yml
-
-- include: ../openshift-master/terminate.yml
+- include: ../terminate.yml
--
cgit v1.2.3
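The shared terminate playbook that the cluster playbook now includes is only described in the commit message here (untag, rename to -terminate, stop). A rough sketch of that behaviour is shown below; it assumes the ec2.py dynamic-inventory hostvars (ec2_id, ec2_region, ec2_tag_*) and is not the repository's actual ../terminate.yml:

# Hypothetical sketch of the shared playbooks/aws/terminate.yml behaviour
# described in the commit message above; the real playbook may differ.
- name: Untag, rename and stop cluster hosts
  hosts: oo_hosts_to_terminate
  gather_facts: no
  connection: local
  tasks:
  - name: Remove env/host-type tags so dynamic inventory stops returning the host
    ec2_tag:
      resource: "{{ ec2_id }}"
      region: "{{ ec2_region }}"
      state: absent
      tags:
        env: "{{ ec2_tag_env }}"
        host-type: "{{ hostvars[inventory_hostname]['ec2_tag_host-type'] }}"
        env-host-type: "{{ hostvars[inventory_hostname]['ec2_tag_env-host-type'] }}"

  - name: Rename the instance to mark it as terminated
    ec2_tag:
      resource: "{{ ec2_id }}"
      region: "{{ ec2_region }}"
      state: present
      tags:
        Name: "{{ ec2_tag_Name }}-terminate"

  - name: Stop the instance (most users cannot actually terminate)
    ec2:
      instance_ids: "{{ ec2_id }}"
      region: "{{ ec2_region }}"
      state: stopped

Removing the env, host-type and env-host-type tags is what keeps the stopped hosts out of later dynamic-inventory runs once the inventory cache is refreshed, which is the effect the commit message calls out.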