path: root/roles/openshift_logging/library
author      OpenShift Merge Robot <openshift-merge-robot@users.noreply.github.com>    2018-01-02 16:08:15 -0800
committer   GitHub <noreply@github.com>    2018-01-02 16:08:15 -0800
commit      b1be9cd5d7573dc9d18ca27915bb383f8591117b (patch)
tree        fa76d72a4ecf648056dfedc6c68cb5f315341409 /roles/openshift_logging/library
parent      8119a5c87a1560c2f607c06f30383133cc7137e5 (diff)
parent      8cb27ae800df71ee816852df56cd2c861a0f0a0a (diff)
Merge pull request #5894 from ewolinetz/logging_defaults_from_existing
Automatic merge from submit-queue.

Update logging to use existing cluster deployment for defaults

This will allow us to use logging facts to set defaults for specific configurations, such as the ES index replica and shard counts. The update to logging facts yields output like:

```json
"elasticsearch": {
    "clusterrolebindings": {},
    "configmaps": {
        "logging-elasticsearch": {
            "elasticsearch.yml": {
                "cloud": {
                    "kubernetes": {
                        "namespace": "${NAMESPACE}",
                        "pod_label": "${POD_LABEL}",
                        "pod_port": 9300
                    }
                },
                "cluster": {
                    "name": "${CLUSTER_NAME}"
                },
                "discovery": {
                    "type": "kubernetes",
                    "zen.minimum_master_nodes": "${NODE_QUORUM}",
                    "zen.ping.multicast.enabled": false
                },
                "gateway": {
                    "expected_nodes": "${RECOVER_EXPECTED_NODES}",
                    "recover_after_nodes": "${NODE_QUORUM}",
                    "recover_after_time": "${RECOVER_AFTER_TIME}"
                },
                "index": {
                    "number_of_replicas": 0,
                    "number_of_shards": 1,
                    "translog": {
                        "flush_threshold_period": "5m",
                        "flush_threshold_size": "256mb"
                    },
                    "unassigned.node_left.delayed_timeout": "2m"
                },
                "io.fabric8.elasticsearch.authentication.users": [
                    "system.logging.kibana",
                    "system.logging.fluentd",
                    "system.logging.curator",
                    "system.admin"
                ],
```

TODO:
- [x] Update logging facts to pull settings out of the config maps
- [x] Move `openshift_sanitize_inventory/library/conditional_set_fact.py` up to repo level
- [x] Generate diffs against the currently deployed configs and correctly patch in custom changes from customers
- [x] Use `conditional_set_fact` to easily set defaults for logging based on logging facts, falling back to role defaults when not specified in the inventory
- [x] Update all components to follow the configmap patching approach
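The defaulting behavior described above (prefer the value already running in the cluster, otherwise fall back to the role default) can be illustrated with a small standalone sketch. `default_from_facts` and the trimmed facts dict below are hypothetical stand-ins for illustration only, not the actual `conditional_set_fact` module or the full facts structure.

```python
# Illustrative only: walk the gathered logging facts for a setting and fall
# back to the role default when the deployment does not define it.
def default_from_facts(logging_facts, path, role_default):
    """Follow `path` through a nested facts dict; return role_default on any miss."""
    node = logging_facts
    for key in path:
        if not isinstance(node, dict) or key not in node:
            return role_default
        node = node[key]
    return node


# Facts shaped like the JSON excerpt above (heavily trimmed).
facts = {
    "elasticsearch": {
        "configmaps": {
            "logging-elasticsearch": {
                "elasticsearch.yml": {
                    "index": {"number_of_shards": 1, "number_of_replicas": 0}
                }
            }
        }
    }
}

shards = default_from_facts(
    facts,
    ["elasticsearch", "configmaps", "logging-elasticsearch",
     "elasticsearch.yml", "index", "number_of_shards"],
    role_default=1)
replicas = default_from_facts(
    facts,
    ["elasticsearch", "configmaps", "logging-elasticsearch",
     "elasticsearch.yml", "index", "number_of_replicas"],
    role_default=0)
print(shards, replicas)  # -> 1 0
```

In the role itself, per the TODO list above, this fallback is expressed with `conditional_set_fact` against the gathered logging facts rather than a helper like this.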
Diffstat (limited to 'roles/openshift_logging/library')
-rw-r--r--    roles/openshift_logging/library/logging_patch.py             | 112
-rw-r--r--    roles/openshift_logging/library/openshift_logging_facts.py   | 13
2 files changed, 124 insertions, 1 deletion
diff --git a/roles/openshift_logging/library/logging_patch.py b/roles/openshift_logging/library/logging_patch.py
new file mode 100644
index 000000000..d2c0bc456
--- /dev/null
+++ b/roles/openshift_logging/library/logging_patch.py
@@ -0,0 +1,112 @@
+#!/usr/bin/python
+
+""" Ansible module to help with creating context patch file with whitelisting for logging """
+
+import difflib
+import re
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+DOCUMENTATION = '''
+---
+module: logging_patch
+
+short_description: This will create a context patch file while giving the ability
+    to whitelist some lines (excluding them from comparison)
+
+description:
+ - "To create configmap patches for logging"
+
+author:
+ - Eric Wolinetz ewolinet@redhat.com
+'''
+
+
+EXAMPLES = '''
+- logging_patch:
+ original_file: "{{ tempdir }}/current.yml"
+ new_file: "{{ configmap_new_file }}"
+ whitelist: "{{ configmap_protected_lines | default([]) }}"
+
+'''
+
+
+def account_for_whitelist(file_contents, white_list=None):
+ """ This method will remove lines that contain whitelist values from the content
+ of the file so that we aren't build a patch based on that line
+
+ Usage:
+
+ for file_contents:
+
+ index:
+ number_of_shards: {{ es_number_of_shards | default ('1') }}
+ number_of_replicas: {{ es_number_of_replicas | default ('0') }}
+ unassigned.node_left.delayed_timeout: 2m
+ translog:
+ flush_threshold_size: 256mb
+ flush_threshold_period: 5m
+
+
+ and white_list:
+
+ ['number_of_shards', 'number_of_replicas']
+
+
+ We would end up with:
+
+ index:
+ unassigned.node_left.delayed_timeout: 2m
+ translog:
+ flush_threshold_size: 256mb
+ flush_threshold_period: 5m
+
+ """
+
+    for line in white_list or []:  # tolerate the default of None
+ file_contents = re.sub(r".*%s:.*\n" % line, "", file_contents)
+
+ return file_contents
+
+
+def run_module():
+ """ The body of the module, we check if the variable name specified as the value
+ for the key is defined. If it is then we use that value as for the original key """
+
+ module = AnsibleModule(
+ argument_spec=dict(
+ original_file=dict(type='str', required=True),
+ new_file=dict(type='str', required=True),
+ whitelist=dict(required=False, type='list', default=[])
+ ),
+ supports_check_mode=True
+ )
+
+ original_fh = open(module.params['original_file'], "r")
+ original_contents = original_fh.read()
+ original_fh.close()
+
+ original_contents = account_for_whitelist(original_contents, module.params['whitelist'])
+
+ new_fh = open(module.params['new_file'], "r")
+ new_contents = new_fh.read()
+ new_fh.close()
+
+ new_contents = account_for_whitelist(new_contents, module.params['whitelist'])
+
+ uni_diff = difflib.unified_diff(new_contents.splitlines(),
+ original_contents.splitlines(),
+ lineterm='')
+
+ return module.exit_json(changed=False, # noqa: F405
+ raw_patch="\n".join(uni_diff))
+
+
+def main():
+ """ main """
+ run_module()
+
+
+if __name__ == '__main__':
+ main()
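For reference, the whitelist-then-diff flow implemented above can be exercised standalone. The snippet below reproduces `account_for_whitelist` inline so it runs on its own; the deployed/template YAML strings are made up for illustration and are not taken from the role.

```python
# Minimal sketch of the whitelist + unified-diff flow used by logging_patch.py.
import difflib
import re


def account_for_whitelist(file_contents, white_list=None):
    # Drop any line mentioning a whitelisted key before comparing.
    for line in white_list or []:
        file_contents = re.sub(r".*%s:.*\n" % line, "", file_contents)
    return file_contents


deployed = """index:
  number_of_shards: 3
  number_of_replicas: 1
  unassigned.node_left.delayed_timeout: 5m
"""

template = """index:
  number_of_shards: 1
  number_of_replicas: 0
  unassigned.node_left.delayed_timeout: 2m
"""

whitelist = ["number_of_shards", "number_of_replicas"]

# Mirror the module: diff the (filtered) new contents against the (filtered)
# original contents.
patch = "\n".join(difflib.unified_diff(
    account_for_whitelist(template, whitelist).splitlines(),
    account_for_whitelist(deployed, whitelist).splitlines(),
    lineterm=""))
print(patch)
# Only the timeout line shows up in the patch; the whitelisted shard/replica
# lines were stripped from both sides before the comparison.
```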
diff --git a/roles/openshift_logging/library/openshift_logging_facts.py b/roles/openshift_logging/library/openshift_logging_facts.py
index 98d0d1c4f..302a9b4c9 100644
--- a/roles/openshift_logging/library/openshift_logging_facts.py
+++ b/roles/openshift_logging/library/openshift_logging_facts.py
@@ -204,6 +204,14 @@ class OpenshiftLoggingFacts(OCBaseCommand):
if comp is not None:
self.add_facts_for(comp, "services", name, dict())
+ # pylint: disable=too-many-arguments
+ def facts_from_configmap(self, comp, kind, name, config_key, yaml_file=None):
+ '''Extracts facts in logging namespace from configmap'''
+ if yaml_file is not None:
+ config_facts = yaml.load(yaml_file)
+ self.facts[comp][kind][name][config_key] = config_facts
+ self.facts[comp][kind][name]["raw"] = yaml_file
+
def facts_for_configmaps(self, namespace):
''' Gathers facts for configmaps in logging namespace '''
self.default_keys_for("configmaps")
@@ -214,7 +222,10 @@ class OpenshiftLoggingFacts(OCBaseCommand):
name = item["metadata"]["name"]
comp = self.comp(name)
if comp is not None:
- self.add_facts_for(comp, "configmaps", name, item["data"])
+ self.add_facts_for(comp, "configmaps", name, dict(item["data"]))
+ if comp in ["elasticsearch", "elasticsearch_ops"]:
+ for config_key in item["data"]:
+ self.facts_from_configmap(comp, "configmaps", name, config_key, item["data"][config_key])
def facts_for_oauthclients(self, namespace):
''' Gathers facts for oauthclients used with logging '''
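The new `facts_from_configmap` logic above boils down to parsing each configmap data value as YAML and nesting the result under the component's facts, which produces structures like the JSON excerpt in the PR description. A minimal standalone sketch, using a made-up, trimmed configmap item:

```python
# Illustrative only: how a configmap data entry becomes nested logging facts.
# The configmap item below is hypothetical; the module itself calls yaml.load
# on the raw string, safe_load is used here for the standalone example.
import yaml

configmap_item = {
    "metadata": {"name": "logging-elasticsearch"},
    "data": {
        "elasticsearch.yml": (
            "cluster:\n"
            "  name: ${CLUSTER_NAME}\n"
            "index:\n"
            "  number_of_shards: 1\n"
            "  number_of_replicas: 0\n"
        )
    }
}

facts = {"elasticsearch": {"configmaps": {}}}
name = configmap_item["metadata"]["name"]
facts["elasticsearch"]["configmaps"][name] = dict(configmap_item["data"])

for config_key, raw in configmap_item["data"].items():
    parsed = yaml.safe_load(raw)  # YAML string -> nested dict of settings
    facts["elasticsearch"]["configmaps"][name][config_key] = parsed
    facts["elasticsearch"]["configmaps"][name]["raw"] = raw

# The role can now read deployed settings back out of the facts:
print(facts["elasticsearch"]["configmaps"][name]["elasticsearch.yml"]["index"]["number_of_replicas"])  # 0
```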