#!/usr/bin/env python
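"""Meta inventory script for Ansible: fans out to the provider inventory
scripts listed in a yaml config, merges their output, and caches the result.

A minimal sketch of the expected config (the key names below are the ones
this script reads; the provider script and credential values are
illustrative placeholders):

    cache_max_age: 300
    clouds:
      - name: aws_account_one
        provider: ec2.py
        env_vars:
          AWS_ACCESS_KEY_ID: <key>
          AWS_SECRET_ACCESS_KEY: <secret>
"""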
import argparse
import json
import os
import subprocess
from time import time

import yaml


class MetaInventory(object):
def __init__(self):
self.config = None
self.results = {}
self.result = {}
self.cache_path_cache = os.path.expanduser('~/.ansible/tmp/meta-inventory.cache')
self.parse_cli_args()
# load yaml
self.load_yaml_config()
        # if it's a host query, fetch and do not cache
if self.args.host:
self.get_inventory()
elif not self.is_cache_valid():
# go fetch the inventories and cache them if cache is expired
self.get_inventory()
self.write_to_cache()
else:
# get data from disk
self.get_inventory_from_cache()
    def load_yaml_config(self, conf_file=None):
        """Load a yaml config file with credentials to query the
        respective cloud for inventory.
        """
        if conf_file is None:
            conf_file = os.path.join(os.getcwd(), 'meta.yaml')
        with open(conf_file) as conf:
            self.config = yaml.safe_load(conf)
    def get_provider_tags(self, provider, env=None):
        """Call <provider> and query all of the tags that are usable
        by ansible. If environment is empty use the default env.
        """
        if env is None:
            env = os.environ
        # check to see if the provider script exists
        provider_path = os.path.join(os.getcwd(), provider)
        if not os.path.isfile(provider_path):
            raise RuntimeError("Unknown provider: %s" % provider)
        cmds = [provider_path]
        if self.args.host:
            cmds.append("--host")
            cmds.append(self.args.host)
        else:
            cmds.append('--list')
            cmds.append('--refresh-cache')
        return subprocess.Popen(cmds, stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE, env=env,
                                universal_newlines=True)
def get_inventory(self):
"""Create the subprocess to fetch tags from a provider.
Host query:
Query to return a specific host. If > 1 queries have
results then fail.
List query:
Query all of the different clouds for their tags. Once completed
store all of their results into one merged updated hash.
"""
processes = {}
for account in self.config['clouds']:
env = account['env_vars']
name = account['name']
provider = account['provider']
processes[name] = self.get_provider_tags(provider, env)
        # for each process collect stdout when it's available
all_results = []
for name, process in processes.items():
out, err = process.communicate()
all_results.append({
"name": name,
"out": out.strip(),
"err": err.strip(),
"code": process.returncode
})
        if not self.args.host:
            # For any non-zero return code, raise an error; otherwise record
            # the parsed output under the cloud's name
            for result in all_results:
                if result['code'] != 0:
                    raise RuntimeError(result['err'])
                else:
                    self.results[result['name']] = json.loads(result['out'])
            self.merge()
        else:
            # For any successful, non-empty result, keep it
            count = 0
            for result in all_results:
                if result['code'] == 0 and result['err'] == '' and result['out'] != '{}':
                    self.result = json.loads(result['out'])
                    count += 1
            if count > 1:
                raise RuntimeError("Found > 1 results for --host %s. "
                                   "This is an invalid state." % self.args.host)
def merge(self):
"""Merge the results into a single hash. Duplicate keys are placed
into a list.
"""
        for cloud_result in self.results.values():
            for k, v in cloud_result.items():
                if k in self.result:
                    # need to combine into a list
                    if isinstance(self.result[k], list):
                        self.result[k].append(v)
                    else:
                        self.result[k] = [self.result[k], v]
                else:
                    self.result[k] = [v]
    def is_cache_valid(self):
        ''' Determines whether the cache file is still valid or has expired '''
        if os.path.isfile(self.cache_path_cache):
            mod_time = os.path.getmtime(self.cache_path_cache)
            current_time = time()
            if (mod_time + self.config['cache_max_age']) > current_time:
                return True
        return False
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on a provider')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
self.args = parser.parse_args()
    def write_to_cache(self):
        ''' Writes data in JSON format to a file '''
        # make sure the cache directory exists before writing
        cache_dir = os.path.dirname(self.cache_path_cache)
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        json_data = self.json_format_dict(self.result, True)
        with open(self.cache_path_cache, 'w') as cache:
            cache.write(json_data)
def get_inventory_from_cache(self):
        ''' Reads the inventory from the cache file and loads it into
            self.result '''
with open(self.cache_path_cache, 'r') as cache:
self.result = json.loads(cache.read())
def json_format_dict(self, data, pretty=False):
        ''' Serializes a dict to a JSON string, optionally pretty-printed '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
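# Example usage (hypothetical file name; --list is the default action):
#   ./meta_inventory.py --list
#   ./meta_inventory.py --host somehost.example.com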
if __name__ == "__main__":
    mi = MetaInventory()
    # inventory scripts must emit JSON on stdout for ansible to consume
    print(mi.json_format_dict(mi.result, pretty=True))