#!/usr/bin/env python

# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

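"""Helpers used by the synchronizer to render Jinja2-based Ansible playbooks,
run them through ansible_main.py in a child process, and interpret the
per-task results and playbook statistics."""
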
from __future__ import print_function
import jinja2
import tempfile
import os
import json
import pickle
import pdb
import string
import random
import re
import traceback
import subprocess
import threading

from multiprocessing import Process, Queue
from xosconfig import Config

from multistructlog import create_logger

log = create_logger(Config().get("logging"))


step_dir = Config.get("steps_dir")
sys_dir = Config.get("sys_dir")

os_template_loader = jinja2.FileSystemLoader(
    searchpath=[step_dir, "/opt/xos/synchronizers/shared_templates"]
)
os_template_env = jinja2.Environment(loader=os_template_loader)
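# Note: jinja2.FileSystemLoader searches the listed paths in order, so a
# template in the synchronizer's own steps directory takes precedence over a
# template of the same name in the shared_templates directory.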


def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    return "".join(random.choice(chars) for _ in range(size))


def shellquote(s):
    return "'" + s.replace("'", "'\\''") + "'"
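# Illustrative examples (the generated value is random):
#   id_generator()      -> "X7K2QP"  (six random uppercase letters/digits)
#   shellquote("don't") -> 'don'\''t'  (safe to embed in a POSIX shell command)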


def get_playbook_fn(opts, path):
    if not opts.get("ansible_tag", None):
        # if no ansible_tag is in the options, then generate a unique one
        objname = id_generator()
        opts = opts.copy()
        opts["ansible_tag"] = objname

    objname = opts["ansible_tag"]

    pathed_sys_dir = os.path.join(sys_dir, path)
    if not os.path.isdir(pathed_sys_dir):
        os.makedirs(pathed_sys_dir)

    # symlink steps/roles into sys/roles so that playbooks can access roles
    roledir = os.path.join(step_dir, "roles")
    rolelink = os.path.join(pathed_sys_dir, "roles")
    if os.path.isdir(roledir) and not os.path.islink(rolelink):
        os.symlink(roledir, rolelink)

    return (opts, os.path.join(pathed_sys_dir, objname))


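# run_playbook() hands the playbook off to ansible_main.py in a child process:
# the arguments are pickled to a file in a temporary directory, the child is
# invoked with the args and result file paths, and the pickled result
# (containing "stats", "aresults", and possibly "exception") is read back
# once it exits.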
def run_playbook(ansible_hosts, ansible_config, fqp, opts):
    args = {
        "ansible_hosts": ansible_hosts,
        "ansible_config": ansible_config,
        "fqp": fqp,
        "opts": opts,
        "config_file": Config.get_config_file(),
    }

    keep_temp_files = Config.get("keep_temp_files")

    dir = tempfile.mkdtemp()
    args_fn = None
    result_fn = None
    try:
        log.info("creating args file", dir=dir)

        args_fn = os.path.join(dir, "args")
        result_fn = os.path.join(dir, "result")

        # write the pickled arguments in binary mode and close the handle so
        # the child process sees a complete file
        with open(args_fn, "wb") as f:
            pickle.dump(args, f)

        ansible_main_fn = os.path.join(os.path.dirname(__file__), "ansible_main.py")

        # run ansible_main.py in a separate process; passing an argument list
        # avoids shell quoting issues
        subprocess.call(["python", ansible_main_fn, args_fn, result_fn])

        with open(result_fn, "rb") as f:
            result = pickle.load(f)

        if "exception" in result:
            log.error("Exception in playbook", exception=result["exception"])

        stats = result.get("stats", None)
        aresults = result.get("aresults", None)
    except Exception:
        log.exception("Exception running ansible_main")
        stats = None
        aresults = None
    finally:
        if not keep_temp_files:
            if args_fn and os.path.exists(args_fn):
                os.remove(args_fn)
            if result_fn and os.path.exists(result_fn):
                os.remove(result_fn)
            os.rmdir(dir)

    return (stats, aresults)


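# run_template() renders the named Jinja2 playbook template with `opts`,
# writes it under the synchronizer's sys directory, runs it via
# run_playbook(), and then walks the per-task results: unreachable hosts and
# failed tasks are collected into an error message, each task's result is
# appended to <playbook>.out, and the ok results (minus the initial setup
# task) are returned.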
def run_template(
    name,
    opts,
    path="",
    expected_num=None,
    ansible_config=None,
    ansible_hosts=None,
    run_ansible_script=None,
    object=None,
):
    template = os_template_env.get_template(name)
    buffer = template.render(opts)

    (opts, fqp) = get_playbook_fn(opts, path)

    # write the rendered playbook and close the handle so the file is
    # complete on disk before ansible reads it
    f = open(fqp, "w")
    f.write(buffer)
    f.flush()
    f.close()

    """
    q = Queue()
    p = Process(target=run_playbook, args=(ansible_hosts, ansible_config, fqp, opts, q,))
    p.start()
    stats,aresults = q.get()
    p.join()
    """
    stats, aresults = run_playbook(ansible_hosts, ansible_config, fqp, opts)

    error_msg = []

    output_file = fqp + ".out"
    try:
        if aresults is None:
            raise ValueError("Error executing playbook %s" % fqp)

        ok_results = []
        total_unreachable = 0
        failed = 0

        ofile = open(output_file, "w")

        for x in aresults:
            if not x.is_failed() and not x.is_unreachable() and not x.is_skipped():
                ok_results.append(x)
            elif x.is_unreachable():
                failed += 1
                total_unreachable += 1
                try:
                    error_msg.append(x._result["msg"])
                except BaseException:
                    pass
            elif x.is_failed():
                failed += 1
                try:
                    error_msg.append(x._result["msg"])
                except BaseException:
                    pass

            # FIXME (zdw, 2017-02-19) - may not be needed with new callback logging

            ofile.write("%s: %s\n" % (x._task, str(x._result)))

            if object:
                oprops = object.tologdict()
                ansible = x._result
                oprops["xos_type"] = "ansible"
                oprops["ansible_result"] = json.dumps(ansible)

                if failed == 0:
                    oprops["ansible_status"] = "OK"
                else:
                    oprops["ansible_status"] = "FAILED"

                log.info("Ran Ansible task", task=x._task, **oprops)

        ofile.close()

        if (expected_num is not None) and (len(ok_results) != expected_num):
            raise ValueError(
                "Unexpected num %s!=%d" % (str(expected_num), len(ok_results))
            )

        if failed:
            raise ValueError("Ansible playbook failed.")

        # NOTE(smbaker): Playbook errors are slipping through where `aresults` does
        # not show any failed tasks, but `stats` does show them. See CORD-3169.
        hosts = sorted(stats.processed.keys())
        for h in hosts:
            t = stats.summarize(h)
            if t["unreachable"] > 0:
                raise ValueError(
                    "Ansible playbook reported unreachable for host %s" % h
                )
            if t["failures"] > 0:
                raise ValueError("Ansible playbook reported failures for host %s" % h)

    except ValueError as e:
        if error_msg:
            try:
                error = " // ".join(error_msg)
            except BaseException:
                error = "failed to join error_msg"
            raise Exception(error)
        else:
            raise
    # a list comprehension (rather than map()) so the result is sliceable on
    # both Python 2 and Python 3
    processed_results = [x._result for x in ok_results]
    return processed_results[1:]  # 0 is setup


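# run_template_ssh() prepares the per-object ansible files needed to reach an
# instance over ssh -- a private key file, an ansible.cfg (optionally carrying
# a ProxyCommand for proxied access to the instance), and a one-host
# inventory -- and then delegates to run_template().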
def run_template_ssh(name, opts, path="", expected_num=None, object=None):
    instance_name = opts["instance_name"]
    hostname = opts["hostname"]
    private_key = opts["private_key"]
    baremetal_ssh = opts.get("baremetal_ssh", False)
    if baremetal_ssh:
        # no instance_id or ssh_ip for baremetal
        # we never proxy to baremetal
        proxy_ssh = False
    else:
        instance_id = opts["instance_id"]
        ssh_ip = opts["ssh_ip"]
        proxy_ssh = Config.get("proxy_ssh.enabled")

        if not ssh_ip:
            raise Exception("IP of ssh proxy not available. Synchronization deferred")

    (opts, fqp) = get_playbook_fn(opts, path)
    private_key_pathname = fqp + ".key"
    config_pathname = fqp + ".cfg"
    hosts_pathname = fqp + ".hosts"

    f = open(private_key_pathname, "w")
    f.write(private_key)
    f.close()

    f = open(config_pathname, "w")
    f.write("[ssh_connection]\n")
    if proxy_ssh:
        proxy_ssh_key = Config.get("proxy_ssh.key")
        proxy_ssh_user = Config.get("proxy_ssh.user")
        if proxy_ssh_key:
            # If proxy_ssh_key is known, then we can proxy into the compute
            # node without needing to have the OpenCloud sshd machinery in
            # place.
            proxy_command = (
                "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s nc %s 22"
                % (proxy_ssh_key, proxy_ssh_user, hostname, ssh_ip)
            )
        else:
            proxy_command = (
                "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s"
                % (private_key_pathname, instance_id, hostname)
            )
        f.write('ssh_args = -o "%s"\n' % proxy_command)
    f.write("scp_if_ssh = True\n")
    f.write("pipelining = True\n")
    f.write("\n[defaults]\n")
    f.write("host_key_checking = False\n")
    f.write("timeout = 30\n")
    f.close()

    f = open(hosts_pathname, "w")
    f.write("[%s]\n" % instance_name)
    f.write("%s ansible_ssh_private_key_file=%s\n" % (ssh_ip, private_key_pathname))
    f.close()

    # SSH will complain if private key is world or group readable
    os.chmod(private_key_pathname, 0o600)

    print("ANSIBLE_CONFIG=%s" % config_pathname)
    print("ANSIBLE_HOSTS=%s" % hosts_pathname)

    return run_template(
        name,
        opts,
        path,
        ansible_config=config_pathname,
        ansible_hosts=hosts_pathname,
        run_ansible_script="/opt/xos/synchronizers/base/run_ansible_verbose",
        object=object,
    )


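# main() below is a standalone test driver that runs a sample playbook with
# hard-coded example arguments; it appears to be intended for manual testing
# rather than normal synchronizer operation.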
def main():
    run_template(
        "ansible/sync_user_deployments.yaml",
        {
            "endpoint": "http://172.31.38.128:5000/v2.0/",
            "name": "Sapan Bhatia",
            "email": "gwsapan@gmail.com",
            "password": "foobar",
            "admin_user": "admin",
            "admin_password": "6a789bf69dd647e2",
            "admin_tenant": "admin",
            "tenant": "demo",
            "roles": ["user", "admin"],
        },
    )