SEBA-560 Remove Ansible from the synchronizer framework

- Remove the Ansible helper modules (ansible_helper, ansible_main,
  ansible_runner) and AnsibleSyncStep from xossynchronizer, and drop the
  ansible requirement
- Resolve the NodeNotFound exception raised by the NetworkX upgrade when
  computing dependency paths in event_loop
- Revise the containers/xos Makefile to use the standard DOCKER_* build
  variables
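
For reference, a minimal sketch (not part of this change; model names are
made up) of the behaviour being handled: all_shortest_paths() returns a lazy
generator, and newer NetworkX releases may raise NodeNotFound rather than
NetworkXNoPath when an endpoint is missing from the dependency graph, so
both exceptions are now treated as "the two models are unrelated":

    import networkx as nx

    G = nx.DiGraph()
    G.add_edge("Instance", "Network")

    try:
        paths = nx.all_shortest_paths(G, "MissingModel", "Network")
        any(paths)  # the generator is lazy; the exception surfaces here
    except (nx.NetworkXNoPath, nx.NodeNotFound):
        pass  # unrelated models, same handling as in event_loop.py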

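The containers/xos Makefile now follows the standard Docker variable
conventions: REGISTRY, REPOSITORY and TAG become DOCKER_REGISTRY,
DOCKER_REPOSITORY and DOCKER_TAG, and DOCKER_TAG defaults to the contents of
the top-level VERSION file. Example invocation (values are illustrative
only):

    DOCKER_REGISTRY=192.168.99.100:3000/ DOCKER_REPOSITORY=xosproject/ \
      DOCKER_BUILD_ARGS="--no-cache" DOCKER_TAG=dev make build
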
Change-Id: Idb29ed71ba46f97d8561a702b4c7e75d541d4e2c
diff --git a/VERSION b/VERSION
index 448fac8..818bd47 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-3.0.6-dev
+3.0.6
diff --git a/containers/chameleon/Dockerfile.chameleon b/containers/chameleon/Dockerfile.chameleon
index bb8e9da..52eb276 100644
--- a/containers/chameleon/Dockerfile.chameleon
+++ b/containers/chameleon/Dockerfile.chameleon
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/chameleon
-FROM xosproject/xos-base:3.0.5
+FROM xosproject/xos-base:3.0.6
 
 # xos-base already has protoc and dependencies installed
 
diff --git a/containers/xos/Dockerfile.client b/containers/xos/Dockerfile.client
index 6db1f11..428b4e3 100644
--- a/containers/xos/Dockerfile.client
+++ b/containers/xos/Dockerfile.client
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-client
-FROM xosproject/xos-libraries:3.0.5
+FROM xosproject/xos-libraries:3.0.6
 
 # Label image
 ARG org_label_schema_schema_version=1.0
diff --git a/containers/xos/Dockerfile.libraries b/containers/xos/Dockerfile.libraries
index 8c60ce1..9c08e63 100644
--- a/containers/xos/Dockerfile.libraries
+++ b/containers/xos/Dockerfile.libraries
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-libraries
-FROM xosproject/xos-base:3.0.5
+FROM xosproject/xos-base:3.0.6
 
 # Add libraries
 COPY lib /opt/xos/lib
diff --git a/containers/xos/Dockerfile.synchronizer-base b/containers/xos/Dockerfile.synchronizer-base
index 00e4766..0e88561 100644
--- a/containers/xos/Dockerfile.synchronizer-base
+++ b/containers/xos/Dockerfile.synchronizer-base
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-synchronizer-base
-FROM xosproject/xos-client:3.0.5
+FROM xosproject/xos-client:3.0.6
 
 COPY xos/xos/logger.py /opt/xos/xos/logger.py
 COPY xos/xos/__init__.py /opt/xos/xos/__init__.py
diff --git a/containers/xos/Dockerfile.xos-core b/containers/xos/Dockerfile.xos-core
index 2bdee57..1b4533c 100644
--- a/containers/xos/Dockerfile.xos-core
+++ b/containers/xos/Dockerfile.xos-core
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 # xosproject/xos-core
-FROM xosproject/xos-libraries:3.0.5
+FROM xosproject/xos-libraries:3.0.6
 
 # Install XOS
 ADD xos /opt/xos
diff --git a/containers/xos/Makefile b/containers/xos/Makefile
index b17f92e..e0ffca1 100644
--- a/containers/xos/Makefile
+++ b/containers/xos/Makefile
@@ -18,36 +18,40 @@
 # Optional parameters are:
-# `REGISTRY=192.168.99.100:3000/ REPOSITORY=xosproject/ DOCKER_BUILD_ARGS="--no-cache" TAG=dev make build`
+# `DOCKER_REGISTRY=192.168.99.100:3000/ DOCKER_REPOSITORY=xosproject/ DOCKER_BUILD_ARGS="--no-cache" DOCKER_TAG=dev make build`
 
-ifeq ($(TAG),)
-TAG := candidate
-endif
+# Variables
+VERSION                  ?= $(shell cat ../../VERSION)
 
-ifeq ($(REPOSITORY),)
-REPOSITORY := xosproject/
-endif
+## Docker related
+DOCKER_REGISTRY          ?=
+DOCKER_REPOSITORY        ?=
+DOCKER_BUILD_ARGS        ?=
+DOCKER_TAG               ?= ${VERSION}
+DOCKER_IMAGENAME_BASE    := ${DOCKER_REGISTRY}${DOCKER_REPOSITORY}xos-base:${DOCKER_TAG}
+DOCKER_IMAGENAME_LIBRARIES := ${DOCKER_REGISTRY}${DOCKER_REPOSITORY}xos-libraries:${DOCKER_TAG}
+DOCKER_IMAGENAME_CLIENT  := ${DOCKER_REGISTRY}${DOCKER_REPOSITORY}xos-client:${DOCKER_TAG}
+DOCKER_IMAGENAME_CORE    := ${DOCKER_REGISTRY}${DOCKER_REPOSITORY}xos-core:${DOCKER_TAG}
+DOCKER_IMAGENAME_SYNC    := ${DOCKER_REGISTRY}${DOCKER_REPOSITORY}xos-synchronizer-base:${DOCKER_TAG}
 
 summary:
 	@echo Building images with:
 	@echo "    Build args:    $(DOCKER_BUILD_ARGS)"
-	@echo "    Registry:      ${REGISTRY}"
-	@echo "    Repository:    ${REPOSITORY}"
-	@echo "    Tag:           ${TAG}"
+	@echo "    Registry:      ${DOCKER_REGISTRY}"
+	@echo "    Repository:    ${DOCKER_REPOSITORY}"
+	@echo "    Tag:           ${DOCKER_TAG}"
 
 build: summary xos-base xos-libraries xos-client xos-core xos-synchronizer-base
 
 xos-base:
-	docker build $(DOCKER_BUILD_ARGS) -t ${REGISTRY}${REPOSITORY}xos-base:${TAG} -f Dockerfile.base .
+	docker build $(DOCKER_BUILD_ARGS) -t ${DOCKER_IMAGENAME_BASE} -f Dockerfile.base .
 
 xos-libraries:
-	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${REGISTRY}${REPOSITORY}xos-libraries:${TAG} -f Dockerfile.libraries ../..
+	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${DOCKER_IMAGENAME_LIBRARIES} -f Dockerfile.libraries ../..
 
 xos-client:
-	rm -rf tmp.chameleon
-	cp -R ../../../../component/chameleon tmp.chameleon
-	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${REGISTRY}${REPOSITORY}xos-client:${TAG} -f Dockerfile.client ../..
+	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${DOCKER_IMAGENAME_CLIENT} -f Dockerfile.client ../..
 
 xos-core:
-	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${REGISTRY}${REPOSITORY}xos-core:${TAG} -f Dockerfile.xos-core ../..
+	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${DOCKER_IMAGENAME_CORE} -f Dockerfile.xos-core ../..
 
 xos-synchronizer-base:
-	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${REGISTRY}${REPOSITORY}xos-synchronizer-base:${TAG} -f Dockerfile.synchronizer-base ../..
+	docker build --no-cache $(DOCKER_BUILD_ARGS) -t ${DOCKER_IMAGENAME_SYNC} -f Dockerfile.synchronizer-base ../..
diff --git a/lib/xos-synchronizer/requirements.txt b/lib/xos-synchronizer/requirements.txt
index c539816..3fde4f8 100644
--- a/lib/xos-synchronizer/requirements.txt
+++ b/lib/xos-synchronizer/requirements.txt
@@ -1,5 +1,4 @@
 Jinja2~=2.10
-ansible~=2.7.9
 astunparse~=1.6.2
 confluent-kafka==0.11.5
 inflect~=2.1.0
diff --git a/lib/xos-synchronizer/xos-synchronizer-tests/test_run.py b/lib/xos-synchronizer/xos-synchronizer-tests/test_run.py
index f21428e..ae7c116 100644
--- a/lib/xos-synchronizer/xos-synchronizer-tests/test_run.py
+++ b/lib/xos-synchronizer/xos-synchronizer-tests/test_run.py
@@ -30,18 +30,6 @@
 sync_lib_dir = os.path.join(test_path, "..", "xossynchronizer")
 xos_dir = os.path.join(test_path, "..", "..", "..", "xos")
 
-ANSIBLE_FILE = "/tmp/payload_test"
-
-
-def run_fake_ansible_template(*args, **kwargs):
-    opts = args[1]
-    open(ANSIBLE_FILE, "w").write(json.dumps(opts))
-
-
-def get_ansible_output():
-    ansible_str = open(ANSIBLE_FILE).read()
-    return json.loads(ansible_str)
-
 
 class TestRun(unittest.TestCase):
     def setUp(self):
diff --git a/lib/xos-synchronizer/xossynchronizer/ansible_helper.py b/lib/xos-synchronizer/xossynchronizer/ansible_helper.py
deleted file mode 100644
index 846aeb9..0000000
--- a/lib/xos-synchronizer/xossynchronizer/ansible_helper.py
+++ /dev/null
@@ -1,321 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2017-present Open Networking Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import, print_function
-
-import json
-import os
-import pickle
-import random
-import string
-import tempfile
-
-import jinja2
-
-from multistructlog import create_logger
-from xosconfig import Config
-from six.moves import range
-
-log = create_logger(Config().get("logging"))
-
-
-step_dir = Config.get("steps_dir")
-sys_dir = Config.get("sys_dir")
-
-os_template_loader = jinja2.FileSystemLoader(
-    searchpath=[step_dir, "/opt/xos/synchronizers/shared_templates"]
-)
-os_template_env = jinja2.Environment(loader=os_template_loader)
-
-
-def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
-    return "".join(random.choice(chars) for _ in range(size))
-
-
-def shellquote(s):
-    return "'" + s.replace("'", "'\\''") + "'"
-
-
-def get_playbook_fn(opts, path):
-    if not opts.get("ansible_tag", None):
-        # if no ansible_tag is in the options, then generate a unique one
-        objname = id_generator()
-        opts = opts.copy()
-        opts["ansible_tag"] = objname
-
-    objname = opts["ansible_tag"]
-
-    pathed_sys_dir = os.path.join(sys_dir, path)
-    if not os.path.isdir(pathed_sys_dir):
-        os.makedirs(pathed_sys_dir)
-
-    # symlink steps/roles into sys/roles so that playbooks can access roles
-    roledir = os.path.join(step_dir, "roles")
-    rolelink = os.path.join(pathed_sys_dir, "roles")
-    if os.path.isdir(roledir) and not os.path.islink(rolelink):
-        os.symlink(roledir, rolelink)
-
-    return (opts, os.path.join(pathed_sys_dir, objname))
-
-
-def run_playbook(ansible_hosts, ansible_config, fqp, opts):
-    args = {
-        "ansible_hosts": ansible_hosts,
-        "ansible_config": ansible_config,
-        "fqp": fqp,
-        "opts": opts,
-        "config_file": Config.get_config_file(),
-    }
-
-    keep_temp_files = Config.get("keep_temp_files")
-
-    dir = tempfile.mkdtemp()
-    args_fn = None
-    result_fn = None
-    try:
-        log.info("creating args file", dir=dir)
-
-        args_fn = os.path.join(dir, "args")
-        result_fn = os.path.join(dir, "result")
-
-        open(args_fn, "w").write(pickle.dumps(args))
-
-        ansible_main_fn = os.path.join(os.path.dirname(__file__), "ansible_main.py")
-
-        os.system("python %s %s %s" % (ansible_main_fn, args_fn, result_fn))
-
-        result = pickle.loads(open(result_fn).read())
-
-        if hasattr(result, "exception"):
-            log.error("Exception in playbook", exception=result["exception"])
-
-        stats = result.get("stats", None)
-        aresults = result.get("aresults", None)
-    except BaseException:
-        log.exception("Exception running ansible_main")
-        stats = None
-        aresults = None
-    finally:
-        if not keep_temp_files:
-            if args_fn and os.path.exists(args_fn):
-                os.remove(args_fn)
-            if result_fn and os.path.exists(result_fn):
-                os.remove(result_fn)
-            os.rmdir(dir)
-
-    return (stats, aresults)
-
-
-def run_template(
-    name,
-    opts,
-    path="",
-    expected_num=None,
-    ansible_config=None,
-    ansible_hosts=None,
-    run_ansible_script=None,
-    object=None,
-):
-    template = os_template_env.get_template(name)
-    buffer = template.render(opts)
-
-    (opts, fqp) = get_playbook_fn(opts, path)
-
-    f = open(fqp, "w")
-    f.write(buffer)
-    f.flush()
-
-    """
-    q = Queue()
-    p = Process(target=run_playbook, args=(ansible_hosts, ansible_config, fqp, opts, q,))
-    p.start()
-    stats,aresults = q.get()
-    p.join()
-    """
-    stats, aresults = run_playbook(ansible_hosts, ansible_config, fqp, opts)
-
-    error_msg = []
-
-    output_file = fqp + ".out"
-    try:
-        if aresults is None:
-            raise ValueError("Error executing playbook %s" % fqp)
-
-        ok_results = []
-        total_unreachable = 0
-        failed = 0
-
-        ofile = open(output_file, "w")
-
-        for x in aresults:
-            if not x.is_failed() and not x.is_unreachable() and not x.is_skipped():
-                ok_results.append(x)
-            elif x.is_unreachable():
-                failed += 1
-                total_unreachable += 1
-                try:
-                    error_msg.append(x._result["msg"])
-                except BaseException:
-                    pass
-            elif x.is_failed():
-                failed += 1
-                try:
-                    error_msg.append(x._result["msg"])
-                except BaseException:
-                    pass
-
-            # FIXME (zdw, 2017-02-19) - may not be needed with new callback logging
-
-            ofile.write("%s: %s\n" % (x._task, str(x._result)))
-
-            if object:
-                oprops = object.tologdict()
-                ansible = x._result
-                oprops["xos_type"] = "ansible"
-                oprops["ansible_result"] = json.dumps(ansible)
-
-                if failed == 0:
-                    oprops["ansible_status"] = "OK"
-                else:
-                    oprops["ansible_status"] = "FAILED"
-
-                log.info("Ran Ansible task", task=x._task, **oprops)
-
-        ofile.close()
-
-        if (expected_num is not None) and (len(ok_results) != expected_num):
-            raise ValueError(
-                "Unexpected num %s!=%d" % (str(expected_num), len(ok_results))
-            )
-
-        if failed:
-            raise ValueError("Ansible playbook failed.")
-
-        # NOTE(smbaker): Playbook errors are slipping through where `aresults` does not show any failed tasks, but
-        # `stats` does show them. See CORD-3169.
-        hosts = sorted(stats.processed.keys())
-        for h in hosts:
-            t = stats.summarize(h)
-            if t["unreachable"] > 0:
-                raise ValueError(
-                    "Ansible playbook reported unreachable for host %s" % h
-                )
-            if t["failures"] > 0:
-                raise ValueError("Ansible playbook reported failures for host %s" % h)
-
-    except ValueError:
-        if error_msg:
-            try:
-                error = " // ".join(error_msg)
-            except BaseException:
-                error = "failed to join error_msg"
-            raise Exception(error)
-        else:
-            raise
-
-    processed_results = [x._result for x in ok_results]
-    return processed_results[1:]  # 0 is setup
-
-
-def run_template_ssh(name, opts, path="", expected_num=None, object=None):
-    instance_name = opts["instance_name"]
-    hostname = opts["hostname"]
-    private_key = opts["private_key"]
-    baremetal_ssh = opts.get("baremetal_ssh", False)
-    if baremetal_ssh:
-        # no instance_id or ssh_ip for baremetal
-        # we never proxy to baremetal
-        proxy_ssh = False
-    else:
-        instance_id = opts["instance_id"]
-        ssh_ip = opts["ssh_ip"]
-        proxy_ssh = Config.get("proxy_ssh.enabled")
-
-        if not ssh_ip:
-            raise Exception("IP of ssh proxy not available. Synchronization deferred")
-
-    (opts, fqp) = get_playbook_fn(opts, path)
-    private_key_pathname = fqp + ".key"
-    config_pathname = fqp + ".cfg"
-    hosts_pathname = fqp + ".hosts"
-
-    f = open(private_key_pathname, "w")
-    f.write(private_key)
-    f.close()
-
-    f = open(config_pathname, "w")
-    f.write("[ssh_connection]\n")
-    if proxy_ssh:
-        proxy_ssh_key = Config.get("proxy_ssh.key")
-        proxy_ssh_user = Config.get("proxy_ssh.user")
-        if proxy_ssh_key:
-            # If proxy_ssh_key is known, then we can proxy into the compute
-            # node without needing to have the OpenCloud sshd machinery in
-            # place.
-            proxy_command = (
-                "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s nc %s 22"
-                % (proxy_ssh_key, proxy_ssh_user, hostname, ssh_ip)
-            )
-        else:
-            proxy_command = (
-                "ProxyCommand ssh -q -i %s -o StrictHostKeyChecking=no %s@%s"
-                % (private_key_pathname, instance_id, hostname)
-            )
-        f.write('ssh_args = -o "%s"\n' % proxy_command)
-    f.write("scp_if_ssh = True\n")
-    f.write("pipelining = True\n")
-    f.write("\n[defaults]\n")
-    f.write("host_key_checking = False\n")
-    f.write("timeout = 30\n")
-    f.close()
-
-    f = open(hosts_pathname, "w")
-    f.write("[%s]\n" % instance_name)
-    f.write("%s ansible_ssh_private_key_file=%s\n" % (ssh_ip, private_key_pathname))
-    f.close()
-
-    # SSH will complain if private key is world or group readable
-    os.chmod(private_key_pathname, 0o600)
-
-    print("ANSIBLE_CONFIG=%s" % config_pathname)
-    print("ANSIBLE_HOSTS=%s" % hosts_pathname)
-
-    return run_template(
-        name,
-        opts,
-        path,
-        ansible_config=config_pathname,
-        ansible_hosts=hosts_pathname,
-        run_ansible_script="/opt/xos/synchronizers/base/run_ansible_verbose",
-        object=object,
-    )
-
-
-def main():
-    run_template(
-        "ansible/sync_user_deployments.yaml",
-        {
-            "endpoint": "http://172.31.38.128:5000/v2.0/",
-            "name": "Sapan Bhatia",
-            "email": "gwsapan@gmail.com",
-            "password": "foobar",
-            "admin_user": "admin",
-            "admin_password": "6a789bf69dd647e2",
-            "admin_tenant": "admin",
-            "tenant": "demo",
-            "roles": ["user", "admin"],
-        },
-    )
diff --git a/lib/xos-synchronizer/xossynchronizer/ansible_main.py b/lib/xos-synchronizer/xossynchronizer/ansible_main.py
deleted file mode 100644
index 6a8c711..0000000
--- a/lib/xos-synchronizer/xossynchronizer/ansible_main.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright 2017-present Open Networking Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import os
-import pickle
-import sys
-import traceback
-
-from xosconfig import Config
-
-try:
-    # Python 2: "reload" is built-in
-    reload  # pylint: disable=reload-builtin
-except NameError:
-    from importlib import reload
-
-sys.path.append("/opt/xos")
-
-
-def run_playbook(ansible_hosts, ansible_config, fqp, opts):
-    try:
-        if ansible_config:
-            os.environ["ANSIBLE_CONFIG"] = ansible_config
-        else:
-            try:
-                del os.environ["ANSIBLE_CONFIG"]
-            except KeyError:
-                pass
-
-        if ansible_hosts:
-            os.environ["ANSIBLE_HOSTS"] = ansible_hosts
-        else:
-            try:
-                del os.environ["ANSIBLE_HOSTS"]
-            except KeyError:
-                pass
-
-        from . import ansible_runner
-
-        reload(ansible_runner)
-
-        # Dropped support for observer_pretend - to be redone
-        runner = ansible_runner.Runner(
-            playbook=fqp, run_data=opts, host_file=ansible_hosts
-        )
-
-        stats, aresults = runner.run()
-    except Exception:
-        return {"stats": None, "aresults": None, "exception": traceback.format_exc()}
-
-    return {"stats": stats, "aresults": aresults}
-
-
-def main():
-    input_fn = sys.argv[1]
-    result_fn = sys.argv[2]
-
-    args = pickle.loads(open(input_fn).read())
-
-    Config.init(args["config_file"], "synchronizer-config-schema.yaml")
-
-    ansible_hosts = args["ansible_hosts"]
-    ansible_config = args["ansible_config"]
-    fqp = args["fqp"]
-    opts = args["opts"]
-
-    result = run_playbook(ansible_hosts, ansible_config, fqp, opts)
-
-    open(result_fn, "w").write(pickle.dumps(result))
-
-
-if __name__ == "__main__":
-    main()
diff --git a/lib/xos-synchronizer/xossynchronizer/ansible_runner.py b/lib/xos-synchronizer/xossynchronizer/ansible_runner.py
deleted file mode 100644
index 2615346..0000000
--- a/lib/xos-synchronizer/xossynchronizer/ansible_runner.py
+++ /dev/null
@@ -1,396 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2017-present Open Networking Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import json
-import os
-import uuid
-
-from ansible import constants
-from ansible.executor import playbook_executor
-from ansible.inventory.manager import InventoryManager
-from ansible.parsing.dataloader import DataLoader
-from ansible.plugins.callback import CallbackBase
-from ansible.utils.display import Display
-from ansible.vars.manager import VariableManager
-
-from multistructlog import create_logger
-from xosconfig import Config
-
-try:
-    # Python 2: "reload" is built-in
-    # pylint: disable=W1626
-    reload
-except NameError:
-    # Python 3: "reload" is part of importlib
-    from importlib import reload
-
-constants = reload(constants)
-
-
-log = create_logger(Config().get("logging"))
-
-
-class ResultCallback(CallbackBase):
-
-    CALLBACK_VERSION = 2.0
-    CALLBACK_NAME = "resultcallback"
-    CALLBACK_TYPE = "programmatic"
-
-    def __init__(self):
-        super(ResultCallback, self).__init__()
-        self.results = []
-        self.uuid = str(uuid.uuid1())
-        self.playbook_status = "OK"
-
-    def v2_playbook_on_start(self, playbook):
-        self.playbook = playbook._file_name
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "playbook start",
-            "ansible_status": "OK",
-            "ansible_playbook": self.playbook,
-        }
-        log.info("PLAYBOOK START", playbook=self.playbook, **log_extra)
-
-    def v2_playbook_on_stats(self, stats):
-        host_stats = {}
-        for host in stats.processed.keys():
-            host_stats[host] = stats.summarize(host)
-
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "playbook stats",
-            "ansible_status": self.playbook_status,
-            "ansible_playbook": self.playbook,
-            "ansible_result": json.dumps(host_stats),
-        }
-
-        if self.playbook_status == "OK":
-            log.info("PLAYBOOK END", playbook=self.playbook, **log_extra)
-        else:
-            log.error("PLAYBOOK END", playbook=self.playbook, **log_extra)
-
-    def v2_playbook_on_play_start(self, play):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "play start",
-            "ansible_status": self.playbook_status,
-            "ansible_playbook": self.playbook,
-        }
-        log.debug("PLAY START", play_name=play.name, **log_extra)
-
-    def v2_runner_on_ok(self, result, **kwargs):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "OK",
-            "ansible_result": json.dumps(result._result),
-            "ansible_task": result._task,
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.debug("OK", task=str(result._task), **log_extra)
-        self.results.append(result)
-
-    def v2_runner_on_failed(self, result, **kwargs):
-        self.playbook_status = "FAILED"
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "FAILED",
-            "ansible_result": json.dumps(result._result),
-            "ansible_task": result._task,
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.error("FAILED", task=str(result._task), **log_extra)
-        self.results.append(result)
-
-    def v2_runner_on_async_failed(self, result, **kwargs):
-        self.playbook_status = "FAILED"
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "ASYNC FAILED",
-            "ansible_result": json.dumps(result._result),
-            "ansible_task": result._task,
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.error("ASYNC FAILED", task=str(result._task), **log_extra)
-
-    def v2_runner_on_skipped(self, result, **kwargs):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "SKIPPED",
-            "ansible_result": json.dumps(result._result),
-            "ansible_task": result._task,
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.debug("SKIPPED", task=str(result._task), **log_extra)
-        self.results.append(result)
-
-    def v2_runner_on_unreachable(self, result, **kwargs):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "UNREACHABLE",
-            "ansible_result": json.dumps(result._result),
-            "ansible_task": result._task,
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.error("UNREACHABLE", task=str(result._task), **log_extra)
-        self.results.append(result)
-
-    def v2_runner_retry(self, result, **kwargs):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "RETRY",
-            "ansible_result": json.dumps(result._result),
-            "ansible_task": result._task,
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.warning(
-            "RETRYING - attempt",
-            task=str(result._task),
-            attempt=result._result["attempts"],
-            **log_extra
-        )
-        self.results.append(result)
-
-    def v2_playbook_on_handler_task_start(self, task, **kwargs):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "task",
-            "ansible_status": "HANDLER",
-            "ansible_task": task.get_name().strip(),
-            "ansible_playbook": self.playbook,
-            # 'ansible_host': result._host.get_name()
-        }
-        log.debug("HANDLER", task=task.get_name().strip(), **log_extra)
-
-    def v2_playbook_on_import_for_host(self, result, imported_file):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "import",
-            "ansible_status": "IMPORT",
-            "ansible_result": json.dumps(result._result),
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.debug("IMPORT", imported_file=imported_file, **log_extra)
-        self.results.append(result)
-
-    def v2_playbook_on_not_import_for_host(self, result, missing_file):
-        log_extra = {
-            "xos_type": "ansible",
-            "ansible_uuid": self.uuid,
-            "ansible_type": "import",
-            "ansible_status": "MISSING IMPORT",
-            "ansible_result": json.dumps(result._result),
-            "ansible_playbook": self.playbook,
-            "ansible_host": result._host.get_name(),
-        }
-        log.debug("MISSING IMPORT", missing=missing_file, **log_extra)
-        self.results.append(result)
-
-
-class Options(object):
-    """
-    Options class to replace Ansible OptParser
-    """
-
-    def __init__(
-        self,
-        ask_pass=None,
-        ask_su_pass=None,
-        ask_sudo_pass=None,
-        become=None,
-        become_ask_pass=None,
-        become_method=None,
-        become_user=None,
-        check=None,
-        connection=None,
-        diff=None,
-        flush_cache=None,
-        force_handlers=None,
-        forks=1,
-        listtags=None,
-        listtasks=None,
-        module_path=None,
-        new_vault_password_file=None,
-        one_line=None,
-        output_file=None,
-        poll_interval=None,
-        private_key_file=None,
-        remote_user=None,
-        scp_extra_args=None,
-        seconds=None,
-        sftp_extra_args=None,
-        skip_tags=None,
-        ssh_common_args=None,
-        ssh_extra_args=None,
-        sudo=None,
-        sudo_user=None,
-        syntax=None,
-        tags=None,
-        timeout=None,
-        tree=None,
-        vault_password_files=None,
-        ask_vault_pass=None,
-        extra_vars=None,
-        inventory=None,
-        listhosts=None,
-        module_paths=None,
-        subset=None,
-        verbosity=None,
-    ):
-
-        if tags:
-            self.tags = tags
-
-        if skip_tags:
-            self.skip_tags = skip_tags
-
-        self.ask_pass = ask_pass
-        self.ask_su_pass = ask_su_pass
-        self.ask_sudo_pass = ask_sudo_pass
-        self.ask_vault_pass = ask_vault_pass
-        self.become = become
-        self.become_ask_pass = become_ask_pass
-        self.become_method = become_method
-        self.become_user = become_user
-        self.check = check
-        self.connection = connection
-        self.diff = diff
-        self.extra_vars = extra_vars
-        self.flush_cache = flush_cache
-        self.force_handlers = force_handlers
-        self.forks = forks
-        self.inventory = inventory
-        self.listhosts = listhosts
-        self.listtags = listtags
-        self.listtasks = listtasks
-        self.module_path = module_path
-        self.module_paths = module_paths
-        self.new_vault_password_file = new_vault_password_file
-        self.one_line = one_line
-        self.output_file = output_file
-        self.poll_interval = poll_interval
-        self.private_key_file = private_key_file
-        self.remote_user = remote_user
-        self.scp_extra_args = scp_extra_args
-        self.seconds = seconds
-        self.sftp_extra_args = sftp_extra_args
-        self.ssh_common_args = ssh_common_args
-        self.ssh_extra_args = ssh_extra_args
-        self.subset = subset
-        self.sudo = sudo
-        self.sudo_user = sudo_user
-        self.syntax = syntax
-        self.timeout = timeout
-        self.tree = tree
-        self.vault_password_files = vault_password_files
-        self.verbosity = verbosity
-
-
-class Runner(object):
-    def __init__(
-        self, playbook, run_data, private_key_file=None, verbosity=0, host_file=None
-    ):
-
-        self.playbook = playbook
-        self.run_data = run_data
-
-        self.options = Options()
-        self.options.output_file = playbook + ".result"
-        self.options.private_key_file = private_key_file
-        self.options.verbosity = verbosity
-        self.options.connection = "ssh"  # Need a connection type "smart" or "ssh"
-        # self.options.become = True
-        self.options.become_method = "sudo"
-        self.options.become_user = "root"
-
-        # Set global verbosity
-        self.display = Display()
-        self.display.verbosity = self.options.verbosity
-        # Executor appears to have it's own
-        # verbosity object/setting as well
-        playbook_executor.verbosity = self.options.verbosity
-
-        # Become Pass Needed if not logging in as user root
-        # passwords = {'become_pass': become_pass}
-
-        # Gets data from YAML/JSON files
-        self.loader = DataLoader()
-        try:
-            self.loader.set_vault_password(os.environ["VAULT_PASS"])
-        except AttributeError:
-            pass
-
-        # Set inventory, using most of above objects
-        if host_file:
-            self.inventory = InventoryManager(loader=self.loader, sources=host_file)
-        else:
-            self.inventory = InventoryManager(loader=self.loader)
-
-        # All the variables from all the various places
-        self.variable_manager = VariableManager(
-            loader=self.loader, inventory=self.inventory
-        )
-        self.variable_manager.extra_vars = {}  # self.run_data
-
-        # Setup playbook executor, but don't run until run() called
-        self.pbex = playbook_executor.PlaybookExecutor(
-            playbooks=[playbook],
-            inventory=self.inventory,
-            variable_manager=self.variable_manager,
-            loader=self.loader,
-            options=self.options,
-            passwords={},
-        )
-
-    def run(self):
-        os.environ[
-            "REQUESTS_CA_BUNDLE"
-        ] = "/usr/local/share/ca-certificates/local_certs.crt"
-        callback = ResultCallback()
-        self.pbex._tqm._stdout_callback = callback
-
-        self.pbex.run()
-        stats = self.pbex._tqm._stats
-
-        # os.remove(self.hosts.name)
-
-        return stats, callback.results
diff --git a/lib/xos-synchronizer/xossynchronizer/event_loop.py b/lib/xos-synchronizer/xossynchronizer/event_loop.py
index 0c69694..21040c3 100644
--- a/lib/xos-synchronizer/xossynchronizer/event_loop.py
+++ b/lib/xos-synchronizer/xossynchronizer/event_loop.py
@@ -27,6 +27,7 @@
 from networkx import (
     DiGraph,
     NetworkXNoPath,
+    NodeNotFound,
     all_shortest_paths,
     weakly_connected_component_subgraphs,
 )
@@ -527,7 +528,7 @@
         try:
             any(paths)
             paths = all_shortest_paths(G, m1, m2)
-        except NetworkXNoPath:
+        except (NetworkXNoPath, NodeNotFound):
             # Easy. The two models are unrelated.
             return False, None
 
diff --git a/lib/xos-synchronizer/xossynchronizer/steps/ansiblesyncstep.py b/lib/xos-synchronizer/xossynchronizer/steps/ansiblesyncstep.py
deleted file mode 100644
index 3781e1f..0000000
--- a/lib/xos-synchronizer/xossynchronizer/steps/ansiblesyncstep.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2017-present Open Networking Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-from xossynchronizer.ansible_helper import run_template
-
-from .syncstep import SyncStep
-
-
-class AnsibleSyncStep(SyncStep):
-    def sync_record(self, o):
-        self.log.debug("In default sync record", **o.tologdict())
-
-        tenant_fields = self.map_sync_inputs(o)
-        if tenant_fields == SyncStep.SYNC_WITHOUT_RUNNING:
-            return
-
-        main_obj = self.observes_classes[0]
-
-        path = "".join(main_obj.__name__).lower()
-        res = run_template(self.playbook, tenant_fields, path=path, object=o)
-
-        if hasattr(self, "map_sync_outputs"):
-            self.map_sync_outputs(o, res)
-
-        self.log.debug("Finished default sync record", **o.tologdict())
-
-    def delete_record(self, o):
-        self.log.debug("In default delete record", **o.tologdict())
-
-        # If there is no map_delete_inputs, then assume deleting a record is a no-op.
-        if not hasattr(self, "map_delete_inputs"):
-            return
-
-        tenant_fields = self.map_delete_inputs(o)
-
-        main_obj = self.observes_classes[0]
-
-        path = "".join(main_obj.__name__).lower()
-
-        tenant_fields["delete"] = True
-        res = run_template(self.playbook, tenant_fields, path=path)
-
-        if hasattr(self, "map_delete_outputs"):
-            self.map_delete_outputs(o, res)
-        else:
-            # "rc" is often only returned when something bad happens, so assume that no "rc" implies a successful rc
-            # of 0.
-            if res[0].get("rc", 0) != 0:
-                raise Exception("Nonzero rc from Ansible during delete_record")
-
-        self.log.debug("Finished default delete record", **o.tologdict())
diff --git a/lib/xos-synchronizer/xossynchronizer/steps/syncstep.py b/lib/xos-synchronizer/xossynchronizer/steps/syncstep.py
index 08637ed..715046a 100644
--- a/lib/xos-synchronizer/xossynchronizer/steps/syncstep.py
+++ b/lib/xos-synchronizer/xossynchronizer/steps/syncstep.py
@@ -60,11 +60,6 @@
         dependencies    list of names of models that must be synchronized first if the current model depends on them
     """
 
-    # map_sync_outputs can return this value to cause a step to be marked
-    # successful without running ansible. Used for sync_network_controllers
-    # on nat networks.
-    SYNC_WITHOUT_RUNNING = "sync_without_running"
-
     slow = False
 
     def get_prop(self, prop):